diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 17561e4e..6c912b45 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -4,6 +4,8 @@ Please briefly explain the changes you made here. ### Pull request type +Please delete options that are not relevant. + - [ ] Bugfix - [ ] Feature - [ ] Code style update (formatting, renaming) @@ -15,7 +17,18 @@ Please briefly explain the changes you made here. ### Related issues -Delete section if this PR doesn't resolve any issues. Link the issue if it does. +Delete section if this PR doesn't resolve any issues. + +Closes (link to issue) + +### Checklist: + +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my own code +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes ###################################### diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index c2e339d7..3f014924 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -34,6 +34,10 @@ jobs: - name: Run memgraph instance run: | docker run -d -p 7687:7687 memgraph/memgraph --telemetry-enabled=False + - name: Install Memgraph + run: | + curl -O https://download.memgraph.com/memgraph/v2.2.1/ubuntu-20.04/memgraph_2.2.1-1_amd64.deb + sudo dpkg -i memgraph_2.2.1-1_amd64.deb - name: Test project run: | poetry install @@ -71,4 +75,4 @@ jobs: - name: Test project run: | poetry install - poetry run pytest -vvv -m "not slow" + poetry run pytest -vvv -m "not slow and not ubuntu and not docker" diff --git a/docs/reference/gqlalchemy/connection.md b/docs/reference/gqlalchemy/connection.md deleted file mode 100644 index ad843e52..00000000 --- a/docs/reference/gqlalchemy/connection.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -sidebar_label: connection -title: gqlalchemy.connection ---- - -## Connection Objects - -```python -class Connection(ABC) -``` - -#### execute - -```python -@abstractmethod -def execute(query: str) -> None -``` - -Executes Cypher query without returning any results. - -#### execute\_and\_fetch - -```python -@abstractmethod -def execute_and_fetch(query: str) -> Iterator[Dict[str, Any]] -``` - -Executes Cypher query and returns iterator of results. - -#### is\_active - -```python -@abstractmethod -def is_active() -> bool -``` - -Returns True if connection is active and can be used - -#### create - -```python -@staticmethod -def create(**kwargs) -> "Connection" -``` - -Creates an instance of a connection. - -## MemgraphConnection Objects - -```python -class MemgraphConnection(Connection) -``` - -#### execute - -```python -def execute(query: str) -> None -``` - -Executes Cypher query without returning any results. - -#### execute\_and\_fetch - -```python -def execute_and_fetch(query: str) -> Iterator[Dict[str, Any]] -``` - -Executes Cypher query and returns iterator of results. 
- -#### is\_active - -```python -def is_active() -> bool -``` - -Returns True if connection is active and can be used - diff --git a/docs/reference/gqlalchemy/disk_storage.md b/docs/reference/gqlalchemy/disk_storage.md index ca63b02e..ae13fd58 100644 --- a/docs/reference/gqlalchemy/disk_storage.md +++ b/docs/reference/gqlalchemy/disk_storage.md @@ -9,6 +9,8 @@ title: gqlalchemy.disk_storage class OnDiskPropertyDatabase(ABC) ``` +An abstract class for implementing on-disk storage features with specific databases. + #### save\_node\_property ```python @@ -79,6 +81,15 @@ def execute_query(query: str) -> List[str] Executes an SQL query on the on disk property database. +**Arguments**: + +- `query` - A string representing an SQL query. + + +**Returns**: + + A list of strings representing the results of the query. + #### drop\_database ```python @@ -95,6 +106,12 @@ def save_node_property(node_id: int, property_name: str, property_value: str) -> Saves a node property to an on disk database. +**Arguments**: + +- `node_id` - An integer representing the internal id of the node. +- `property_name` - A string representing the name of the property. +- `property_value` - A string representing the value of the property. + #### load\_node\_property ```python @@ -103,6 +120,16 @@ def load_node_property(node_id: int, property_name: str) -> Optional[str] Loads a node property from an on disk database. +**Arguments**: + +- `node_id` - An integer representing the internal id of the node. +- `property_name` - A string representing the name of the property. + + +**Returns**: + + An optional string representing the property value. + #### delete\_node\_property ```python @@ -111,6 +138,11 @@ def delete_node_property(node_id: int, property_name: str) -> None Deletes a node property from an on disk database. +**Arguments**: + +- `node_id` - An integer representing the internal id of the node. +- `property_name` - A string representing the name of the property. + #### save\_relationship\_property ```python @@ -119,6 +151,12 @@ def save_relationship_property(relationship_id: int, property_name: str, propert Saves a relationship property to an on disk database. +**Arguments**: + +- `relationship_id` - An integer representing the internal id of the relationship. +- `property_name` - A string representing the name of the property. +- `property_value` - A string representing the value of the property. + #### load\_relationship\_property ```python @@ -127,6 +165,16 @@ def load_relationship_property(relationship_id: int, property_name: str) -> Opti Loads a relationship property from an on disk database. +**Arguments**: + +- `relationship_id` - An integer representing the internal id of the relationship. +- `property_name` - A string representing the name of the property. + + +**Returns**: + + An optional string representing the property value. + #### delete\_relationship\_property ```python @@ -135,3 +183,8 @@ def delete_relationship_property(relationship_id: int, property_name: str) -> No Deletes a node property from an on disk database. +**Arguments**: + +- `relationship_id` - An integer representing the internal id of the relationship. +- `property_name` - A string representing the name of the property. 
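
For reviewers, a minimal usage sketch of the on-disk storage API documented above (the `SQLitePropertyDatabase` constructor and method signatures are taken from the `gqlalchemy/disk_storage.py` changes later in this diff; the path and property values are placeholders):

```python
from gqlalchemy import Memgraph
from gqlalchemy.disk_storage import SQLitePropertyDatabase

db = Memgraph()
# Registers itself as the on-disk property store for `db`.
disk_db = SQLitePropertyDatabase("./properties.db", db)

# Save, read back, and delete a property of the node with internal id 0.
disk_db.save_node_property(0, "description", "a value too large to keep in graph memory")
print(disk_db.load_node_property(0, "description"))
disk_db.delete_node_property(0, "description")
```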
+
diff --git a/docs/reference/gqlalchemy/instance_runner.md b/docs/reference/gqlalchemy/instance_runner.md
new file mode 100644
index 00000000..43ae2353
--- /dev/null
+++ b/docs/reference/gqlalchemy/instance_runner.md
@@ -0,0 +1,134 @@
+---
+sidebar_label: instance_runner
+title: gqlalchemy.instance_runner
+---
+
+#### wait\_for\_port
+
+```python
+def wait_for_port(host: str = "127.0.0.1", port: int = 7687, delay: float = 0.01, timeout: float = 5.0) -> None
+```
+
+Wait for a TCP port to become available.
+
+**Arguments**:
+
+- `host` - A string representing the IP address that is being checked.
+- `port` - An integer representing the port that is being checked.
+- `delay` - A float that defines how long to wait between retries.
+- `timeout` - A float that defines how long to wait for the port.
+
+
+**Raises**:
+
+- `TimeoutError` - Raises an error when the host and port are not accepting
+  connections after the timeout period has passed.
+
+#### wait\_for\_docker\_container
+
+```python
+def wait_for_docker_container(container: "docker.Container", delay: float = 0.01, timeout: float = 5.0) -> None
+```
+
+Wait for a Docker container to enter the status `running`.
+
+**Arguments**:

+- `container` - The Docker container to wait for.
+- `delay` - A float that defines how long to wait between retries.
+- `timeout` - A float that defines how long to wait for the status.
+
+
+**Raises**:
+
+- `TimeoutError` - Raises an error when the container isn't running after the
+  timeout period has passed.
+
+## MemgraphInstanceBinary Objects
+
+```python
+class MemgraphInstanceBinary(MemgraphInstance)
+```
+
+A class for managing Memgraph instances started from binary files on Unix
+systems.
+
+**Attributes**:
+
+- `binary_path` - A string representing the path to a Memgraph binary
+  file.
+- `user` - A string representing the user that should start the Memgraph
+  process.
+
+#### start
+
+```python
+def start(restart: bool = False) -> "Memgraph"
+```
+
+Start the Memgraph instance and return the connection object.
+
+**Arguments**:
+
+- `restart` - A bool indicating if the instance should be
+  restarted if it's already running.
+
+#### stop
+
+```python
+def stop() -> int
+```
+
+Stop the Memgraph instance.
+
+#### is\_running
+
+```python
+def is_running() -> bool
+```
+
+Check if the Memgraph instance is still running.
+
+## MemgraphInstanceDocker Objects
+
+```python
+class MemgraphInstanceDocker(MemgraphInstance)
+```
+
+A class for managing Memgraph instances started in Docker containers.
+
+**Attributes**:
+
+- `docker_image` - An enum representing the Docker image. Values:
+  `DockerImage.MEMGRAPH` and `DockerImage.MAGE`.
+- `docker_image_tag` - A string representing the tag of the Docker image.
+
+#### start
+
+```python
+def start(restart: bool = False) -> "Memgraph"
+```
+
+Start the Memgraph instance and return the connection object.
+
+**Arguments**:
+
+- `restart` - A bool indicating if the instance should be
+  restarted if it's already running.
+
+#### stop
+
+```python
+def stop() -> Dict
+```
+
+Stop the Memgraph instance.
+
+#### is\_running
+
+```python
+def is_running() -> bool
+```
+
+Check if the Memgraph instance is still running.
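
A hedged usage sketch for the new instance runner (all names match this diff; `start_and_connect` comes from `gqlalchemy/instance_runner.py` below, and a running Docker daemon is assumed):

```python
from gqlalchemy import DockerImage, MemgraphInstanceDocker

# Start Memgraph in a Docker container and connect to it on the Bolt port.
instance = MemgraphInstanceDocker(
    docker_image=DockerImage.MEMGRAPH, docker_image_tag="latest", port=7687
)
memgraph = instance.start_and_connect()

print(list(memgraph.execute_and_fetch("RETURN 'Memgraph is running' AS result")))

instance.stop()
```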
+
diff --git a/docs/reference/gqlalchemy/memgraph.md b/docs/reference/gqlalchemy/memgraph.md
index a17b7326..a968f7e2 100644
--- a/docs/reference/gqlalchemy/memgraph.md
+++ b/docs/reference/gqlalchemy/memgraph.md
@@ -97,6 +97,14 @@ def create_stream(stream: MemgraphStream) -> None
 
 Create a stream
 
+#### start\_stream
+
+```python
+def start_stream(stream: MemgraphStream) -> None
+```
+
+Start a stream
+
 #### get\_streams
 
 ```python
@@ -135,7 +143,7 @@ Creates a trigger
 def get_triggers() -> List[str]
 ```
 
-Creates a trigger
+Returns a list of all database triggers
 
 #### drop\_trigger
 
@@ -145,6 +153,14 @@ def drop_trigger(trigger) -> None
 
 Drop a trigger
 
+#### drop\_triggers
+
+```python
+def drop_triggers() -> None
+```
+
+Drops all triggers in the database
+
 #### new\_connection
 
 ```python
@@ -203,6 +219,14 @@ fields from Memgraph and updates it's fields.
 Otherwise it creates a new node with the same properties.
 Null properties are ignored.
 
+#### save\_nodes
+
+```python
+def save_nodes(nodes: List[Node]) -> None
+```
+
+Saves a list of nodes to Memgraph.
+
 #### save\_node\_with\_id
 
 ```python
@@ -287,6 +311,14 @@ If relationship._id is None, it creates a new relationship.
 If you want to set a relationship._id instead of creating a new
 relationship, use `load_relationship` first.
 
+#### save\_relationships
+
+```python
+def save_relationships(relationships: List[Relationship]) -> None
+```
+
+Saves a list of relationships to Memgraph.
+
 #### save\_relationship\_with\_id
 
 ```python
diff --git a/docs/reference/gqlalchemy/models.md b/docs/reference/gqlalchemy/models.md
index 4511b2a5..c3557204 100644
--- a/docs/reference/gqlalchemy/models.md
+++ b/docs/reference/gqlalchemy/models.md
@@ -3,35 +3,88 @@ sidebar_label: models
 title: gqlalchemy.models
 ---
 
+## TriggerEventType Objects
+
+```python
+class TriggerEventType()
+```
+
+An enum representing types of trigger events.
+
+## TriggerEventObject Objects
+
+```python
+class TriggerEventObject()
+```
+
+An enum representing types of trigger objects.
+
+NODE -> `()`
+RELATIONSHIP -> `-->`
+
+## TriggerExecutionPhase Objects
+
+```python
+class TriggerExecutionPhase()
+```
+
+An enum representing types of trigger execution phases.
+
+Enum:
+  BEFORE
+  AFTER
+
 ## MemgraphKafkaStream Objects
 
 ```python
-@dataclass(frozen=True, eq=True)
 class MemgraphKafkaStream(MemgraphStream)
 ```
 
+A class for creating and managing Kafka streams in Memgraph.
+
+**Arguments**:
+
+- `name` - A string representing the stream name.
+- `topics` - A list of strings representing the stream topics.
+- `transform` - A string representing the name of the transformation procedure.
+- `consumer_group` - A string representing the consumer group.
+- `batch_interval` - A string representing the batch interval.
+- `batch_size` - A string representing the batch size.
+- `bootstrap_servers` - A string or list of strings representing bootstrap server addresses.
+
 #### to\_cypher
 
 ```python
 def to_cypher() -> str
 ```
 
-Converts Kafka stream to a cypher clause.
+Converts Kafka stream to a Cypher clause.
 
 ## MemgraphPulsarStream Objects
 
 ```python
-@dataclass(frozen=True, eq=True)
 class MemgraphPulsarStream(MemgraphStream)
 ```
 
+A class for creating and managing Pulsar streams in Memgraph.
+
+**Arguments**:
+
+- `name` - A string representing the stream name.
+- `topics` - A list of strings representing the stream topics.
+- `transform` - A string representing the name of the transformation procedure.
+- `consumer_group` - A string representing the consumer group.
+- `batch_interval` - A string representing the batch interval.
+- `batch_size` - A string representing the batch size.
+- `service_url` - A string representing the service URL.
+
 #### to\_cypher
 
 ```python
 def to_cypher() -> str
 ```
 
-Converts Pulsar stream to a cypher clause.
+Converts Pulsar stream to a Cypher clause.
 
 ## MemgraphTrigger Objects
 
@@ -57,7 +110,7 @@ class GraphObject(BaseModel)
 
 #### \_\_init\_subclass\_\_
 
 ```python
-def __init_subclass__(cls, type=None, label=None, labels=None)
+def __init_subclass__(cls, type=None, label=None, labels=None, index=None, db=None)
 ```
 
 Stores the subclass by type if type is specified, or by class name
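
To make the stream docs concrete, a sketch of defining a Kafka stream and starting it with the new `start_stream` method. The argument names follow the corrected list above, which is an assumption based on Memgraph's `CREATE KAFKA STREAM` clause; the topic and transform names are placeholders:

```python
from gqlalchemy import Memgraph
from gqlalchemy.models import MemgraphKafkaStream

db = Memgraph()

# Roughly: CREATE KAFKA STREAM ratings_stream TOPICS ratings
#          TRANSFORM movielens.rating BOOTSTRAP_SERVERS 'localhost:9092'
stream = MemgraphKafkaStream(
    name="ratings_stream",
    topics=["ratings"],
    transform="movielens.rating",
    bootstrap_servers="localhost:9092",
)
db.create_stream(stream)  # CREATE KAFKA STREAM ...
db.start_stream(stream)   # START STREAM ratings_stream
```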
diff --git a/docs/reference/gqlalchemy/query_builder.md b/docs/reference/gqlalchemy/query_builder.md
index f8fba5cf..f86e2832 100644
--- a/docs/reference/gqlalchemy/query_builder.md
+++ b/docs/reference/gqlalchemy/query_builder.md
@@ -206,7 +206,17 @@ class DeclarativeBase(ABC)
 def match(optional: bool = False) -> "DeclarativeBase"
 ```
 
-Creates a MATCH statement Cypher partial query.
+Obtain data from the database by matching it to a given pattern.
+
+**Arguments**:
+
+- `optional` - A bool indicating if missing parts of the pattern will be
+  filled with null values.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
 
 #### merge
 
@@ -214,7 +224,13 @@ Creates a MATCH statement Cypher partial query.
 def merge() -> "DeclarativeBase"
 ```
 
-Creates a MERGE statement Cypher partial query.
+Ensure that a pattern you are looking for exists in the database.
+This means that if the pattern is not found, it will be created. In a
+way, this clause is like a combination of MATCH and CREATE.
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
 
 #### create
 
@@ -222,7 +238,11 @@ Creates a MERGE statement Cypher partial query.
 def create() -> "DeclarativeBase"
 ```
 
-Creates a CREATE statement Cypher partial query.
+Create nodes and relationships in a graph.
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
 
 #### call
 
@@ -230,7 +250,19 @@ Creates a CREATE statement Cypher partial query.
 def call(procedure: str, arguments: Optional[str] = None) -> "DeclarativeBase"
 ```
 
-Creates a CALL statement Cypher partial query.
+Call a query module procedure.
+
+**Arguments**:
+
+- `procedure` - A string representing the name of the procedure in the
+  format `query_module.procedure`.
+- `arguments` - A string representing the arguments of the procedure in
+  text format.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
 
 #### node
 
@@ -238,7 +270,21 @@ Creates a CALL statement Cypher partial query.
 def node(labels: Union[str, List[str], None] = "", variable: Optional[str] = None, node: Optional["Node"] = None, **kwargs, ,) -> "DeclarativeBase"
 ```
 
-Creates a node Cypher partial query.
+Add a node pattern to the query.
+
+**Arguments**:
+
+- `labels` - A string or list of strings representing the labels of the
+  node.
+- `variable` - A string representing the name of the variable for storing
+  results of the node pattern.
+- `node` - A `Node` object to construct the pattern from.
+- `**kwargs` - Arguments representing the properties of the node.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
 
 #### to
 
@@ -246,7 +292,21 @@ Creates a node Cypher partial query.
 def to(edge_label: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, **kwargs, ,) -> "DeclarativeBase"
 ```
 
-Creates a relationship Cypher partial query with a '->' sign.
+Add a relationship pattern to the query. + +**Arguments**: + +- `edge_label` - A string representing the type of the relationship. +- `directed` - A bool indicating if the relationship is directed. +- `variable` - A string representing the name of the variable for storing + results of the relationship pattern. +- `relationship` - A `Relationship` object to construct the pattern from. +- `**kwargs` - Arguments representing the properties of the relationship. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### from\_ @@ -254,12 +314,26 @@ Creates a relationship Cypher partial query with a '->' sign. def from_(edge_label: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, **kwargs, ,) -> "Match" ``` -Creates a relationship Cypher partial query with a '<-' sign. +Add a relationship pattern to the query. + +**Arguments**: + +- `edge_label` - A string representing the type of the relationship. +- `directed` - A bool indicating if the relationship is directed. +- `variable` - A string representing the name of the variable for storing + results of the relationship pattern. +- `relationship` - A `Relationship` object to construct the pattern from. +- `**kwargs` - Arguments representing the properties of the relationship. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### where ```python -def where(property: str, operator: str, value: Any) -> "DeclarativeBase" +def where(item: str, operator: str, value: Any) -> "DeclarativeBase" ``` Creates a WHERE statement Cypher partial query. @@ -267,7 +341,7 @@ Creates a WHERE statement Cypher partial query. #### and\_where ```python -def and_where(property: str, operator: str, value: Any) -> "DeclarativeBase" +def and_where(item: str, operator: str, value: Any) -> "DeclarativeBase" ``` Creates a AND (expression) statement Cypher partial query. @@ -275,18 +349,36 @@ Creates a AND (expression) statement Cypher partial query. #### or\_where ```python -def or_where(property: str, operator: str, value: Any) -> "DeclarativeBase" +def or_where(item: str, operator: str, value: Any) -> "DeclarativeBase" ``` Creates a OR (expression) statement Cypher partial query. +#### xor\_where + +```python +def xor_where(item: str, operator: str, value: Any) -> "DeclarativeBase" +``` + +Creates a XOR (expression) statement Cypher partial query. + #### unwind ```python def unwind(list_expression: str, variable: str) -> "DeclarativeBase" ``` -Creates a UNWIND statement Cypher partial query. +Unwind a list of values as individual rows. + +**Arguments**: + +- `list_expression` - A list of strings representing the list of values. +- `variable` - A string representing the variable name for unwinding results. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### with\_ @@ -294,7 +386,18 @@ Creates a UNWIND statement Cypher partial query. def with_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" ``` -Creates a WITH statement Cypher partial query. +Chain together parts of a query, piping the results from one to be +used as starting points or criteria in the next. + +**Arguments**: + +- `results` - A dictionary mapping variables in the first query with + aliases in the second query. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### union @@ -302,7 +405,17 @@ Creates a WITH statement Cypher partial query. 
def union(include_duplicates: Optional[bool] = True) -> "DeclarativeBase" ``` -Creates a UNION statement Cypher partial query. +Combine the result of multiple queries. + +**Arguments**: + +- `include_duplicates` - A bool indicating if duplicates should be + included. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### delete @@ -310,7 +423,19 @@ Creates a UNION statement Cypher partial query. def delete(variable_expressions: List[str], detach: Optional[bool] = False) -> "DeclarativeBase" ``` -Creates a DELETE statement Cypher partial query. +Delete nodes and relationships from the database. + +**Arguments**: + +- `variable_expressions` - A list of strings indicating which nodes + and/or relationships should be removed. +- `detach` - A bool indicating if relationships should be deleted along + with a node. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### remove @@ -318,7 +443,17 @@ Creates a DELETE statement Cypher partial query. def remove(items: List[str]) -> "DeclarativeBase" ``` -Creates a REMOVE statement Cypher partial query. +Remove labels and properties from nodes and relationships. + +**Arguments**: + +- `items` - A list of strings indicating which labels and/or properties + should be removed. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### yield\_ @@ -326,7 +461,17 @@ Creates a REMOVE statement Cypher partial query. def yield_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" ``` -Creates a YIELD statement Cypher partial query. +Yield data from the query. + +**Arguments**: + +- `results` - A dictionary mapping items that are returned with alias + names. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### return\_ @@ -334,7 +479,17 @@ Creates a YIELD statement Cypher partial query. def return_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" ``` -Creates a RETURN statement Cypher partial query. +Return data from the query. + +**Arguments**: + +- `results` - A dictionary mapping items that are returned with alias + names. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### order\_by @@ -342,7 +497,16 @@ Creates a RETURN statement Cypher partial query. def order_by(properties: str) -> "DeclarativeBase" ``` -Creates a ORDER BY statement Cypher partial query. +Order the results of the query. + +**Arguments**: + +- `properties` - A string representing how to order the results. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### limit @@ -350,7 +514,17 @@ Creates a ORDER BY statement Cypher partial query. def limit(integer_expression: str) -> "DeclarativeBase" ``` -Creates a LIMIT statement Cypher partial query. +Limit the number of records when returning results. + +**Arguments**: + +- `integer_expression` - An integer indicating how many records to limit + the results to. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### skip @@ -358,7 +532,55 @@ Creates a LIMIT statement Cypher partial query. def skip(integer_expression: str) -> "DeclarativeBase" ``` -Creates a SKIP statement Cypher partial query. +Skip a number of records when returning results. + +**Arguments**: + +- `integer_expression` - An integer indicating how many records to skip + in the results. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. 
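
Since the builder docs above are reference-only, here is a short end-to-end sketch tying several of these clauses together. The `QueryBuilder` entry point is taken from its use in `gqlalchemy/loaders.py` later in this diff; the labels and properties are illustrative:

```python
from gqlalchemy import Memgraph
from gqlalchemy.query_builder import QueryBuilder

db = Memgraph()

# MATCH (u:User)-[:FOLLOWS]->(f:User) WHERE u.name = 'Ana'
# RETURN f.name AS friend ORDER BY friend LIMIT 10
results = (
    QueryBuilder(connection=db)
    .match()
    .node(labels="User", variable="u")
    .to(edge_label="FOLLOWS")
    .node(labels="User", variable="f")
    .where(item="u.name", operator="=", value="Ana")
    .return_({"f.name": "friend"})
    .order_by("friend")
    .limit("10")
    .execute()
)
for result in results:
    print(result["friend"])
```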
+ +#### add\_custom\_cypher + +```python +def add_custom_cypher(custom_cypher: str) -> "DeclarativeBase" +``` + +Inject custom Cypher code into the query. + +**Arguments**: + +- `custom_cypher` - A string representing the Cypher code to be injected + into the query. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. + +#### load\_csv + +```python +def load_csv(path: str, header: bool, row: str) -> "DeclarativeBase" +``` + +Load data from a CSV file by executing a Cypher query for each row. + +**Arguments**: + +- `path` - A string representing the path to the CSV file. +- `header` - A bool indicating if the CSV file starts with a header row. +- `row` - A string representing the name of the variable for iterating + over each row. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. #### get\_single @@ -368,11 +590,24 @@ def get_single(retrieve: str) -> Any Returns a single result with a `retrieve` variable name. +**Arguments**: + +- `retrieve` - A string representing the results variable to be returned. + + +**Returns**: + + An iterator of dictionaries containing the results of the query. + #### execute ```python def execute() -> Iterator[Dict[str, Any]] ``` -Executes the Cypher query. +Executes the Cypher query and returns the results. + +**Returns**: + + An iterator of dictionaries containing the results of the query. diff --git a/docs/reference/gqlalchemy/transformations.md b/docs/reference/gqlalchemy/transformations.md index 2119fd97..b8067a0f 100644 --- a/docs/reference/gqlalchemy/transformations.md +++ b/docs/reference/gqlalchemy/transformations.md @@ -9,7 +9,7 @@ title: gqlalchemy.transformations def nx_to_cypher(graph: nx.Graph, config: NetworkXCypherConfig = None) -> Iterator[str] ``` -Generates a Cypher queries for creating graph. +Generates a Cypher query for creating a graph. #### nx\_graph\_to\_memgraph\_parallel @@ -17,8 +17,7 @@ Generates a Cypher queries for creating graph. def nx_graph_to_memgraph_parallel(graph: nx.Graph, host: str = "127.0.0.1", port: int = 7687, username: str = "", password: str = "", encrypted: bool = False, config: NetworkXCypherConfig = None) -> None ``` -Generates a Cypher queries and inserts data into Memgraph in parallel. - +Generates Cypher queries and inserts data into Memgraph in parallel. ## NetworkXCypherBuilder Objects @@ -32,8 +31,7 @@ class NetworkXCypherBuilder() def yield_queries(graph: nx.Graph) -> Iterator[str] ``` -Generates a Cypher queries for creating graph. - +Generates Cypher queries for creating a graph. #### yield\_query\_groups @@ -41,5 +39,5 @@ Generates a Cypher queries for creating graph. def yield_query_groups(graph: nx.Graph) -> List[Iterator[str]] ``` -Generates a Cypher queries for creating graph by query groups. +Generates Cypher queries for creating a graph by query groups. diff --git a/docs/reference/gqlalchemy/utilities.md b/docs/reference/gqlalchemy/utilities.md index aa92a2d7..87ec246d 100644 --- a/docs/reference/gqlalchemy/utilities.md +++ b/docs/reference/gqlalchemy/utilities.md @@ -8,6 +8,7 @@ title: gqlalchemy.utilities ```python def to_cypher_value(value: Any, config: NetworkXCypherConfig = None) -> str ``` + Converts value to a valid Cypher type. #### to\_cypher\_properties @@ -24,4 +25,5 @@ Converts properties to a Cypher key-value properties. def to_cypher_labels(labels: Union[str, List[str], None]) -> str ``` -Converts labels to a Cypher label definition. \ No newline at end of file +Converts labels to a Cypher label definition. 
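
The transformation helpers documented above compose naturally with `Memgraph.execute`; a minimal sketch, assuming `networkx` is installed as the signatures imply:

```python
import networkx as nx

from gqlalchemy import Memgraph
from gqlalchemy.transformations import nx_to_cypher

db = Memgraph()

# Build a small NetworkX graph and replay the generated Cypher in Memgraph.
graph = nx.Graph()
graph.add_nodes_from([(1, {"name": "Ana"}), (2, {"name": "Marko"})])
graph.add_edge(1, 2)

for query in nx_to_cypher(graph):
    db.execute(query)
```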
+ diff --git a/docs/reference/sidebar.json b/docs/reference/sidebar.json index a9954aae..8d75be5f 100644 --- a/docs/reference/sidebar.json +++ b/docs/reference/sidebar.json @@ -2,8 +2,8 @@ "items": [ { "items": [ - "reference/gqlalchemy/connection", "reference/gqlalchemy/disk_storage", + "reference/gqlalchemy/instance_runner", "reference/gqlalchemy/memgraph", "reference/gqlalchemy/models", "reference/gqlalchemy/query_builder", diff --git a/gqlalchemy/__init__.py b/gqlalchemy/__init__.py index bde6deba..172e2a03 100644 --- a/gqlalchemy/__init__.py +++ b/gqlalchemy/__init__.py @@ -36,9 +36,16 @@ Unwind, With, ) +from .instance_runner import ( # noqa F401 + DockerImage, + MemgraphInstanceBinary, + MemgraphInstanceDocker, + wait_for_docker_container, + wait_for_port, +) from .exceptions import GQLAlchemyWarning, GQLAlchemyError # noqa F401 -from pydantic import Field # noqa F401 +from pydantic import Field, validator # noqa F401 import warnings warnings.filterwarnings("once", category=GQLAlchemyWarning) diff --git a/gqlalchemy/disk_storage.py b/gqlalchemy/disk_storage.py index 30af2f20..27eeb5db 100644 --- a/gqlalchemy/disk_storage.py +++ b/gqlalchemy/disk_storage.py @@ -20,6 +20,8 @@ class OnDiskPropertyDatabase(ABC): + """An abstract class for implementing on-disk storage features with specific databases.""" + def save_node_property(self, node_id: int, property_name: str, property_value: str) -> None: """Saves a node property to an on disk database.""" pass @@ -60,7 +62,14 @@ def __init__(self, database_path: str, memgraph: "Memgraph" = None): # noqa F82 memgraph.init_disk_storage(self) def execute_query(self, query: str) -> List[str]: - """Executes an SQL query on the on disk property database.""" + """Executes an SQL query on the on disk property database. + + Args: + query: A string representing an SQL query. + + Returns: + A list of strings representing the results of the query. + """ with contextlib.closing(sqlite3.connect(self.database_name)) as conn: with conn: # autocommit changes with contextlib.closing(conn.cursor()) as cursor: @@ -95,7 +104,13 @@ def drop_database(self) -> None: self.execute_query("DELETE FROM relationship_properties;") def save_node_property(self, node_id: int, property_name: str, property_value: str) -> None: - """Saves a node property to an on disk database.""" + """Saves a node property to an on disk database. + + Args: + node_id: An integer representing the internal id of the node. + property_name: A string representing the name of the property. + property_value: A string representing the value of the property. + """ self.execute_query( "INSERT INTO node_properties (node_id, property_name, property_value) " f"VALUES({node_id}, '{property_name}', '{property_value}') " @@ -104,7 +119,15 @@ def save_node_property(self, node_id: int, property_name: str, property_value: s ) def load_node_property(self, node_id: int, property_name: str) -> Optional[str]: - """Loads a node property from an on disk database.""" + """Loads a node property from an on disk database. + + Args: + node_id: An integer representing the internal id of the node. + property_name: A string representing the name of the property. + + Returns: + An optional string representing the property value. 
+ """ result = self.execute_query( "SELECT property_value " "FROM node_properties AS db " @@ -121,7 +144,12 @@ def load_node_property(self, node_id: int, property_name: str) -> Optional[str]: return result[0][0] def delete_node_property(self, node_id: int, property_name: str) -> None: - """Deletes a node property from an on disk database.""" + """Deletes a node property from an on disk database. + + Args: + node_id: An integer representing the internal id of the node. + property_name: A string representing the name of the property. + """ self.execute_query( "DELETE " "FROM node_properties AS db " @@ -130,7 +158,13 @@ def delete_node_property(self, node_id: int, property_name: str) -> None: ) def save_relationship_property(self, relationship_id: int, property_name: str, property_value: str) -> None: - """Saves a relationship property to an on disk database.""" + """Saves a relationship property to an on disk database. + + Args: + relationship_id: An integer representing the internal id of the relationship. + property_name: A string representing the name of the property. + property_value: A string representing the value of the property. + """ self.execute_query( "INSERT INTO relationship_properties (relationship_id, property_name, property_value) " f"VALUES({relationship_id}, '{property_name}', '{property_value}') " @@ -139,7 +173,15 @@ def save_relationship_property(self, relationship_id: int, property_name: str, p ) def load_relationship_property(self, relationship_id: int, property_name: str) -> Optional[str]: - """Loads a relationship property from an on disk database.""" + """Loads a relationship property from an on disk database. + + Args: + relationship_id: An integer representing the internal id of the relationship. + property_name: A string representing the name of the property. + + Returns: + An optional string representing the property value. + """ result = self.execute_query( "SELECT property_value " "FROM relationship_properties AS db " @@ -156,7 +198,12 @@ def load_relationship_property(self, relationship_id: int, property_name: str) - return result[0][0] def delete_relationship_property(self, relationship_id: int, property_name: str) -> None: - """Deletes a node property from an on disk database.""" + """Deletes a node property from an on disk database. + + Args: + relationship_id: An integer representing the internal id of the relationship. + property_name: A string representing the name of the property. + """ self.execute_query( "DELETE " "FROM relationship_properties AS db " diff --git a/gqlalchemy/exceptions.py b/gqlalchemy/exceptions.py index 01d6bc06..cab8fa26 100644 --- a/gqlalchemy/exceptions.py +++ b/gqlalchemy/exceptions.py @@ -18,6 +18,12 @@ {field}: {field_type} = Field({constraint}=True, db=Memgraph()) """ +DATABASE_MISSING_IN_NODE_CLASS_ERROR_MESSAGE = """ +Can't have an index on a label without providing the database `db` object. +Define your class as: + {cls.__name__}(Node, index=True, db=Memgraph()) +""" + SUBCLASS_NOT_FOUND_WARNING = """ GraphObject subclass(es) '{types}' not found. '{cls.__name__}' will be used until you create a subclass. @@ -34,6 +40,23 @@ SQLitePropertyDatabase("path-to-sqlite-db", db) """ +MISSING_ORDER = """ +The second argument of the tuple must be order: ASC, ASCENDING, DESC or DESCENDING. +""" + +ORDER_BY_TYPE_ERROR = """ +TypeError: The argument provided is of wrong type. Please provide str, tuple[str, str] or list[tuple[str, str]]. 
+""" + +LITERAL_AND_EXPRESSION_MISSING_IN_WHERE = """ +Can't create WHERE query without providing either 'literal' or 'expression' keyword arguments, that can be literals, labels or properties. +""" + +EXTRA_KEYWORD_ARGUMENTS_IN_WHERE = """ +Can't create WHERE query with extra keyword arguments: +Please provide a value to either 'literal' or 'expression' keyword arguments." +""" + class GQLAlchemyWarning(Warning): pass @@ -62,7 +85,37 @@ def __init__(self, constraint: str, field: str, field_type: str): ) +class GQLAlchemyDatabaseMissingInNodeClassError(GQLAlchemyError): + def __init__(self, cls): + super().__init__() + self.message = DATABASE_MISSING_IN_NODE_CLASS_ERROR_MESSAGE.format(cls=cls) + + class GQLAlchemyOnDiskPropertyDatabaseNotDefinedError(GQLAlchemyError): def __init__(self): super().__init__() self.message = ON_DISK_PROPERTY_DATABASE_NOT_DEFINED_ERROR + + +class GQLAlchemyMissingOrder(GQLAlchemyError): + def __init__(self): + super().__init__() + self.message = MISSING_ORDER + + +class GQLAlchemyOrderByTypeError(TypeError): + def __init__(self): + super().__init__() + self.message = ORDER_BY_TYPE_ERROR + + +class GQLAlchemyLiteralAndExpressionMissingInWhere(GQLAlchemyError): + def __init__(self): + super().__init__() + self.message = LITERAL_AND_EXPRESSION_MISSING_IN_WHERE + + +class GQLAlchemyExtraKeywordArgumentsInWhere(GQLAlchemyError): + def __init__(self): + super().__init__() + self.message = EXTRA_KEYWORD_ARGUMENTS_IN_WHERE diff --git a/gqlalchemy/instance_runner.py b/gqlalchemy/instance_runner.py new file mode 100644 index 00000000..ed447658 --- /dev/null +++ b/gqlalchemy/instance_runner.py @@ -0,0 +1,285 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import docker +import os +import psutil +import socket +import subprocess +import time +from abc import ABC, abstractmethod +from enum import Enum +from typing import Any, Dict, Union +from .memgraph import Memgraph + + +MEMGRAPH_DEFAULT_BINARY_PATH = "/usr/lib/memgraph/memgraph" +MEMGRAPH_DEFAULT_PORT = 7687 +MEMGRAPH_CONFIG_BOLT_PORT = "--bolt-port" +MEMGRAPH_CONFIG_BOLT_ADDRESS = "--bolt-address" +DOCKER_IMAGE_TAG_LATEST = "latest" +LOOPBACK_ADDRESS = "127.0.0.1" +WILDCARD_ADDRESS = "0.0.0.0" + +TIMEOUT_ERROR_MESSAGE = "Waited too long for the port {port} on host {host} to start accepting connections." +DOCKER_TIMEOUT_ERROR_MESSAGE = "Waited too long for the Docker container to start." +MEMGRAPH_CONNECTION_ERROR_MESSAGE = "The Memgraph process probably died." + + +class DockerImage(Enum): + MEMGRAPH = "memgraph/memgraph" + MAGE = "memgraph/memgraph-mage" + + +class DockerContainerStatus(Enum): + EXITED = "exited" + PAUSED = "paused" + RESTARTING = "restarting" + RUNNING = "running" + + +def wait_for_port( + host: str = LOOPBACK_ADDRESS, port: int = MEMGRAPH_DEFAULT_PORT, delay: float = 0.01, timeout: float = 5.0 +) -> None: + """Wait for a TCP port to become available. + + Args: + host: A string representing the IP address that is being checked. 
+        port: An integer representing the port that is being checked.
+        delay: A float that defines how long to wait between retries.
+        timeout: A float that defines how long to wait for the port.
+
+    Raises:
+        TimeoutError: Raises an error when the host and port are not accepting
+            connections after the timeout period has passed.
+    """
+    start_time = time.perf_counter()
+    time.sleep(delay)
+    while True:
+        try:
+            with socket.create_connection((host, port), timeout=timeout):
+                break
+        except OSError as ex:
+            time.sleep(delay)
+            if time.perf_counter() - start_time >= timeout:
+                raise TimeoutError(TIMEOUT_ERROR_MESSAGE.format(port=port, host=host)) from ex
+
+            delay *= 2
+
+
+def wait_for_docker_container(container: "docker.Container", delay: float = 0.01, timeout: float = 5.0) -> None:
+    """Wait for a Docker container to enter the status `running`.
+
+    Args:
+        container: The Docker container to wait for.
+        delay: A float that defines how long to wait between retries.
+        timeout: A float that defines how long to wait for the status.
+
+    Raises:
+        TimeoutError: Raises an error when the container isn't running after the
+            timeout period has passed.
+    """
+    start_time = time.perf_counter()
+    time.sleep(delay)
+    container.reload()
+    while container.status != DockerContainerStatus.RUNNING.value:
+        time.sleep(delay)
+        if time.perf_counter() - start_time >= timeout:
+            raise TimeoutError(DOCKER_TIMEOUT_ERROR_MESSAGE)
+
+        container.reload()
+        delay *= 2
+
+
+class MemgraphInstance(ABC):
+    def __init__(
+        self,
+        host: str = WILDCARD_ADDRESS,
+        port: int = MEMGRAPH_DEFAULT_PORT,
+        config: Optional[Dict[str, Union[str, int, bool]]] = None,
+    ) -> None:
+        self.host = host
+        self.port = port
+        # Create a fresh dict per instance to avoid mutating a shared default argument.
+        self.config = config if config is not None else {}
+        self.proc_mg = None
+        self.config[MEMGRAPH_CONFIG_BOLT_PORT] = self.port
+        self.config[MEMGRAPH_CONFIG_BOLT_ADDRESS] = self.host
+
+    def set_config(self, config: Dict[str, Union[str, int, bool]]) -> None:
+        self.config.update(config)
+
+    def connect(self) -> "Memgraph":
+        self.memgraph = Memgraph(self.host, self.port)
+        if not self.is_running():
+            raise ConnectionError(MEMGRAPH_CONNECTION_ERROR_MESSAGE)
+
+        return self.memgraph
+
+    @abstractmethod
+    def start(self, restart: bool = False) -> None:
+        pass
+
+    @abstractmethod
+    def start_and_connect(self, restart: bool = False) -> "Memgraph":
+        pass
+
+    @abstractmethod
+    def stop(self) -> Any:
+        pass
+
+    @abstractmethod
+    def is_running(self) -> bool:
+        pass
+
+
+class MemgraphInstanceBinary(MemgraphInstance):
+    """A class for managing Memgraph instances started from binary files on Unix
+    systems.
+
+    Attributes:
+        binary_path: A string representing the path to a Memgraph binary
+            file.
+        user: A string representing the user that should start the Memgraph
+            process.
+    """
+
+    def __init__(self, binary_path: str = MEMGRAPH_DEFAULT_BINARY_PATH, user: str = "", **data) -> None:
+        super().__init__(**data)
+        self.binary_path = binary_path
+        self.user = user
+
+    def start(self, restart: bool = False) -> None:
+        """Start the Memgraph instance from a binary file.
+
+        Args:
+            restart: A bool indicating if the instance should be
+                restarted if it's already running.
+ """ + if not restart and self.is_running(): + return + + self.stop() + args_mg = f"{self.binary_path } " + (" ").join([f"{k}={v}" for k, v in self.config.items()]) + if self.user != "": + args_mg = f"sudo runuser -l {self.user} -c '{args_mg}'" + + self.proc_mg = subprocess.Popen(args_mg, shell=True) + wait_for_port(self.host, self.port) + + def start_and_connect(self, restart: bool = False) -> "Memgraph": + """Start the Memgraph instance from a binary file and return the + connection object. + + Attributes: + restart: A bool indicating if the instance should be + restarted if it's already running. + """ + self.start(restart=restart) + + return self.connect() + + def stop(self) -> None: + """Stop the Memgraph instance.""" + if not self.is_running(): + return + + procs = set() + process = psutil.Process(self.proc_mg.pid) + procs.add(process) + for proc in process.children(recursive=True): + procs.add(proc) + os.system(f"sudo kill {proc.pid}") + + process.kill() + psutil.wait_procs(procs) + + def is_running(self) -> bool: + """Check if the Memgraph instance is still running.""" + if self.proc_mg is None: + return False + + if self.proc_mg.poll() is not None: + return False + + return True + + +class MemgraphInstanceDocker(MemgraphInstance): + """A class for managing Memgraph instances started in Docker containers. + + Attributes: + docker_image: An enum representing the Docker image. Values: + `DockerImage.MEMGRAPH` and `DockerImage.MAGE`. + docker_image_tag: A string representing the tag of the Docker image. + """ + + def __init__( + self, docker_image: DockerImage = DockerImage.MEMGRAPH, docker_image_tag: str = DOCKER_IMAGE_TAG_LATEST, **data + ) -> None: + super().__init__(**data) + self.docker_image = docker_image + self.docker_image_tag = docker_image_tag + self._client = docker.from_env() + self._container = None + + def start(self, restart: bool = False) -> None: + """Start the Memgraph instance in a Docker container. + + Attributes: + restart: A bool indicating if the instance should be + restarted if it's already running. + """ + if not restart and self.is_running(): + return + + self.stop() + self._container = self._client.containers.run( + image=f"{self.docker_image.value}:{self.docker_image_tag}", + command=f"{MEMGRAPH_DEFAULT_BINARY_PATH} {(' ').join([f'{k}={v}' for k, v in self.config.items()])}", + detach=True, + ports={f"{self.port}/tcp": self.port}, + ) + wait_for_docker_container(self._container, delay=1) + + def start_and_connect(self, restart: bool = False) -> "Memgraph": + """Start the Memgraph instance in a Docker container and return the + connection object. + + Attributes: + restart: A bool indicating if the instance should be + restarted if it's already running. + """ + self.start(restart=restart) + + return self.connect() + + def stop(self) -> Dict: + """Stop the Memgraph instance.""" + if not self.is_running(): + return + + self._container.stop() + + return self._container.wait() + + def is_running(self) -> bool: + """Check if the Memgraph instance is still running.""" + if self._container is None: + return False + + self._container.reload() + if self._container.status == DockerContainerStatus.RUNNING.value: + return True + + return False diff --git a/gqlalchemy/loaders.py b/gqlalchemy/loaders.py new file mode 100644 index 00000000..2b2ac70e --- /dev/null +++ b/gqlalchemy/loaders.py @@ -0,0 +1,1028 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. 
[https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from string import Template + +from . import Memgraph +from .query_builder import QueryBuilder, Unwind +from .models import ( + MemgraphIndex, + MemgraphTrigger, + TriggerEventObject, + TriggerEventType, + TriggerExecutionPhase, +) + +from abc import ABC, abstractmethod +from enum import Enum +from dataclasses import dataclass, field +from dacite import from_dict +from pyarrow import fs +from typing import List, Dict, Any, Optional, Union +import pyarrow.dataset as ds +import adlfs +import platform + +NAME_MAPPINGS_KEY = "name_mappings" +ONE_TO_MANY_RELATIONS_KEY = "one_to_many_relations" +INDICES_KEY = "indices" +MANY_TO_MANY_RELATIONS_KEY = "many_to_many_relations" + +FROM_NODE_VARIABLE_NAME = "from_node" +TO_NODE_VARIABLE_NAME = "to_node" + +NODE_A = "a" +NODE_B = "b" + +PARQUET_EXTENSION = "parquet" +CSV_EXTENSION = "csv" +ORC_EXTENSION = "orc" +IPC_EXTENSION = "ipc" +FEATHER_EXTENSION = "feather" +ARROW_EXTENSION = "arrow" + +BLOB_ACCOUNT_NAME = "blob_account_name" +BLOB_ACCOUNT_KEY = "blob_account_key" +BLOB_SAS_TOKEN = "blob_sas_token" +BLOB_CONTAINER_NAME_KEY = "container_name" + +S3_REGION = "s3_region" +S3_ACCESS_KEY = "s3_access_key" +S3_SECRET_KEY = "s3_secret_key" +S3_SESSION_TOKEN = "s3_session_token" +S3_BUCKET_NAME_KEY = "bucket_name" + +LOCAL_STORAGE_PATH = "local_storage_path" + + +@dataclass(frozen=True) +class ForeignKeyMapping: + """Class that contains the full description of a single foreign key in a table. + + Attributes: + column_name: Column name that holds the foreign key. + reference_table: Name of a table from which the foreign key is taken. + reference_key: Column name in the referenced table from which the foreign key is taken. + """ + + column_name: str + reference_table: str + reference_key: str + + +@dataclass(frozen=True) +class OneToManyMapping: + """Class that holds the full description of a single one to many mapping in a table. + + Attributes: + foreign_key: Foreign key used for mapping. + label: Label which will be applied to the relationship created from this object. + from_entity: Direction of the relationship created from the mapping object. + parameters: Parameters that will be added to the relationship created from this object (Optional). + """ + + foreign_key: ForeignKeyMapping + label: str + from_entity: bool = False + parameters: Optional[Dict[str, str]] = None + + +@dataclass(frozen=True) +class ManyToManyMapping: + """Class that holds the full description of a single many to many mapping in a table. + Many to many mapping is intended to be used in case of associative tables. + + Attributes: + foreign_key_from: Describes the source of the relationship. + foreign_key_to: Describes the destination of the relationship. + label: Label to be applied to the newly created relationship. + parameters: Parameters that will be added to the relationship created from this object (Optional). 
+ """ + + foreign_key_from: ForeignKeyMapping + foreign_key_to: ForeignKeyMapping + label: str + parameters: Optional[Dict[str, str]] = None + + +Mapping = Union[List[OneToManyMapping], ManyToManyMapping] + + +@dataclass +class TableMapping: + """Class that holds the full description of all of the mappings for a single table. + + Attributes: + table_name: Name of the table. + mapping: All of the mappings in the table (Optional). + indices: List of the indices to be created for this table (Optional). + """ + + table_name: str + mapping: Optional[Mapping] = None + indices: Optional[List[str]] = None + + +@dataclass(frozen=True) +class NameMappings: + """Class that contains new label name and all of the column name mappings for a single table. + + Attributes: + label: New label (Optional). + column_names_mapping: Dictionary containing key-value pairs in form ("column name", "property name") (Optional). + """ + + label: Optional[str] = None + column_names_mapping: Dict[str, str] = field(default_factory=dict) + + def get_property_name(self, column_name: str): + return self.column_names_mapping.get(column_name, column_name) + + +class NameMapper: + """Class that holds all name mappings for all of the collections.""" + + def __init__(self, mappings: Dict[str, Any]) -> None: + self._name_mappings: Dict[str, NameMappings] = {k: NameMappings(**v) for k, v in mappings.items()} + + def get_label(self, collection_name: str) -> str: + """Returns label for given collection. + + Args: + collection_name: Original collection name. + """ + label = self._name_mappings[collection_name].label + + return label if label is not None else collection_name + + def get_property_name(self, collection_name: str, column_name: str) -> str: + """Returns property name for column from collection. + + Args: + collection_name: Original collection name. + column_name: Original column name. + """ + return self._name_mappings[collection_name].get_property_name(column_name=column_name) + + +class FileSystemHandler(ABC): + """Abstract class for defining FileSystemHandler. + + Inherit this class, define a custom data source and initialize the + connection. + """ + + def __init__(self, fs: Any) -> None: + super().__init__() + self._fs = fs + + @property + def fs(self): + return self._fs + + @abstractmethod + def get_path(self, collection_name: str) -> str: + """Returns complete path in specific file system. Used to read the file system + for a specific file. + """ + pass + + +class S3FileSystemHandler(FileSystemHandler): + """Handles connection to Amazon S3 service via PyArrow.""" + + def __init__(self, bucket_name: str, **kwargs): + """Initializes connection and data bucket. + + Args: + bucket_name: Name of the bucket on S3 from which to read the data + + Kwargs: + s3_access_key: S3 access key. + s3_secret_key: S3 secret key. + s3_region: S3 region. + s3_session_token: S3 session token (Optional). + + Raises: + KeyError: kwargs doesn't contain necessary fields. + """ + if S3_ACCESS_KEY not in kwargs: + raise KeyError(f"{S3_ACCESS_KEY} is needed to connect to S3 storage") + if S3_SECRET_KEY not in kwargs: + raise KeyError(f"{S3_SECRET_KEY} is needed to connect to S3 storage") + + super().__init__(fs=fs.S3FileSystem(**kwargs)) + self._bucket_name = bucket_name + + def get_path(self, collection_name: str) -> str: + """Get file path in file system. + + Args: + collection_name: Name of the file to read. 
+ """ + return f"{self._bucket_name}/{collection_name}" + + +class AzureBlobFileSystemHandler(FileSystemHandler): + """Handles connection to Azure Blob service via adlfs package.""" + + def __init__(self, container_name: str, **kwargs) -> None: + """Initializes connection and data container. + + Args: + container_name: Name of the Blob container storing data. + + Kwargs: + blob_account_name: Account name from Azure Blob. + blob_account_key: Account key for Azure Blob (Optional - if using sas_token). + blob_sas_token: Shared access signature token for authentification (Optional). + + Raises: + KeyError: kwargs doesn't contain necessary fields. + """ + if BLOB_ACCOUNT_KEY not in kwargs and BLOB_SAS_TOKEN not in kwargs: + raise KeyError(f"{BLOB_ACCOUNT_KEY} or {BLOB_SAS_TOKEN} is needed to connect to Blob storage") + if BLOB_ACCOUNT_NAME not in kwargs: + raise KeyError(f"{BLOB_ACCOUNT_NAME} is needed to connect to Blob storage") + + super().__init__(fs=adlfs.AzureBlobFileSystem(**kwargs)) + self._container_name = container_name + + def get_path(self, collection_name: str) -> str: + """Get file path in file system. + + Args: + collection_name: Name of the file to read. + """ + return f"{self._container_name}/{collection_name}" + + +class LocalFileSystemHandler(FileSystemHandler): + """Handles a local filesystem.""" + + def __init__(self, path: str) -> None: + """Initializes an fsspec local file system and sets path to data. + + Args: + path: path to the local storage location. + """ + super().__init__(fs=fs.LocalFileSystem()) + self._path = path + + def get_path(self, collection_name: str) -> str: + """Get file path in the local file system. + + Args: + collection_name: Name of the file to read. + """ + return f"{self._path}/{collection_name}" + + +class DataLoader(ABC): + """Implements loading of a data type from file system service to TableToGraphImporter.""" + + def __init__(self, file_extension: str, file_system_handler: FileSystemHandler) -> None: + """ + Args: + file_extension: File format to be read. + file_system_handler: Object for handling of the file system service. + """ + super().__init__() + self._file_extension = file_extension + self._file_system_handler = file_system_handler + + @abstractmethod + def load_data(self, collection_name: str, is_cross_table: bool = False) -> None: + """Override this method in the derived class. Intended to be used for reading data from data format. + + Args: + collection_name: Name of the file to read. + is_cross_table: Indicate whether or not the collection contains associative table (default=False). + + Raises: + NotImplementedError: The method is not implemented in the extended class. + """ + raise NotImplementedError("Subclasses must override load_data() for use in TableToGraphImporter") + + +class PyArrowFileTypeEnum(Enum): + """Enumerates file types supported by PyArrow""" + + Default = 1 + Parquet = 2 + CSV = 3 + ORC = 4 + Feather = 5 + + +class PyArrowDataLoader(DataLoader): + """Loads data using PyArrow. + + PyArrow currently supports "parquet", "ipc"/"arrow"/"feather", "csv", + and "orc", see pyarrow.dataset.dataset for up-to-date info. + ds.dataset in load_data accepts any fsspec subclass, making this DataLoader + compatible with fsspec-compatible filesystems. + """ + + def __init__( + self, + file_extension_enum: PyArrowFileTypeEnum, + file_system_handler: FileSystemHandler, + ) -> None: + """ + Args: + file_extension_enum: The file format to be read. + file_system_handler: Object for handling of the file system service. 
+ """ + super().__init__(file_extension=file_extension_enum.name.lower(), file_system_handler=file_system_handler) + + def load_data( + self, collection_name: str, is_cross_table: bool = False, columns: Optional[List[str]] = None + ) -> None: + """Generator for loading data. + + Args: + collection_name: Name of the file to read. + is_cross_table: Flag signifying whether it is a cross table. + columns: Table columns to read. + """ + source = self._file_system_handler.get_path(f"{collection_name}.{self._file_extension}") + print("Loading data from " + ("cross " if is_cross_table else "") + f"table {source}...") + + dataset = ds.dataset(source=source, format=self._file_extension, filesystem=self._file_system_handler.fs) + + for batch in dataset.to_batches( + columns=columns, + ): + for batch_item in batch.to_pylist(): + yield batch_item + + print("Data loaded.") + + +class TableToGraphImporter: + """Implements translation of table data to graph data, and imports it to Memgraph.""" + + _DIRECTION = { + True: (NODE_A, NODE_B), + False: (NODE_B, NODE_A), + } + + _TriggerQueryTemplate = Template( + Unwind(list_expression="createdVertices", variable="$node_a") + .with_(results={"$node_a": ""}) + .where(item="$node_a:$label_2", operator="MATCH", expression="($node_b:$label_1)") + .where(item="$node_b.$property_1", operator="=", expression="$node_a.$property_2") + .create() + .node(variable="$from_node") + .to(edge_label="$edge_type") + .node(variable="$to_node") + .construct_query() + ) + + @staticmethod + def _create_trigger_cypher_query( + label1: str, label2: str, property1: str, property2: str, edge_type: str, from_entity: bool + ) -> str: + """Creates a Cypher query for the translation trigger. + + Args: + label1: Label of the first node. + label2: Label of the second node. + property1: Property of the first node. + property2: Property of the second node. + edge_type: Label for the relationship that the trigger creates. + from_entity: Indicate whether the relationship goes from or to the first entity. + """ + from_node, to_node = TableToGraphImporter._DIRECTION[from_entity] + + return TableToGraphImporter._TriggerQueryTemplate.substitute( + node_a=NODE_A, + node_b=NODE_B, + label_1=label1, + label_2=label2, + property_1=property1, + property_2=property2, + from_node=from_node, + to_node=to_node, + edge_type=edge_type, + ) + + def __init__( + self, + data_loader: DataLoader, + data_configuration: Dict[str, Any], + memgraph: Optional[Memgraph] = None, + ) -> None: + """ + Args: + data_loader: Object for loading data. + data_configuration: Configuration for the translations. + memgraph: Connection to Memgraph (Optional). + """ + self._data_loader: DataLoader = data_loader + self._memgraph: Memgraph = memgraph if memgraph is not None else Memgraph() + + self.__load_configuration(data_configuration=data_configuration) + + def translate(self, drop_database_on_start: bool = True) -> None: + """Performs the translations. + + Args: + drop_database_on_start: Indicate whether or not the database should be dropped prior to the start of the translations. 
+        """
+        if drop_database_on_start:
+            self._memgraph.drop_database()
+            self._memgraph.drop_indexes()
+            self._memgraph.drop_triggers()
+
+        self._create_indexes()
+        self._create_triggers()
+
+        self._load_nodes()
+        self._load_cross_relationships()
+
+    def _load_nodes(self) -> None:
+        """Reads all of the data from each table in the data source, translates it, and writes it to Memgraph."""
+        for one_to_many_mapping in self._one_to_many_mappings:
+            collection_name = one_to_many_mapping.table_name
+            for row in self._data_loader.load_data(collection_name=collection_name):
+                self._save_row_as_node(label=collection_name, row=row)
+
+    def _load_cross_relationships(self) -> None:
+        """Reads all of the data from each associative table in the data source, translates it, and writes it to Memgraph."""
+        for many_to_many_mapping in self._many_to_many_mappings:
+            mapping_from = many_to_many_mapping.mapping.foreign_key_from
+            mapping_to = many_to_many_mapping.mapping.foreign_key_to
+
+            for row in self._data_loader.load_data(
+                collection_name=many_to_many_mapping.table_name, is_cross_table=True
+            ):
+                self._save_row_as_relationship(
+                    collection_name_from=mapping_from.reference_table,
+                    collection_name_to=mapping_to.reference_table,
+                    property_from=mapping_from.reference_key,
+                    property_to=mapping_to.reference_key,
+                    relation_label=many_to_many_mapping.mapping.label,
+                    row=row,
+                )
+
+    def _create_triggers(self) -> None:
+        """Creates all of the triggers in Memgraph.
+
+        Triggers are used to speed up the translation. Since nodes and
+        relationships are written in one go, foreign keys that are represented
+        as relationships might not yet be present in Memgraph. When they do
+        appear, triggers make sure the relationship is created at that point
+        in time, rather than leaving a hanging relationship.
+        """
+        for one_to_many_mapping in self._one_to_many_mappings:
+            label1 = self._name_mapper.get_label(collection_name=one_to_many_mapping.table_name)
+            for mapping in one_to_many_mapping.mapping:
+                property1 = self._name_mapper.get_property_name(
+                    collection_name=one_to_many_mapping.table_name, column_name=mapping.foreign_key.column_name
+                )
+                label2 = self._name_mapper.get_label(collection_name=mapping.foreign_key.reference_table)
+                property2 = self._name_mapper.get_property_name(
+                    collection_name=one_to_many_mapping.table_name, column_name=mapping.foreign_key.reference_key
+                )
+                edge_type = mapping.label
+                from_entity = mapping.from_entity
+
+                self._create_trigger(
+                    label1=label1,
+                    label2=label2,
+                    property1=property1,
+                    property2=property2,
+                    edge_type=edge_type,
+                    from_entity=from_entity,
+                )
+                self._create_trigger(
+                    label1=label2,
+                    label2=label1,
+                    property1=property2,
+                    property2=property1,
+                    edge_type=edge_type,
+                    from_entity=not from_entity,
+                )
+
+    def _create_trigger(
+        self, label1: str, label2: str, property1: str, property2: str, edge_type: str, from_entity: bool
+    ) -> None:
+        """Creates a translation trigger in Memgraph.
+
+        Args:
+            label1: Label of the first node.
+            label2: Label of the second node.
+            property1: Property of the first node.
+            property2: Property of the second node.
+            edge_type: Label for the relationship that the trigger creates.
+            from_entity: Indicate whether the relationship goes from or to the first entity.
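+
+        Example:
+            A sketch with hypothetical labels and properties:
+
+            self._create_trigger(
+                label1="Person", label2="Team", property1="team_id",
+                property2="id", edge_type="MEMBER_OF", from_entity=True,
+            )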
+        """
+        trigger_name = "__".join([label1, property1, label2, property2])
+
+        trigger = MemgraphTrigger(
+            name=trigger_name,
+            event_type=TriggerEventType.CREATE,
+            event_object=TriggerEventObject.NODE,
+            execution_phase=TriggerExecutionPhase.BEFORE,
+            statement=TableToGraphImporter._create_trigger_cypher_query(
+                label1, label2, property1, property2, edge_type, from_entity
+            ),
+        )
+
+        self._memgraph.create_trigger(trigger)
+
+        print(f"Created trigger {trigger_name}")
+
+    def _create_indexes(self) -> None:
+        """Creates indexes in Memgraph."""
+        for one_to_many_mapping in self._one_to_many_mappings:
+            collection_name = self._name_mapper.get_label(collection_name=one_to_many_mapping.table_name)
+            for index in one_to_many_mapping.indices:
+                new_index = self._name_mapper.get_property_name(
+                    collection_name=one_to_many_mapping.table_name, column_name=index
+                )
+                self._memgraph.create_index(index=MemgraphIndex(collection_name, new_index))
+                print(f"Created index for {collection_name} on {new_index}")
+
+    def _save_row_as_node(
+        self,
+        label: str,
+        row: Dict[str, Any],
+    ) -> None:
+        """Translates a row to a node and writes it to Memgraph.
+
+        Args:
+            label: Original label of the new node.
+            row: The row that should be saved to Memgraph as a node.
+        """
+        (
+            QueryBuilder(connection=self._memgraph)
+            .create()
+            .node(
+                labels=self._name_mapper.get_label(collection_name=label),
+                **{
+                    self._name_mapper.get_property_name(collection_name=label, column_name=k): v for k, v in row.items()
+                },
+            )
+            .execute()
+        )
+
+    def _save_row_as_relationship(
+        self,
+        collection_name_from: str,
+        collection_name_to: str,
+        property_from: str,
+        property_to: str,
+        relation_label: str,
+        row: Dict[str, Any],
+    ) -> None:
+        """Translates a row to a relationship and writes it to Memgraph.
+
+        Args:
+            collection_name_from: Collection name of the source node.
+            collection_name_to: Collection name of the destination node.
+            property_from: Property of the source node.
+            property_to: Property of the destination node.
+            relation_label: Label for the relationship.
+            row: The row to be translated.
+        """
+        (
+            QueryBuilder(connection=self._memgraph)
+            .match()
+            .node(
+                labels=self._name_mapper.get_label(collection_name=collection_name_from),
+                variable=NODE_A,
+                **{
+                    self._name_mapper.get_property_name(
+                        collection_name=collection_name_from, column_name=property_from
+                    ): row[property_from]
+                },
+            )
+            .match()
+            .node(
+                labels=self._name_mapper.get_label(collection_name=collection_name_to),
+                variable=NODE_B,
+                **{
+                    self._name_mapper.get_property_name(
+                        collection_name=collection_name_to, column_name=property_to
+                    ): row[property_to]
+                },
+            )
+            .create()
+            .node(variable=NODE_A)
+            .to(relation_label)
+            .node(variable=NODE_B)
+            .execute()
+        )
+
+    def __load_configuration(self, data_configuration: Dict[str, Any]) -> None:
+        """Loads all of the configuration.
+
+        Args:
+            data_configuration: Instructions for translating tables to a graph.
+        """
+        self.__load_name_mappings(data_configuration.get(NAME_MAPPINGS_KEY, {}))
+        self.__load_one_to_many_mappings_and_indices(
+            data_configuration[ONE_TO_MANY_RELATIONS_KEY], data_configuration.get(INDICES_KEY, {})
+        )
+        self.__load_many_to_many_mappings(data_configuration.get(MANY_TO_MANY_RELATIONS_KEY, {}))
+
+    def __load_name_mappings(self, name_mappings: Dict[str, Any]) -> None:
+        """Loads name mappings from the configuration."""
+        self._name_mapper = NameMapper(mappings=name_mappings)
+
+    def __load_one_to_many_mappings_and_indices(
+        self, one_to_many_configuration: Dict[str, List[str]], indices: Dict[str, List[str]]
+    ) -> None:
+        """Loads One To Many Mappings and indices from the configuration."""
+        self._one_to_many_mappings = [
+            TableMapping(
+                table_name=table_name,
+                mapping=[from_dict(data_class=OneToManyMapping, data=relation) for relation in relations],
+                indices=indices.get(table_name, {}),
+            )
+            for table_name, relations in one_to_many_configuration.items()
+        ]
+
+    def __load_many_to_many_mappings(self, many_to_many_configuration: Dict[str, Any]) -> None:
+        """Loads Many To Many Mappings from the configuration."""
+        self._many_to_many_mappings = [
+            TableMapping(table_name=table_name, mapping=from_dict(data_class=ManyToManyMapping, data=relations))
+            for table_name, relations in many_to_many_configuration.items()
+        ]
+
+
+class PyArrowImporter(TableToGraphImporter):
+    """TableToGraphImporter wrapper for use with PyArrow for reading data."""
+
+    def __init__(
+        self,
+        file_system_handler: FileSystemHandler,
+        file_extension_enum: PyArrowFileTypeEnum,
+        data_configuration: Dict[str, Any],
+        memgraph: Optional[Memgraph] = None,
+    ) -> None:
+        """
+        Args:
+            file_system_handler: File system to read from.
+            file_extension_enum: File format to be read.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+
+        Raises:
+            ValueError: PyArrow doesn't support ORC on Windows.
+        """
+        if file_extension_enum == PyArrowFileTypeEnum.ORC and platform.system() == "Windows":
+            raise ValueError("ORC filetype is currently not supported by PyArrow on Windows")
+
+        super().__init__(
+            data_loader=PyArrowDataLoader(
+                file_extension_enum=file_extension_enum, file_system_handler=file_system_handler
+            ),
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class PyArrowS3Importer(PyArrowImporter):
+    """PyArrowImporter wrapper for use with the Amazon S3 File System."""
+
+    def __init__(
+        self,
+        bucket_name: str,
+        file_extension_enum: PyArrowFileTypeEnum,
+        data_configuration: Dict[str, Any],
+        memgraph: Optional[Memgraph] = None,
+        **kwargs,
+    ) -> None:
+        """
+        Args:
+            bucket_name: Name of the bucket in S3 to read from.
+            file_extension_enum: File format to be read.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for S3FileSystem.
+        """
+        super().__init__(
+            file_system_handler=S3FileSystemHandler(bucket_name=bucket_name, **kwargs),
+            file_extension_enum=file_extension_enum,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class PyArrowAzureBlobImporter(PyArrowImporter):
+    """PyArrowImporter wrapper for use with the Azure Blob File System."""
+
+    def __init__(
+        self,
+        container_name: str,
+        file_extension_enum: PyArrowFileTypeEnum,
+        data_configuration: Dict[str, Any],
+        memgraph: Optional[Memgraph] = None,
+        **kwargs,
+    ) -> None:
+        """
+        Args:
+            container_name: Name of the container in Azure Blob to read from.
+            file_extension_enum: File format to be read.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for AzureBlobFileSystem.
+        """
+        super().__init__(
+            file_system_handler=AzureBlobFileSystemHandler(container_name=container_name, **kwargs),
+            file_extension_enum=file_extension_enum,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class PyArrowLocalFileSystemImporter(PyArrowImporter):
+    """PyArrowImporter wrapper for use with the Local File System."""
+
+    def __init__(
+        self,
+        path: str,
+        file_extension_enum: PyArrowFileTypeEnum,
+        data_configuration: Dict[str, Any],
+        memgraph: Optional[Memgraph] = None,
+    ) -> None:
+        """
+        Args:
+            path: Full path to the directory to read from.
+            file_extension_enum: File format to be read.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+        """
+        super().__init__(
+            file_system_handler=LocalFileSystemHandler(path=path),
+            file_extension_enum=file_extension_enum,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class ParquetS3FileSystemImporter(PyArrowS3Importer):
+    """PyArrowS3Importer wrapper for use with the S3 file system and the Parquet file type."""
+
+    def __init__(
+        self, bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            bucket_name: Name of the bucket in S3 to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for S3FileSystem.
+        """
+        super().__init__(
+            bucket_name=bucket_name,
+            file_extension_enum=PyArrowFileTypeEnum.Parquet,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class CSVS3FileSystemImporter(PyArrowS3Importer):
+    """PyArrowS3Importer wrapper for use with the S3 file system and the CSV file type."""
+
+    def __init__(
+        self, bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            bucket_name: Name of the bucket in S3 to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for S3FileSystem.
+        """
+        super().__init__(
+            bucket_name=bucket_name,
+            file_extension_enum=PyArrowFileTypeEnum.CSV,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class ORCS3FileSystemImporter(PyArrowS3Importer):
+    """PyArrowS3Importer wrapper for use with the S3 file system and the ORC file type."""
+
+    def __init__(
+        self, bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            bucket_name: Name of the bucket in S3 to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for S3FileSystem.
+        """
+        super().__init__(
+            bucket_name=bucket_name,
+            file_extension_enum=PyArrowFileTypeEnum.ORC,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class FeatherS3FileSystemImporter(PyArrowS3Importer):
+    """PyArrowS3Importer wrapper for use with the S3 file system and the Feather file type."""
+
+    def __init__(
+        self, bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            bucket_name: Name of the bucket in S3 to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for S3FileSystem.
+        """
+        super().__init__(
+            bucket_name=bucket_name,
+            file_extension_enum=PyArrowFileTypeEnum.Feather,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class ParquetAzureBlobFileSystemImporter(PyArrowAzureBlobImporter):
+    """PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the Parquet file type."""
+
+    def __init__(
+        self, container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            container_name: Name of the container in Azure Blob storage to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for AzureBlobFileSystem.
+        """
+        super().__init__(
+            container_name=container_name,
+            file_extension_enum=PyArrowFileTypeEnum.Parquet,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class CSVAzureBlobFileSystemImporter(PyArrowAzureBlobImporter):
+    """PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the CSV file type."""
+
+    def __init__(
+        self, container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            container_name: Name of the container in Azure Blob storage to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for AzureBlobFileSystem.
+        """
+        super().__init__(
+            container_name=container_name,
+            file_extension_enum=PyArrowFileTypeEnum.CSV,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class ORCAzureBlobFileSystemImporter(PyArrowAzureBlobImporter):
+    """PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the ORC file type."""
+
+    def __init__(
+        self, container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            container_name: Name of the container in Azure Blob storage to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for AzureBlobFileSystem.
+        """
+        super().__init__(
+            container_name=container_name,
+            file_extension_enum=PyArrowFileTypeEnum.ORC,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class FeatherAzureBlobFileSystemImporter(PyArrowAzureBlobImporter):
+    """PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the Feather file type."""
+
+    def __init__(
+        self, container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs
+    ) -> None:
+        """
+        Args:
+            container_name: Name of the container in Azure Blob storage to read from.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+            **kwargs: Specified for AzureBlobFileSystem.
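+
+        Example:
+            A construction sketch; the container name, the `config` dictionary,
+            and the credentials are placeholders, and the keyword arguments
+            follow the Kwargs section of AzureBlobFileSystemHandler:
+
+            importer = FeatherAzureBlobFileSystemImporter(
+                container_name="data",
+                data_configuration=config,
+                blob_account_name="<account-name>",
+                blob_sas_token="<sas-token>",
+            )
+            importer.translate()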
+        """
+        super().__init__(
+            container_name=container_name,
+            file_extension_enum=PyArrowFileTypeEnum.Feather,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+            **kwargs,
+        )
+
+
+class ParquetLocalFileSystemImporter(PyArrowLocalFileSystemImporter):
+    """PyArrowLocalFileSystemImporter wrapper for use with the local file system and the Parquet file type."""
+
+    def __init__(self, path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None:
+        """
+        Args:
+            path: Full path to the directory.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+        """
+        super().__init__(
+            path=path,
+            file_extension_enum=PyArrowFileTypeEnum.Parquet,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class CSVLocalFileSystemImporter(PyArrowLocalFileSystemImporter):
+    """PyArrowLocalFileSystemImporter wrapper for use with the local file system and the CSV file type."""
+
+    def __init__(self, path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None:
+        """
+        Args:
+            path: Full path to the directory.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+        """
+        super().__init__(
+            path=path,
+            file_extension_enum=PyArrowFileTypeEnum.CSV,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class ORCLocalFileSystemImporter(PyArrowLocalFileSystemImporter):
+    """PyArrowLocalFileSystemImporter wrapper for use with the local file system and the ORC file type."""
+
+    def __init__(self, path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None:
+        """
+        Args:
+            path: Full path to the directory.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
+        """
+        super().__init__(
+            path=path,
+            file_extension_enum=PyArrowFileTypeEnum.ORC,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
+
+
+class FeatherLocalFileSystemImporter(PyArrowLocalFileSystemImporter):
+    """PyArrowLocalFileSystemImporter wrapper for use with the local file system and the Feather/IPC/Arrow file type."""
+
+    def __init__(self, path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None:
+        """
+        Args:
+            path: Full path to the directory.
+            data_configuration: Configuration for the translations.
+            memgraph: Connection to Memgraph (Optional).
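+
+        Example:
+            A construction sketch; the path and the `config` dictionary are
+            placeholders:
+
+            importer = FeatherLocalFileSystemImporter(
+                path="./import-data",
+                data_configuration=config,
+            )
+            importer.translate()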
+        """
+        super().__init__(
+            path=path,
+            file_extension_enum=PyArrowFileTypeEnum.Feather,
+            data_configuration=data_configuration,
+            memgraph=memgraph,
+        )
diff --git a/gqlalchemy/memgraph.py b/gqlalchemy/memgraph.py
index 3855b78f..71156219 100644
--- a/gqlalchemy/memgraph.py
+++ b/gqlalchemy/memgraph.py
@@ -114,6 +114,10 @@ def ensure_indexes(self, indexes: List[MemgraphIndex]) -> None:
         for missing_index in new_indexes.difference(old_indexes):
             self.create_index(missing_index)
 
+    def drop_indexes(self) -> None:
+        """Drops all indexes in the database"""
+        self.ensure_indexes(indexes=[])
+
     def create_constraint(self, index: MemgraphConstraint) -> None:
         """Creates a constraint (label or label-property type) in the database"""
         query = f"CREATE CONSTRAINT ON {index.to_cypher()};"
@@ -146,6 +150,16 @@ def get_constraints(
         )
         return constraints
 
+    def get_exists_constraints(
+        self,
+    ) -> List[MemgraphConstraintExists]:
+        return [x for x in self.get_constraints() if isinstance(x, MemgraphConstraintExists)]
+
+    def get_unique_constraints(
+        self,
+    ) -> List[MemgraphConstraintUnique]:
+        return [x for x in self.get_constraints() if isinstance(x, MemgraphConstraintUnique)]
+
     def ensure_constraints(
         self,
         constraints: List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]],
@@ -190,14 +204,39 @@ def create_trigger(self, trigger: MemgraphTrigger) -> None:
         self.execute(query)
 
     def get_triggers(self) -> List[str]:
-        """Creates a trigger"""
-        return list(self.execute_and_fetch("SHOW TRIGGERS;"))
+        """Returns a list of all database triggers"""
+        triggers_list = list(self.execute_and_fetch("SHOW TRIGGERS;"))
+        memgraph_triggers_list = []
+        for trigger in triggers_list:
+            event_type = trigger["event type"]
+            event_object = None
+
+            if event_type == "ANY":
+                event_type = None
+            elif len(event_type.split()) > 1:
+                event_object, event_type = event_type.split()
+
+            memgraph_triggers_list.append(
+                MemgraphTrigger(
+                    name=trigger["trigger name"],
+                    event_type=event_type,
+                    event_object=event_object,
+                    execution_phase=trigger["phase"].split()[0],
+                    statement=trigger["statement"],
+                )
+            )
+        return memgraph_triggers_list
 
-    def drop_trigger(self, trigger) -> None:
+    def drop_trigger(self, trigger: MemgraphTrigger) -> None:
         """Drop a trigger"""
         query = f"DROP TRIGGER {trigger.name};"
         self.execute(query)
 
+    def drop_triggers(self) -> None:
+        """Drops all triggers in the database"""
+        for trigger in self.get_triggers():
+            self.drop_trigger(trigger)
+
     def _get_cached_connection(self) -> Connection:
         """Returns cached connection if it exists, creates it otherwise"""
         if self._cached_connection is None or not self._cached_connection.is_active():
@@ -292,6 +331,11 @@ def save_node(self, node: Node) -> Node:
             result = self._save_node_properties_on_disk(node, result)
         return result
 
+    def save_nodes(self, nodes: List[Node]) -> None:
+        """Saves a list of nodes to Memgraph, updating each node's `_id` in place."""
+        for node in nodes:
+            node._id = self.save_node(node)._id
+
     def _save_node_properties_on_disk(self, node: Node, result: Node) -> Node:
         """Saves all on_disk properties to the on disk database attached to
         Memgraph.
@@ -454,6 +498,11 @@ def save_relationship(self, relationship: Relationship) -> Optional[Relationship
             result = self._save_relationship_properties_on_disk(relationship, result)
         return result
 
+    def save_relationships(self, relationships: List[Relationship]) -> None:
+        """Saves a list of relationships to Memgraph, updating each relationship's `_id` in place."""
+        for relationship in relationships:
+            relationship._id = self.save_relationship(relationship)._id
+
     def _save_relationship_properties_on_disk(self, relationship: Relationship, result: Relationship) -> Relationship:
         """Saves on_disk relationship propeties on the OnDiskPropertyDatabase
         added with Memgraph().init_disk_storage(db). If OnDiskPropertyDatabase
diff --git a/gqlalchemy/models.py b/gqlalchemy/models.py
index 60be647d..86b3b72b 100644
--- a/gqlalchemy/models.py
+++ b/gqlalchemy/models.py
@@ -24,26 +24,59 @@
     GQLAlchemyError,
     GQLAlchemySubclassNotFoundWarning,
     GQLAlchemyDatabaseMissingInFieldError,
+    GQLAlchemyDatabaseMissingInNodeClassError,
 )
 
 
 class TriggerEventType:
+    """An enum representing types of trigger events."""
+
     CREATE = "CREATE"
     UPDATE = "UPDATE"
     DELETE = "DELETE"
 
+    @classmethod
+    def list(cls):
+        return [cls.CREATE, cls.UPDATE, cls.DELETE]
+
 
 class TriggerEventObject:
-    ALL = ""
+    """An enum representing types of trigger objects.
+
+    NODE -> `()`
+    RELATIONSHIP -> `-->`
+    """
+
     NODE = "()"
     RELATIONSHIP = "-->"
 
+    @classmethod
+    def list(cls):
+        return [cls.NODE, cls.RELATIONSHIP]
+
 
 class TriggerExecutionPhase:
+    """An enum representing the phases of trigger execution.
+
+    Enum:
+        BEFORE
+        AFTER
+    """
+
     BEFORE = "BEFORE"
     AFTER = "AFTER"
 
 
+class FieldAttrsConstants:
+    INDEX = "index"
+    EXISTS = "exists"
+    UNIQUE = "unique"
+
+    @classmethod
+    def list(cls):
+        return [cls.INDEX, cls.EXISTS, cls.UNIQUE]
+
+
 @dataclass(frozen=True, eq=True)
 class MemgraphIndex:
     label: str
@@ -95,15 +128,37 @@ def to_cypher(self) -> str:
         pass
 
 
-@dataclass(frozen=True, eq=True)
 class MemgraphKafkaStream(MemgraphStream):
-    consumer_group: str = None
-    batch_interval: str = None
-    batch_size: str = None
-    bootstrap_servers: str = None
+    """A class for creating and managing Kafka streams in Memgraph.
+
+    Args:
+        name: A string representing the stream name.
+        topics: A list of strings representing the stream topics.
+        transform: A string representing the name of the transformation procedure.
+        consumer_group: A string representing the consumer group.
+        batch_interval: A string representing the batch interval.
+        batch_size: A string representing the batch size.
+        bootstrap_servers: A string or list of strings representing bootstrap server addresses.
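+
+    Example:
+        A construction sketch; the stream name, topic, and transform
+        procedure are placeholders:
+
+        stream = MemgraphKafkaStream(
+            name="ratings_stream",
+            topics=["ratings"],
+            transform="movielens.rating",
+            bootstrap_servers="localhost:9092",
+        )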
+    """
+
+    def __init__(
+        self,
+        name: str,
+        topics: List[str],
+        transform: str,
+        consumer_group: str = None,
+        batch_interval: str = None,
+        batch_size: str = None,
+        bootstrap_servers: Union[str, List[str]] = None,
+    ):
+        super().__init__(name, topics, transform)
+        self.consumer_group = consumer_group
+        self.batch_interval = batch_interval
+        self.batch_size = batch_size
+        self.bootstrap_servers = bootstrap_servers
 
     def to_cypher(self) -> str:
-        """Converts Kafka stream to a cypher clause."""
+        """Converts Kafka stream to a Cypher clause."""
         topics = ",".join(self.topics)
         query = f"CREATE KAFKA STREAM {self.name} TOPICS {topics} TRANSFORM {self.transform}"
         if self.consumer_group is not None:
@@ -113,19 +168,44 @@ def to_cypher(self) -> str:
         if self.batch_size is not None:
             query += f" BATCH_SIZE {self.batch_size}"
         if self.bootstrap_servers is not None:
-            query += f" BOOTSTRAP_SERVERS {self.bootstrap_servers}"
+            if isinstance(self.bootstrap_servers, str):
+                servers_field = f"'{self.bootstrap_servers}'"
+            else:
+                servers_field = str(self.bootstrap_servers)[1:-1]
+            query += f" BOOTSTRAP_SERVERS {servers_field}"
         query += ";"
         return query
 
 
-@dataclass(frozen=True, eq=True)
 class MemgraphPulsarStream(MemgraphStream):
-    batch_interval: str = None
-    batch_size: str = None
-    service_url: str = None
+    """A class for creating and managing Pulsar streams in Memgraph.
+
+    Args:
+        name: A string representing the stream name.
+        topics: A list of strings representing the stream topics.
+        transform: A string representing the name of the transformation procedure.
+        batch_interval: A string representing the batch interval.
+        batch_size: A string representing the batch size.
+        service_url: A string representing the service URL.
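+
+    Example:
+        A construction sketch; the stream name, topic, transform procedure,
+        and service URL are placeholders:
+
+        stream = MemgraphPulsarStream(
+            name="ratings_stream",
+            topics=["ratings"],
+            transform="movielens.rating",
+            service_url="pulsar://127.0.0.1:6650",
+        )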
+ """ + + def __init__( + self, + name: str, + topics: List[str], + transform: str, + batch_interval: str = None, + batch_size: str = None, + service_url: str = None, + ): + super().__init__(name, topics, transform) + self.batch_interval = batch_interval + self.batch_size = batch_size + self.service_url = service_url def to_cypher(self) -> str: - """Converts Pulsar stream to a cypher clause.""" + """Converts Pulsar stream to a Cypher clause.""" topics = ",".join(self.topics) query = f"CREATE PULSAR STREAM {self.name} TOPICS {topics} TRANSFORM {self.transform}" if self.batch_interval is not None: @@ -141,16 +221,20 @@ def to_cypher(self) -> str: @dataclass(frozen=True, eq=True) class MemgraphTrigger: name: str - event_type: TriggerEventType - event_object: TriggerEventObject execution_phase: TriggerExecutionPhase statement: str + event_type: Optional[TriggerEventType] = None + event_object: Optional[TriggerEventObject] = None def to_cypher(self) -> str: """Converts a Trigger to a cypher clause.""" query = f"CREATE TRIGGER {self.name} " - # when self.event_object is TriggerEventObject.ALL there is a double space - query += f"ON {self.event_object} {self.event_type} " + if self.event_type in TriggerEventType.list(): + query += f"ON " + ( + f"{self.event_object} {self.event_type} " + if self.event_object in TriggerEventObject.list() + else f"{self.event_type} " + ) query += f"{self.execution_phase} COMMIT EXECUTE " query += f"{self.statement};" return query @@ -162,7 +246,7 @@ class GraphObject(BaseModel): class Config: extra = Extra.allow - def __init_subclass__(cls, type=None, label=None, labels=None): + def __init_subclass__(cls, type=None, label=None, labels=None, index=None, db=None): """Stores the subclass by type if type is specified, or by class name when instantiating a subclass. """ @@ -211,9 +295,7 @@ def parse_obj(cls, obj): """ return cls._convert_to_real_type_(obj) - def escape_value( - self, value: Union[None, bool, int, float, str, list, dict, datetime.datetime], in_list_or_dict=False - ) -> str: + def escape_value(self, value: Union[None, bool, int, float, str, list, dict, datetime.datetime]) -> str: if value is None: "Null" elif isinstance(value, bool): @@ -268,6 +350,14 @@ def _get_cypher_fields_and_block(self, variable_name: str) -> str: """ return self._get_cypher_field_assignment_block(variable_name, " AND ") + def _get_cypher_fields_xor_block(self, variable_name: str) -> str: + """Returns a cypher field assignment block separated by an XOR + statement. 
+ """ + return self._get_cypher_field_assignment_block(variable_name, " XOR ") + + # TODO: add NOT + def _get_cypher_set_properties(self, variable_name: str) -> str: """Returns a cypher set properties block.""" cypher_set_properties = [] @@ -297,7 +387,7 @@ def __init__(self, **data): @property def _properties(self) -> Dict[str, Any]: - return {k: v for k, v in dict(self).items() if not k.startswith("_")} + return {k: v for k, v in dict(self).items() if not k.startswith("_") and k != "labels"} def __str__(self) -> str: return f"" @@ -323,31 +413,46 @@ def field_in_superclass(field, constraint): return None + def get_base_labels() -> Set[str]: + base_labels = set() + nonlocal bases + for base in bases: + if hasattr(base, "labels"): + base_labels = base_labels.union(base.labels) + + return base_labels + cls = super().__new__(mcs, name, bases, namespace, **kwargs) + cls.index = kwargs.get("index") cls.label = kwargs.get("label", name) - if name == "Node": - pass - elif "labels" in kwargs: # overrides superclass labels - cls.labels = kwargs["labels"] - elif hasattr(cls, "labels"): - cls.labels = cls.labels | {cls.label} - else: - cls.labels = {cls.label} + if name != "Node": + cls.labels = get_base_labels().union({cls.label}, kwargs.get("labels", set())) + + db = kwargs.get("db") + if cls.index is True: + if db is None: + raise GQLAlchemyDatabaseMissingInNodeClassError(cls=cls) + + index = MemgraphIndex(cls.label) + db.create_index(index) for field in cls.__fields__: attrs = cls.__fields__[field].field_info.extra field_type = cls.__fields__[field].type_.__name__ - label = attrs.get("label", cls.label) - db = attrs.get("db", None) skip_constraints = False - for constraint in ["index", "unique", "exists"]: + + if db is None: + db = attrs.get("db") + + for constraint in FieldAttrsConstants.list(): if constraint in attrs and db is None: base = field_in_superclass(field, constraint) if base is not None: cls.__fields__[field].field_info.extra = base.__fields__[field].field_info.extra skip_constraints = True break + raise GQLAlchemyDatabaseMissingInFieldError( constraint=constraint, field=field, @@ -357,20 +462,21 @@ def field_in_superclass(field, constraint): if skip_constraints: continue - if "index" in attrs: + if FieldAttrsConstants.INDEX in attrs and attrs[FieldAttrsConstants.INDEX] is True: index = MemgraphIndex(label, field) db.create_index(index) - if "exists" in attrs: + if FieldAttrsConstants.EXISTS in attrs and attrs[FieldAttrsConstants.EXISTS] is True: constraint = MemgraphConstraintExists(label, field) db.create_constraint(constraint) - if "unique" in attrs: + if FieldAttrsConstants.UNIQUE in attrs and attrs[FieldAttrsConstants.UNIQUE] is True: constraint = MemgraphConstraintUnique(label, field) db.create_constraint(constraint) if attrs and "db" in attrs: del attrs["db"] + return cls diff --git a/gqlalchemy/query_builder.py b/gqlalchemy/query_builder.py index 104c7757..ffc5a204 100644 --- a/gqlalchemy/query_builder.py +++ b/gqlalchemy/query_builder.py @@ -12,27 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from enum import Enum
 import re
 from abc import ABC, abstractmethod
-from typing import Any, Dict, Iterator, List, Optional, Union
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
 
 from .memgraph import Connection, Memgraph
 from .utilities import to_cypher_labels, to_cypher_properties, to_cypher_value
 from .models import Node, Relationship
+from .exceptions import (
+    GQLAlchemyLiteralAndExpressionMissingInWhere,
+    GQLAlchemyExtraKeywordArgumentsInWhere,
+    GQLAlchemyMissingOrder,
+    GQLAlchemyOrderByTypeError,
+)
 
 
 class DeclarativeBaseTypes:
-    AND_WHERE = "AND_WHERE"
     CALL = "CALL"
     CREATE = "CREATE"
     DELETE = "DELETE"
     EDGE = "EDGE"
     LIMIT = "LIMIT"
+    LOAD_CSV = "LOAD_CSV"
     MATCH = "MATCH"
     MERGE = "MERGE"
     NODE = "NODE"
-    ORDER_BY = "ORDER_BY"
-    OR_WHERE = "OR_WHERE"
+    ORDER_BY = "ORDER BY"
     REMOVE = "REMOVE"
     RETURN = "RETURN"
     SKIP = "SKIP"
@@ -53,10 +59,19 @@ class MatchConstants:
     VARIABLE = "variable"
 
 
-class WhereConditionConstants:
-    WHERE = "WHERE"
-    AND = "AND"
-    OR = "OR"
+class Where(Enum):
+    WHERE = 1
+    AND = 2
+    OR = 3
+    XOR = 4
+    NOT = 5
+
+
+class Order(Enum):
+    ASC = 1
+    ASCENDING = 2
+    DESC = 3
+    DESCENDING = 4
 
 
 class NoVariablesMatchedException(Exception):
@@ -80,6 +95,17 @@ def construct_query(self) -> str:
     pass
 
 
+class LoadCsvPartialQuery(PartialQuery):
+    def __init__(self, path: str, header: bool, row: str):
+        super().__init__(DeclarativeBaseTypes.LOAD_CSV)
+        self.path = path
+        self.header = header
+        self.row = row
+
+    def construct_query(self) -> str:
+        return f" LOAD CSV FROM '{self.path}' " + ("WITH" if self.header else "NO") + f" HEADER AS {self.row} "
+
+
 class MatchPartialQuery(PartialQuery):
     def __init__(self, optional: bool):
         super().__init__(DeclarativeBaseTypes.MATCH)
@@ -121,15 +147,67 @@ def construct_query(self) -> str:
 
 
 class WhereConditionPartialQuery(PartialQuery):
-    def __init__(self, keyword: str, query: str):
-        super().__init__(DeclarativeBaseTypes.WHERE)
+    _LITERAL = "literal"
+    _EXPRESSION = "expression"
+    _LABEL_FILTER = ":"
 
-        self.keyword = keyword
-        self.query = query
+    def __init__(self, item: str, operator: str, keyword: Where = Where.WHERE, is_negated: bool = False, **kwargs):
+        super().__init__(type=keyword.name if not is_negated else f"{keyword.name} {Where.NOT.name}")
+        self.query = self._build_where_query(item=item, operator=operator, **kwargs)
 
     def construct_query(self) -> str:
         """Constructs a where partial query."""
-        return f" {self.keyword} {self.query} "
+        return f" {self.type} {self.query} "
+
+    def _build_where_query(self, item: str, operator: str, **kwargs) -> str:
+        """Builds parts of a WHERE Cypher query divided by the boolean operators."""
+        literal = kwargs.get(WhereConditionPartialQuery._LITERAL)
+        value = kwargs.get(WhereConditionPartialQuery._EXPRESSION)
+
+        if value is None:
+            if literal is None:
+                raise GQLAlchemyLiteralAndExpressionMissingInWhere
+
+            value = to_cypher_value(literal)
+        elif literal is not None:
+            raise GQLAlchemyExtraKeywordArgumentsInWhere
+
+        return ("" if operator == WhereConditionPartialQuery._LABEL_FILTER else " ").join([item, operator, value])
+
+
+class WhereNotConditionPartialQuery(WhereConditionPartialQuery):
+    def __init__(self, item: str, operator: str, keyword: Where = Where.WHERE, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=keyword, is_negated=True, **kwargs)
+
+
+class AndWhereConditionPartialQuery(WhereConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.AND, **kwargs)
+
+
+class AndNotWhereConditionPartialQuery(WhereNotConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.AND, **kwargs)
+
+
+class OrWhereConditionPartialQuery(WhereConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.OR, **kwargs)
+
+
+class OrNotWhereConditionPartialQuery(WhereNotConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.OR, **kwargs)
+
+
+class XorWhereConditionPartialQuery(WhereConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.XOR, **kwargs)
+
+
+class XorNotWhereConditionPartialQuery(WhereNotConditionPartialQuery):
+    def __init__(self, item: str, operator: str, **kwargs):
+        super().__init__(item=item, operator=operator, keyword=Where.XOR, **kwargs)
 
 
 class NodePartialQuery(PartialQuery):
@@ -154,7 +232,7 @@ def properties(self) -> str:
 
     def construct_query(self) -> str:
         """Constructs a node partial query."""
-        return f"({self.variable}{self.labels}{self.properties})"
+        return f"({self.variable}{self.labels}{' ' + self.properties if self.properties else ''})"
 
 
 class EdgePartialQuery(PartialQuery):
@@ -312,14 +390,38 @@ def construct_query(self) -> str:
 
 
 class OrderByPartialQuery(PartialQuery):
-    def __init__(self, properties: str):
+    def __init__(self, properties: Union[str, Tuple[str, Order], List[Union[str, Tuple[str, Order]]]]):
         super().__init__(DeclarativeBaseTypes.ORDER_BY)
 
-        self.properties = properties
+        self.query = (
+            self._order_by_read_list(properties)
+            if isinstance(properties, list)
+            else self._order_by_read_item(properties)
+        )
 
     def construct_query(self) -> str:
         """Creates a ORDER BY statement Cypher partial query."""
-        return f" ORDER BY {self.properties} "
+        return f" {self.type} {self.query} "
+
+    def _order_by_read_item(self, item: Union[str, Tuple[str, Order]]) -> str:
+        if isinstance(item, str):
+            return self._order_by_read_str(item)
+        elif isinstance(item, tuple):
+            return self._order_by_read_tuple(item)
+        else:
+            raise GQLAlchemyOrderByTypeError
+
+    def _order_by_read_list(self, property: List[Union[str, Tuple[str, Order]]]) -> str:
+        return ", ".join(self._order_by_read_item(item=item) for item in property)
+
+    def _order_by_read_str(self, property: str) -> str:
+        return property
+
+    def _order_by_read_tuple(self, tuple: Tuple[str, Order]) -> str:
+        if not isinstance(tuple[1], Order):
+            raise GQLAlchemyMissingOrder
+
+        return f"{tuple[0]} {tuple[1].name}"
 
 
 class LimitPartialQuery(PartialQuery):
@@ -356,29 +458,58 @@ def construct_query(self) -> str:
 
 class DeclarativeBase(ABC):
     def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None):
-        self._query: List[Any] = []
+        self._query: List[PartialQuery] = []
         self._connection = connection if connection is not None else Memgraph()
+        self._fetch_results: bool = False
 
     def match(self, optional: bool = False) -> "DeclarativeBase":
-        """Creates a MATCH statement Cypher partial query."""
+        """Obtain data from the database by matching it to a given pattern.
+
+        Args:
+            optional: A bool indicating if missing parts of the pattern will be
+                filled with null values.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
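+
+        Examples:
+            Matching and returning all nodes.
+
+            Python: `match().node(variable="n").return_()`
+            Cypher: `MATCH (n) RETURN *;`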
+        """
         self._query.append(MatchPartialQuery(optional))
 
         return self
 
     def merge(self) -> "DeclarativeBase":
-        """Creates a MERGE statement Cypher partial query."""
+        """Ensure that a pattern you are looking for exists in the database.
+        This means that if the pattern is not found, it will be created. In a
+        way, this clause is like a combination of MATCH and CREATE.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(MergePartialQuery())
 
         return self
 
     def create(self) -> "DeclarativeBase":
-        """Creates a CREATE statement Cypher partial query."""
+        """Create nodes and relationships in a graph.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
        self._query.append(CreatePartialQuery())
 
         return self
 
     def call(self, procedure: str, arguments: Optional[str] = None) -> "DeclarativeBase":
-        """Creates a CALL statement Cypher partial query."""
+        """Call a query module procedure.
+
+        Args:
+            procedure: A string representing the name of the procedure in the
+                format `query_module.procedure`.
+            arguments: A string representing the arguments of the procedure in
+                text format.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(CallPartialQuery(procedure, arguments))
 
         return self
@@ -390,7 +521,19 @@ def node(
         node: Optional["Node"] = None,
         **kwargs,
     ) -> "DeclarativeBase":
-        """Creates a node Cypher partial query."""
+        """Add a node pattern to the query.
+
+        Args:
+            labels: A string or list of strings representing the labels of the
+                node.
+            variable: A string representing the name of the variable for storing
+                results of the node pattern.
+            node: A `Node` object to construct the pattern from.
+            **kwargs: Arguments representing the properties of the node.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         if not self._is_linking_valid_with_query(DeclarativeBaseTypes.NODE):
             raise InvalidMatchChainException()
 
@@ -413,18 +556,30 @@ def to(
         relationship: Optional["Relationship"] = None,
         **kwargs,
    ) -> "DeclarativeBase":
-        """Creates a relationship Cypher partial query with a '->' sign."""
+        """Add a relationship pattern with a '->' sign to the query.
+
+        Args:
+            edge_label: A string representing the type of the relationship.
+            directed: A bool indicating if the relationship is directed.
+            variable: A string representing the name of the variable for storing
+                results of the relationship pattern.
+            relationship: A `Relationship` object to construct the pattern from.
+            **kwargs: Arguments representing the properties of the relationship.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         if not self._is_linking_valid_with_query(DeclarativeBaseTypes.EDGE):
             raise InvalidMatchChainException()
 
         if relationship is None:
-            labels_str = to_cypher_labels(edge_label)
+            type_str = to_cypher_labels(edge_label)
             properties_str = to_cypher_properties(kwargs)
         else:
-            labels_str = to_cypher_labels(relationship._type)
+            type_str = to_cypher_labels(relationship._type)
             properties_str = to_cypher_properties(relationship._properties)
 
-        self._query.append(EdgePartialQuery(variable, labels_str, properties_str, bool(directed), False))
+        self._query.append(EdgePartialQuery(variable, type_str, properties_str, bool(directed), False))
 
         return self
 
@@ -436,7 +591,19 @@ def from_(
         relationship: Optional["Relationship"] = None,
         **kwargs,
     ) -> "Match":
-        """Creates a relationship Cypher partial query with a '<-' sign."""
+        """Add a relationship pattern with a '<-' sign to the query.
+ + Args: + edge_label: A string representing the type of the relationship. + directed: A bool indicating if the relationship is directed. + variable: A string representing the name of the variable for storing + results of the relationship pattern. + relationship: A `Relationship` object to construct the pattern from. + **kwargs: Arguments representing the properties of the relationship. + + Returns: + A `DeclarativeBase` instance for constructing queries. + """ if not self._is_linking_valid_with_query(DeclarativeBaseTypes.EDGE): raise InvalidMatchChainException() @@ -451,100 +618,416 @@ def from_( return self - def where(self, property: str, operator: str, value: Any) -> "DeclarativeBase": - """Creates a WHERE statement Cypher partial query.""" - value_cypher = to_cypher_value(value) - self._query.append( - WhereConditionPartialQuery(WhereConditionConstants.WHERE, " ".join([property, operator, value_cypher])) - ) + def where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + """Creates a WHERE statement Cypher partial query. + + Args: + item: A string representing variable or property. + operator: A string representing the operator. + + Kwargs: + literal: A value that will be converted to Cypher value, such as int, float, string, etc. + expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added). + + Raises: + GQLAlchemyLiteralAndExpressionMissingInWhere: Raises an error when neither literal nor expression keyword arguments were provided. + GQLAlchemyExtraKeywordArgumentsInWhere: Raises an error when both literal and expression keyword arguments were provided. + + Returns: + self: A partial Cypher query built from the given parameters. + + Examples: + Filtering query results by the equality of `name` properties of two connected nodes. + + Python: `match().node(variable="n").to().node(variable="m").where(item="n.name", operator="=", expression="m.name").return_()` + Cypher: `MATCH (n)-[]->(m) WHERE n.name = m.name RETURN *;` + + Filtering query results by the node label. + + Python: `match().node(variable="n").where(item="n", operator=":", expression="User").return_()` + Cypher: `MATCH (n) WHERE n:User RETURN *;` + + Filtering query results by the comparison of node property and literal. + + Python: `match().node(variable="n").where(item="n.age", operator=">", literal=18).return_()` + Cypher: `MATCH (n) WHERE n.age > 18 RETURN *;` + """ + # WHERE item operator (literal | expression) + # item: variable | property + # expression: label | property + self._query.append(WhereConditionPartialQuery(item=item, operator=operator, **kwargs)) return self - def and_where(self, property: str, operator: str, value: Any) -> "DeclarativeBase": - """Creates a AND (expression) statement Cypher partial query.""" - value_cypher = to_cypher_value(value) - self._query.append( - WhereConditionPartialQuery(WhereConditionConstants.AND, " ".join([property, operator, value_cypher])) - ) + def where_not(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + """Creates a WHERE NOT statement Cypher partial query. + + Args: + item: A string representing variable or property. + operator: A string representing the operator. + + Kwargs: + literal: A value that will be converted to Cypher value, such as int, float, string, etc. + expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added). 
+
+        Raises:
+            GQLAlchemyLiteralAndExpressionMissingInWhere: Raises an error when neither literal nor expression keyword arguments were provided.
+            GQLAlchemyExtraKeywordArgumentsInWhere: Raises an error when both literal and expression keyword arguments were provided.
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by the equality of `name` properties of two connected nodes.
+
+            Python: `match().node(variable="n").to().node(variable="m").where_not(item="n.name", operator="=", expression="m.name").return_()`
+            Cypher: `MATCH (n)-[]->(m) WHERE NOT n.name = m.name RETURN *;`
+        """
+        self._query.append(WhereNotConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def and_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an AND statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").and_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User AND n.age > 18 RETURN *;`
+        """
+        self._query.append(AndWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def and_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an AND NOT statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").and_not_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User AND NOT n.age > 18 RETURN *;`
+        """
+        self._query.append(AndNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def or_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an OR statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").or_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User OR n.age > 18 RETURN *;`
+        """
+        self._query.append(OrWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def or_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an OR NOT statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").or_not_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User OR NOT n.age > 18 RETURN *;`
+        """
+        self._query.append(OrNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def xor_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an XOR statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").xor_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User XOR n.age > 18 RETURN *;`
+        """
+        self._query.append(XorWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
+    def xor_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+        """Creates an XOR NOT statement as part of a WHERE Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: A string representing the operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Filtering query results by node label or the comparison of node property and literal.
+
+            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").xor_not_where(item="n.age", operator=">", literal=18).return_()`
+            Cypher: `MATCH (n) WHERE n:User XOR NOT n.age > 18 RETURN *;`
+        """
+        self._query.append(XorNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs))
 
         return self
 
     def unwind(self, list_expression: str, variable: str) -> "DeclarativeBase":
-        """Creates a UNWIND statement Cypher partial query."""
+        """Unwind a list of values as individual rows.
+
+        Args:
+            list_expression: A string representing the list of values.
+            variable: A string representing the variable name for unwinding results.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(UnwindPartialQuery(list_expression, variable))
 
         return self
 
     def with_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
-        """Creates a WITH statement Cypher partial query."""
+        """Chain together parts of a query, piping the results from one to be
+        used as starting points or criteria in the next.
+
+        Args:
+            results: A dictionary mapping variables in the first query with
+                aliases in the second query.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(WithPartialQuery(results))
 
         return self
 
     def union(self, include_duplicates: Optional[bool] = True) -> "DeclarativeBase":
-        """Creates a UNION statement Cypher partial query."""
+        """Combine the results of multiple queries.
+
+        Args:
+            include_duplicates: A bool indicating if duplicates should be
+                included.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(UnionPartialQuery(include_duplicates))
 
         return self
 
     def delete(self, variable_expressions: List[str], detach: Optional[bool] = False) -> "DeclarativeBase":
-        """Creates a DELETE statement Cypher partial query."""
+        """Delete nodes and relationships from the database.
+
+        Args:
+            variable_expressions: A list of strings indicating which nodes
+                and/or relationships should be removed.
+            detach: A bool indicating if relationships should be deleted along
+                with a node.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(DeletePartialQuery(variable_expressions, detach))
 
         return self
 
     def remove(self, items: List[str]) -> "DeclarativeBase":
-        """Creates a REMOVE statement Cypher partial query."""
+        """Remove labels and properties from nodes and relationships.
+
+        Args:
+            items: A list of strings indicating which labels and/or properties
+                should be removed.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(RemovePartialQuery(items))
 
         return self
 
     def yield_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
-        """Creates a YIELD statement Cypher partial query."""
+        """Yield data from the query.
+
+        Args:
+            results: A dictionary mapping items that are returned with alias
+                names.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(YieldPartialQuery(results))
 
         return self
 
     def return_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
-        """Creates a RETURN statement Cypher partial query."""
+        """Return data from the query.
+
+        Args:
+            results: A dictionary mapping items that are returned with alias
+                names.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
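+
+        Examples:
+            Returning all variables from a matched pattern.
+
+            Python: `match().node(variable="n").return_()`
+            Cypher: `MATCH (n) RETURN *;`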
+        """
         self._query.append(ReturnPartialQuery(results))
+        self._fetch_results = True
 
         return self
 
-    def order_by(self, properties: str) -> "DeclarativeBase":
-        """Creates a ORDER BY statement Cypher partial query."""
-        self._query.append(OrderByPartialQuery(properties))
+    def order_by(
+        self, properties: Union[str, Tuple[str, Order], List[Union[str, Tuple[str, Order]]]]
+    ) -> "DeclarativeBase":
+        """Creates an ORDER BY statement Cypher partial query.
+
+        Args:
+            properties: Properties and order by which the query results will be ordered.
+
+        Raises:
+            GQLAlchemyOrderByTypeError: Raises an error when the given property is neither a string nor a tuple.
+            GQLAlchemyMissingOrder: Raises an error when the second element of a tuple is not an `Order` value.
+
+        Returns:
+            self: A partial Cypher query built from the given parameters.
+
+        Examples:
+            Ordering query results by the property `n.name` in ascending order
+            and by the property `n.last_name` in descending order:
+
+            Python: `match().node(variable="n").return_().order_by(properties=["n.name", ("n.last_name", Order.DESC)])`
+            Cypher: `MATCH (n) RETURN * ORDER BY n.name, n.last_name DESC;`
+        """
+        self._query.append(OrderByPartialQuery(properties=properties))
 
         return self
 
     def limit(self, integer_expression: str) -> "DeclarativeBase":
-        """Creates a LIMIT statement Cypher partial query."""
+        """Limit the number of records when returning results.
+
+        Args:
+            integer_expression: A string representing an integer indicating how many records to limit the results to.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(LimitPartialQuery(integer_expression))
 
         return self
 
     def skip(self, integer_expression: str) -> "DeclarativeBase":
-        """Creates a SKIP statement Cypher partial query."""
+        """Skip a number of records when returning results.
+
+        Args:
+            integer_expression: A string representing an integer indicating how many records to skip in the results.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(SkipPartialQuery(integer_expression))
 
         return self
 
     def add_custom_cypher(self, custom_cypher: str) -> "DeclarativeBase":
+        """Inject custom Cypher code into the query.
+
+        Args:
+            custom_cypher: A string representing the Cypher code to be injected
+                into the query.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
         self._query.append(AddStringPartialQuery(custom_cypher))
+        if " RETURN " in custom_cypher:
+            self._fetch_results = True
+
+        return self
+
+    def load_csv(self, path: str, header: bool, row: str) -> "DeclarativeBase":
+        """Load data from a CSV file by executing a Cypher query for each row.
+
+        Args:
+            path: A string representing the path to the CSV file.
+            header: A bool indicating if the CSV file starts with a header row.
+            row: A string representing the name of the variable for iterating
+                over each row.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+        """
+        self._query.append(LoadCsvPartialQuery(path, header, row))
 
         return self
 
     def get_single(self, retrieve: str) -> Any:
-        """Returns a single result with a `retrieve` variable name."""
+        """Returns a single result with a `retrieve` variable name.
+
+        Args:
+            retrieve: A string representing the results variable to be returned.
+
+        Returns:
+            The requested result, or `None` if the query returned no results.
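+
+        Examples:
+            Retrieving a single variable from the first result.
+
+            Python: `match().node(variable="n").return_().get_single(retrieve="n")`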
+ """ query = self._construct_query() result = next(self._connection.execute_and_fetch(query), None) @@ -554,9 +1037,16 @@ def get_single(self, retrieve: str) -> Any: return result def execute(self) -> Iterator[Dict[str, Any]]: - """Executes the Cypher query.""" + """Executes the Cypher query and returns the results. + + Returns: + An iterator of dictionaries containing the results of the query. + """ query = self._construct_query() - return self._connection.execute_and_fetch(query) + if self._fetch_results: + return self._connection.execute_and_fetch(query) + else: + return self._connection.execute(query) def _construct_query(self) -> str: """Constructs the (partial) Cypher query so it can be executed.""" @@ -572,6 +1062,9 @@ def _construct_query(self) -> str: joined_query = re.sub("\\s\\s+", " ", joined_query) return joined_query + def construct_query(self) -> str: + return self._construct_query() + def _any_variables_matched(self) -> bool: """Checks if any variables are present in the result.""" return any( diff --git a/gqlalchemy/transformations.py b/gqlalchemy/transformations.py index d78370a0..565e5a09 100644 --- a/gqlalchemy/transformations.py +++ b/gqlalchemy/transformations.py @@ -33,7 +33,7 @@ class NetworkXGraphConstants: def nx_to_cypher(graph: nx.Graph, config: NetworkXCypherConfig = None) -> Iterator[str]: - """Generates a Cypher queries for creating graph.""" + """Generates a Cypher query for creating a graph.""" if config is None: config = NetworkXCypherConfig() @@ -52,7 +52,7 @@ def nx_graph_to_memgraph_parallel( encrypted: bool = False, config: NetworkXCypherConfig = None, ) -> None: - """Generates a Cypher queries and inserts data into Memgraph in parallel.""" + """Generates Cypher queries and inserts data into Memgraph in parallel.""" if config is None: config = NetworkXCypherConfig() @@ -138,7 +138,7 @@ def __init__(self, config: NetworkXCypherConfig): self._config = config def yield_queries(self, graph: nx.Graph) -> Iterator[str]: - """Generates a Cypher queries for creating graph.""" + """Generates Cypher queries for creating a graph.""" if self._config.create_index: yield from self._nx_nodes_to_cypher_with_index(graph) @@ -147,7 +147,7 @@ def yield_queries(self, graph: nx.Graph) -> Iterator[str]: yield from self._nx_edges_to_cypher(graph) def yield_query_groups(self, graph: nx.Graph) -> List[Iterator[str]]: - """Generates a Cypher queries for creating graph by query groups.""" + """Generates Cypher queries for creating a graph by query groups.""" query_groups = [] @@ -161,12 +161,12 @@ def yield_query_groups(self, graph: nx.Graph) -> List[Iterator[str]]: return query_groups def _nx_nodes_to_cypher(self, graph: nx.Graph) -> Iterator[str]: - """Generates a Cypher queries for creating nodes.""" + """Generates Cypher queries for creating nodes.""" for nx_id, data in graph.nodes(data=True): yield self._create_node(nx_id, data) def _nx_nodes_to_cypher_with_index(self, graph: nx.Graph) -> Iterator[str]: - """Generates a Cypher queries for creating nodes and indexes.""" + """Generates Cypher queries for creating nodes and indexes.""" labels = set() for nx_id, data in graph.nodes(data=True): node_labels = data.get(NetworkXGraphConstants.LABELS, None) @@ -180,7 +180,7 @@ def _nx_nodes_to_cypher_with_index(self, graph: nx.Graph) -> Iterator[str]: yield self._create_index(label) def _nx_edges_to_cypher(self, graph: nx.Graph) -> Iterator[str]: - """Generates a Cypher queries for creating edges.""" + """Generates Cypher queries for creating edges.""" for n1, n2, data in 
graph.edges(data=True): from_label = graph.nodes[n1].get(NetworkXGraphConstants.LABELS, "") to_label = graph.nodes[n2].get(NetworkXGraphConstants.LABELS, "") diff --git a/poetry.lock b/poetry.lock index 72751e01..33e44709 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,85 @@ +[[package]] +name = "adal" +version = "1.2.7" +description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +cryptography = ">=1.1.0" +PyJWT = ">=1.0.0,<3" +python-dateutil = ">=2.1.0,<3" +requests = ">=2.0.0,<3" + +[[package]] +name = "adlfs" +version = "2022.2.0" +description = "Access Azure Datalake Gen1 with fsspec and dask" +category = "main" +optional = false +python-versions = ">3.6" + +[package.dependencies] +aiohttp = "*" +azure-core = ">=1.7.0" +azure-datalake-store = ">=0.0.46,<0.1" +azure-identity = "*" +azure-storage-blob = ">=12.5.0" +fsspec = ">=2021.10.1" + +[[package]] +name = "aiohttp" +version = "3.8.1" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotli", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.2.0" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "atomicwrites" version = "1.4.0" @@ -20,6 +102,60 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "azure-core" +version = "1.23.0" +description = "Microsoft Azure Core Library for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +requests = ">=2.18.4" +six = ">=1.11.0" +typing-extensions = ">=4.0.1" + +[[package]] +name = "azure-datalake-store" +version = "0.0.52" +description = "Azure Data Lake Store Filesystem Client Library for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +adal = ">=0.4.2" +cffi = "*" +requests = 
">=2.20.0" + +[[package]] +name = "azure-identity" +version = "1.8.0" +description = "Microsoft Azure Identity Library for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +azure-core = ">=1.11.0,<2.0.0" +cryptography = ">=2.5" +msal = ">=1.12.0,<2.0.0" +msal-extensions = ">=0.3.0,<0.4.0" +six = ">=1.12.0" + +[[package]] +name = "azure-storage-blob" +version = "12.10.0" +description = "Microsoft Azure Blob Storage Client Library for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +azure-core = ">=1.15.0,<2.0.0" +cryptography = ">=2.1.4" +msrest = ">=0.6.21" + [[package]] name = "black" version = "21.12b0" @@ -47,6 +183,25 @@ jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "cffi" +version = "1.15.0" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.3.1" @@ -55,6 +210,17 @@ category = "dev" optional = false python-versions = ">=3.6.1" +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "8.0.3" @@ -86,6 +252,37 @@ python-versions = ">=3.6" [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "36.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + +[[package]] + +name = "dacite" +version = "1.6.0" +description = "Simple creation of data classes from dictionaries." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pytest (>=5)", "pytest-cov", "coveralls", "black", "mypy", "pylint"] + [[package]] name = "distlib" version = "0.3.4" @@ -94,6 +291,23 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "docker" +version = "5.0.3" +description = "A Python library for the Docker Engine API." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pywin32 = {version = "227", markers = "sys_platform == \"win32\""} +requests = ">=2.14.2,<2.18.0 || >2.18.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.2)"] +tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"] + [[package]] name = "filelock" version = "3.4.2" @@ -120,6 +334,44 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" +[[package]] +name = "frozenlist" +version = "1.3.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "fsspec" +version = "2022.2.0" +description = "File-system specification" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dropbox = ["dropboxdrivefs", "requests", "dropbox"] +entrypoints = ["importlib-metadata"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["requests", "aiohttp"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] + [[package]] name = "identify" version = "2.4.5" @@ -131,6 +383,14 @@ python-versions = ">=3.7" [package.extras] license = ["ukkonen"] +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "importlib-metadata" version = "4.2.0" @@ -155,6 +415,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + [[package]] name = "mccabe" version = "0.6.1" @@ -163,6 +434,59 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "msal" +version = "1.17.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +cryptography = ">=0.6,<39" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[[package]] +name = "msal-extensions" +version = "0.3.1" +description = "" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +msal = ">=0.4.1,<2.0.0" +portalocker = [ + {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, +] + +[[package]] +name = "msrest" +version = "0.6.21" +description = "AutoRest swagger generator Python client runtime." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = ">=2017.4.17" +isodate = ">=0.6.0" +requests = ">=2.16,<3.0" +requests-oauthlib = ">=0.5.0" + +[package.extras] +async = ["aiohttp (>=3.0)", "aiodns"] + +[[package]] +name = "multidict" +version = "6.0.2" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -195,6 +519,28 @@ optional = false python-versions = "*" [[package]] +name = "numpy" +version = "1.21.1" +description = "NumPy is the fundamental package for array computing with Python." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "oauthlib" +version = "3.2.0" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] + name = "packaging" version = "21.3" description = "Core utilities for Python packages" @@ -240,6 +586,22 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "portalocker" +version = "2.4.0" +description = "Wraps the portalocker recipe for easy usage" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "sphinx (>=3.0.3)", "pytest-mypy (>=0.8.0)", "redis"] + [[package]] name = "pre-commit" version = "2.17.0" @@ -257,6 +619,17 @@ pyyaml = ">=5.1" toml = "*" virtualenv = ">=20.0.8" +[[package]] +name = "psutil" +version = "5.9.0" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] + [[package]] name = "py" version = "1.11.0" @@ -265,6 +638,17 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pyarrow" +version = "7.0.0" +description = "Python library for Apache Arrow" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +numpy = ">=1.16.6" + [[package]] name = "pycodestyle" version = "2.8.0" @@ -273,6 +657,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pydantic" version = "1.9.0" @@ -296,6 +688,23 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pyjwt" +version = "2.3.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.3.1)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] + [[package]] name = "pymgclient" version = "1.2.0" @@ -389,6 +798,25 @@ python-versions = "*" [package.dependencies] pytest = ">=3.6.0" +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pywin32" +version = "227" +description = "Python for Window Extensions" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "pyyaml" version = "6.0" @@ -397,6 +825,39 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + [[package]] name = "six" version = "1.16.0" @@ -437,6 +898,19 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + [[package]] name = "virtualenv" version = "20.13.0" @@ -456,6 +930,32 @@ six = ">=1.9.0,<2" docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] +[[package]] +name = "yarl" +version = "1.7.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "websocket-client" +version = "1.2.3" +description = "WebSocket client for Python with low level API options" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "zipp" version = "3.7.0" @@ -471,9 +971,102 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "4fdf3f41430516cfd90e0be5219ff3ca2e574243d0c2ff862f5c3c3e8fa92ec7" +content-hash = "a0f72e4b4eb5755212b6ee6cfdf36f58c69020d10d7b1d1ac0a7d172f152f6d2" [metadata.files] +adal = [ + {file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"}, + {file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"}, +] +adlfs = [ + {file = "adlfs-2022.2.0.tar.gz", hash = "sha256:8543d29ce5b994d49831ad5f86b76b90809a540302b4f24e027591b740127942"}, +] +aiohttp = [ + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, + {file = 
"aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, + {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, + {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, + {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, + {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, + {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, + {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, + {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, + {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, + {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, +] +aiosignal = [ + {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, + {file = "aiosignal-1.2.0.tar.gz", hash = 
"sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, +] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +asynctest = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -482,14 +1075,90 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +azure-core = [ + {file = "azure-core-1.23.0.zip", hash = "sha256:a56a6f720d0948d3f3e4a25a5fe46df2f1b7f865c358d74e2ce47dbb49262608"}, + {file = "azure_core-1.23.0-py3-none-any.whl", hash = "sha256:23c1389a115c328878c4eface3ca3899c2468313ea6f883f2347d6924cd887b2"}, +] +azure-datalake-store = [ + {file = "azure-datalake-store-0.0.52.tar.gz", hash = "sha256:4198ddb32614d16d4502b43d5c9739f81432b7e0e4d75d30e05149fe6007fea2"}, + {file = "azure_datalake_store-0.0.52-py2.py3-none-any.whl", hash = "sha256:aaed72b9c856824aeab554f4dbe0ef2c6d0ff36700bdd8b93d8298793117c48e"}, +] +azure-identity = [ + {file = "azure-identity-1.8.0.zip", hash = "sha256:020ff0e47157852e4aac8a3adb06841827147f27a94cbe74a904425d8e62d93c"}, + {file = "azure_identity-1.8.0-py3-none-any.whl", hash = "sha256:8d87aff09b8dabe3c99bb934798dcdeb2f2d49614ecc4f0425cc888faafd64ae"}, +] +azure-storage-blob = [ + {file = "azure-storage-blob-12.10.0.zip", hash = "sha256:3c7dc2c93e7ff2a731acd66a36a1f0a6266072b4154deba4894dab891285ea3a"}, + {file = "azure_storage_blob-12.10.0-py3-none-any.whl", hash = "sha256:a70995c4f9310eb704594f30505d1499286b4caac5543a2ebfe84431c4a38b0b"}, +] black = [ {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +cffi = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = 
"cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = 
"cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] 
+charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] click = [ {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, @@ -547,10 +1216,40 @@ coverage = [ {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] +cryptography = [ + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"}, + {file = "cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"}, + {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"}, + {file = "cryptography-36.0.2.tar.gz", hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"}, +] +dacite = [ + {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, + {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, +] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, ] +docker = [ + {file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"}, + {file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, +] filelock = [ {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, @@ -559,10 +1258,79 @@ flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] +frozenlist = [ + {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, + {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, + {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, + {file = 
"frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, + {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, + {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, + {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, + {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, + {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, + {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, + {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, + {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, + {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, + {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, +] +fsspec = [ + {file = "fsspec-2022.2.0-py3-none-any.whl", hash = "sha256:eb9c9d9aee49d23028deefffe53e87c55d3515512c63f57e893710301001449a"}, + {file = "fsspec-2022.2.0.tar.gz", hash = "sha256:20322c659538501f52f6caa73b08b2ff570b7e8ea30a86559721d090e473ad5c"}, +] identify = [ {file = "identify-2.4.5-py2.py3-none-any.whl", hash = "sha256:d27d10099844741c277b45d809bd452db0d70a9b41ea3cd93799ebbbcc6dcb29"}, {file = "identify-2.4.5.tar.gz", hash = "sha256:d11469ff952a4d7fd7f9be520d335dc450f585d474b39b5dfb86a500831ab6c7"}, ] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] importlib-metadata = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, @@ -571,10 +1339,87 @@ iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +isodate = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] +msal = [ + {file = "msal-1.17.0-py2.py3-none-any.whl", hash = "sha256:5a52d78e70d2c451e267c1e8c2342e4c06f495c75c859aeafd9260d3974f09fe"}, + {file = "msal-1.17.0.tar.gz", hash = "sha256:04e3cb7bb75c51f56d290381f23056207df1f3eb594ed03d38551f3b16d2a36e"}, +] +msal-extensions = [ + {file = "msal-extensions-0.3.1.tar.gz", hash = "sha256:d9029af70f2cbdc5ad7ecfed61cb432ebe900484843ccf72825445dbfe62d311"}, + {file = "msal_extensions-0.3.1-py2.py3-none-any.whl", hash = "sha256:89df9c0237e1adf16938fa58575db59c2bb9de04a83ffb0452c8dfc79031f717"}, +] +msrest = [ + {file = "msrest-0.6.21-py2.py3-none-any.whl", hash = "sha256:c840511c845330e96886011a236440fafc2c9aff7b2df9c0a92041ee2dee3782"}, + {file = "msrest-0.6.21.tar.gz", hash = "sha256:72661bc7bedc2dc2040e8f170b6e9ef226ee6d3892e01affd4d26b06474d68d8"}, +] +multidict = [ + {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, + {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, + {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, + {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, + {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, + {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, + {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, + {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, + {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, + {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, + {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, + {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, + {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -587,6 +1432,40 @@ nodeenv = [ {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] +numpy = [ + {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, + {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = 
"sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, + {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, + {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, + {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, + {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, + {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, + {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, + {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, +] +oauthlib = [ + {file = "oauthlib-3.2.0-py3-none-any.whl", hash = "sha256:6db33440354787f9b7f3a6dbd4febf5d0f93758354060e802f6c06cb493022fe"}, + {file = "oauthlib-3.2.0.tar.gz", hash = "sha256:23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -603,18 +1482,92 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +portalocker = [ + {file = "portalocker-2.4.0-py2.py3-none-any.whl", hash = "sha256:b092f48e1e30a234ab3dd1cfd44f2f235e8a41f4e310e463fc8d6798d1c3c235"}, + {file = "portalocker-2.4.0.tar.gz", hash = "sha256:a648ad761b8ea27370cb5915350122cd807b820d2193ed5c9cc28f163df637f4"}, +] pre-commit = [ {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, ] +psutil = [ + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, + {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, + {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, + {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, + {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, + {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, + {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, + {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, + {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, + {file = 
"psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, + {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, + {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, + {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, + {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, + {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, + {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, + {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, + {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, + {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pyarrow = [ + {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:0f15213f380539c9640cb2413dc677b55e70f04c9e98cfc2e1d8b36c770e1036"}, + {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:29c4e3b3be0b94d07ff4921a5e410fc690a3a066a850a302fc504de5fc638495"}, + {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8a9bfc8a016bcb8f9a8536d2fa14a890b340bc7a236275cd60fd4fb8b93ff405"}, + {file = "pyarrow-7.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:49d431ed644a3e8f53ae2bbf4b514743570b495b5829548db51610534b6eeee7"}, + {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:aa6442a321c1e49480b3d436f7d631c895048a16df572cf71c23c6b53c45ed66"}, + {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b01a23cb401750092c6f7c4dcae67cd8fd6b99ae710e26f654f23508f25f25"}, + {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f10928745c6ff66e121552731409803bed86c66ac79c64c90438b053b5242c5"}, + {file = "pyarrow-7.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:759090caa1474cafb5e68c93a9bd6cb45d8bb8e4f2cad2f1a0cc9439bae8ae88"}, + {file = "pyarrow-7.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e3fe34bcfc28d9c4a747adc3926d2307a04c5c50b89155946739515ccfe5eab0"}, + {file = "pyarrow-7.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:040dce5345603e4e621bcf4f3b21f18d557852e7b15307e559bb14c8951c8714"}, + {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed4b647c3345ae3463d341a9d28d0260cd302fb92ecf4e2e3e0f1656d6e0e55c"}, + {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7fecd5d5604f47e003f50887a42aee06cb8b7bf8e8bf7dc543a22331d9ba832"}, + {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f2d00b892fe865e43346acb78761ba268f8bb1cbdba588816590abcb780ee3d"}, + {file = "pyarrow-7.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f439f7d77201681fd31391d189aa6b1322d27c9311a8f2fce7d23972471b02b6"}, + {file = "pyarrow-7.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:3e06b0e29ce1e32f219c670c6b31c33d25a5b8e29c7828f873373aab78bf30a5"}, + {file = "pyarrow-7.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:13dc05bcf79dbc1bd2de1b05d26eb64824b85883d019d81ca3c2eca9b68b5a44"}, + {file = "pyarrow-7.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:06183a7ff2b0c030ec0413fc4dc98abad8cf336c78c280a0b7f4bcbebb78d125"}, + {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:702c5a9f960b56d03569eaaca2c1a05e8728f05ea1a2138ef64234aa53cd5884"}, + {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7313038203df77ec4092d6363dbc0945071caa72635f365f2b1ae0dd7469865"}, + {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e87d1f7dc7a0b2ecaeb0c7a883a85710f5b5626d4134454f905571c04bc73d5a"}, + {file = "pyarrow-7.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ba69488ae25c7fde1a2ae9ea29daf04d676de8960ffd6f82e1e13ca945bb5861"}, + {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:11a591f11d2697c751261c9d57e6e5b0d38fdc7f0cc57f4fd6edc657da7737df"}, + {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:6183c700877852dc0f8a76d4c0c2ffd803ba459e2b4a452e355c2d58d48cf39f"}, + {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1748154714b543e6ae8452a68d4af85caf5298296a7e5d4d00f1b3021838ac6"}, + {file = "pyarrow-7.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcc8f934c7847a88f13ec35feecffb61fe63bb7a3078bd98dd353762e969ce60"}, + {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:759f59ac77b84878dbd54d06cf6df74ff781b8e7cf9313eeffbb5ec97b94385c"}, + {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3e3f93ac2993df9c5e1922eab7bdea047b9da918a74e52145399bc1f0099a3"}, + {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:306120af554e7e137895254a3b4741fad682875a5f6403509cd276de3fe5b844"}, + {file = "pyarrow-7.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:087769dac6e567d58d59b94c4f866b3356c00d3db5b261387ece47e7324c2150"}, + {file = "pyarrow-7.0.0.tar.gz", hash = "sha256:da656cad3c23a2ebb6a307ab01d35fce22f7850059cffafcb90d12590f8f4f38"}, +] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pydantic = [ {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, @@ -656,6 +1609,10 @@ pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] +pyjwt = [ + {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, + {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, +] pymgclient = [ {file = "pymgclient-1.2.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:908b039188c31b0fc6117aacc040c0ed33067a9e09cb14c934eecf73e429f961"}, {file = "pymgclient-1.2.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:a78a48c84ff77733fde72c29c2fbbafa3fb81150171ef336976c18e17a655b18"}, @@ -693,6 +1650,24 @@ pytest-timeout = [ {file = "pytest-timeout-1.4.2.tar.gz", hash = "sha256:20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76"}, {file = "pytest_timeout-1.4.2-py2.py3-none-any.whl", hash = "sha256:541d7aa19b9a6b4e475c759fd6073ef43d7cdc9a92d95644c260076eb257a063"}, ] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pywin32 = [ + {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"}, + {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"}, + {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"}, + {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"}, + {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"}, + {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"}, + {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"}, + {file = 
"pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"}, + {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"}, + {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"}, + {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"}, + {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"}, +] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -728,6 +1703,14 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +requests-oauthlib = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -770,10 +1753,92 @@ typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] virtualenv = [ {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, ] +yarl = [ + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, + {file = 
"yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, + {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, + {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, + {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, + {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = 
"sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, + {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, + {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, + {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, + {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, + {file = 
"yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, + {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, + {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, + {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, + {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, + {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, +] +websocket-client = [ + {file = "websocket-client-1.2.3.tar.gz", hash = "sha256:1315816c0acc508997eb3ae03b9d3ff619c9d12d544c9a9b553704b1cc4f6af5"}, + {file = 
"websocket_client-1.2.3-py3-none-any.whl", hash = "sha256:2eed4cc58e4d65613ed6114af2f380f7910ff416fc8c46947f6e76b6815f56c0"}, +] zipp = [ {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, diff --git a/pydoc-markdown.yml b/pydoc-markdown.yml index 7c9f94e0..aa7ed5c9 100644 --- a/pydoc-markdown.yml +++ b/pydoc-markdown.yml @@ -1,5 +1,13 @@ loaders: - type: python + search_path: [.] + modules: + - gqlalchemy.disk_storage + - gqlalchemy.memgraph + - gqlalchemy.models + - gqlalchemy.query_builder + - gqlalchemy.transformations + - gqlalchemy.utilities processors: - type: filter skip_empty_modules: true diff --git a/pyproject.toml b/pyproject.toml index cbf4fe7d..57d1f95f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,11 @@ python = "^3.7" pymgclient = "1.2.0" networkx = "^2.5.1" pydantic = "^1.8.2" +docker = "^5.0.3" +psutil = "^5.9.0" +pyarrow = "^7.0.0" +dacite = "^1.6.0" +adlfs = "^2022.2.0" [tool.poetry.dev-dependencies] black = "^21.5b1" diff --git a/tests/conftest.py b/tests/conftest.py index 4520d391..a6809b38 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,7 +28,21 @@ def memgraph() -> Memgraph: memgraph.ensure_indexes([]) memgraph.ensure_constraints([]) memgraph.drop_database() - return memgraph + + yield memgraph + + memgraph.ensure_indexes([]) + memgraph.ensure_constraints([]) + + +@pytest.fixture +def memgraph_without_dropping_constraints() -> Memgraph: + memgraph = Memgraph() + memgraph.drop_database() + + yield memgraph + + memgraph.drop_database() @pytest.fixture diff --git a/tests/intergration/__init__.py b/tests/docs/__init__.py similarity index 100% rename from tests/intergration/__init__.py rename to tests/docs/__init__.py diff --git a/tests/docs/test_ogm.py b/tests/docs/test_ogm.py new file mode 100644 index 00000000..c28e457c --- /dev/null +++ b/tests/docs/test_ogm.py @@ -0,0 +1,203 @@ +from gqlalchemy import Memgraph, Node, Relationship, Field, match +from typing import Optional + +db = Memgraph() + + +class UserSave(Node): + id: str = Field(index=True, exist=True, unique=True, db=db) + username: str = Field(index=True, exist=True, unique=True, db=db) + + +class UserMap(Node): + id: str = Field(index=True, exist=True, unique=True, db=db) + + +class Streamer(UserMap): + id: str = Field(index=True, exist=True, unique=True, db=db) + username: Optional[str] = Field(index=True, exist=True, unique=True, db=db) + url: Optional[str] = Field() + followers: Optional[int] = Field() + createdAt: Optional[str] = Field() + totalViewCount: Optional[int] = Field() + description: Optional[str] = Field() + + +class StreamerLoad(Node): + id: str = Field(index=True, unique=True, db=db) + name: Optional[str] = Field(index=True, exists=True, unique=True, db=db) + + +class Team(Node): + name: str = Field(unique=True, db=db) + + +class IsPartOf(Relationship, type="IS_PART_OF"): + date: Optional[str] = Field() + + +class Language(Node): + name: str = Field(unique=True, db=db) + + +class ChatsWith(Relationship, type="CHATS_WITH"): + lastChatted: Optional[str] = Field() + + +class Speaks(Relationship, type="SPEAKS"): + pass + + +class SpeaksTemp(Relationship, type="SPEAKSTEMP"): + pass + + +class TestMapNodesAndRelationships: + def test_node_mapping(self): + streamer = Streamer( + id="7", + username="Ivan", + url="myurl.com", + followers=888, + createdAt="2022-26-01", + totalViewCount=6666, 
+ description="Hi, I am streamer!", + ).save(db) + + result = next( + match().node("Streamer", variable="s").where(item="s.id", operator="=", literal="7").return_().execute() + )["s"] + + assert result.id == streamer.id + assert result.username == streamer.username + assert result.url == streamer.url + assert result.followers == streamer.followers + assert result.createdAt == streamer.createdAt + assert result.totalViewCount == streamer.totalViewCount + assert result.description == streamer.description + + def test_relationship_mapping(self): + streamer_1 = Streamer( + id="8", + username="Kate", + url="myurl.com", + followers=888, + createdAt="2022-26-01", + totalViewCount=6666, + description="Hi, I am streamer!", + ).save(db) + streamer_2 = Streamer( + id="9", + username="Mislav", + url="myurl.com", + followers=888, + createdAt="2022-26-01", + totalViewCount=6666, + description="Hi, I am streamer!", + ).save(db) + chats_with = ChatsWith( + _start_node_id=streamer_1._id, _end_node_id=streamer_2._id, lastChatted="2021-04-25" + ).save(db) + + result = next(match().node().to("CHATS_WITH", variable="c").node().return_().execute())["c"] + + assert result._start_node_id == streamer_1._id + assert result._end_node_id == streamer_2._id + assert result.lastChatted == chats_with.lastChatted + assert result._type == chats_with._type + + +class TestSaveNodesAndRelationships: + def test_node_saving_1(self): + user = UserSave(id="3", username="John").save(db) + language = Language(name="en").save(db) + + result = next( + match().node("UserSave", variable="u").where(item="u.id", operator="=", literal="3").return_().execute() + )["u"] + + assert result.id == user.id + assert result.username == user.username + + result_2 = next(match().node("Language", variable="l").return_().execute())["l"] + + assert result_2._labels == language._labels + + def test_node_saving_2(self): + user = UserSave(id="4", username="James") + language = Language(name="hr") + + db.save_node(user) + db.save_node(language) + + result = next( + match().node("UserSave", variable="u").where(item="u.id", operator="=", literal="4").return_().execute() + )["u"] + + assert result.id == user.id + assert result.username == user.username + + result_2 = next(match().node("Language", variable="l").return_().execute())["l"] + + assert result_2._labels == language._labels + + def test_relationship_saving_1(self): + user = UserSave(id="55", username="Jimmy").save(db) + language = Language(name="ko").save(db) + + speaks_rel = Speaks(_start_node_id=user._id, _end_node_id=language._id).save(db) + + result = next(match().node().to("SPEAKS", variable="s").node().return_().execute())["s"] + + assert result._start_node_id == user._id + assert result._end_node_id == language._id + assert result._type == speaks_rel._type + + def test_relationship_saving_2(self): + user = UserSave(id="35", username="Jessica").save(db) + language = Language(name="de").save(db) + + speaks_rel = SpeaksTemp(_start_node_id=user._id, _end_node_id=language._id) + db.save_relationship(speaks_rel) + + result = next(match().node().to("SPEAKSTEMP", variable="s").node().return_().execute())["s"] + + assert result._start_node_id == user._id + assert result._end_node_id == language._id + assert result._type == speaks_rel._type + + +class TestLoadNodesAndRelationships: + def test_node_load(self): + streamer = StreamerLoad(name="Jack", id="54").save(db) + team = Team(name="Warriors").save(db) + + loaded_streamer = StreamerLoad(id="54").load(db=db) + loaded_team = 
Team(name="Warriors").load(db=db) + + assert streamer.name == loaded_streamer.name + assert streamer.id == loaded_streamer.id + assert streamer._labels == {"StreamerLoad"} + assert streamer._labels == loaded_streamer._labels + assert team.name == loaded_team.name + assert team._labels == {"Team"} + assert team._labels == loaded_team._labels + + is_part_of = IsPartOf(_start_node_id=loaded_streamer._id, _end_node_id=loaded_team._id, date="2021-04-26").save( + db + ) + + result = next(match().node().to("IS_PART_OF", variable="i").node().return_().execute())["i"] + + assert result._start_node_id == streamer._id + assert result._end_node_id == team._id + assert result._type == is_part_of._type + + def test_relationship_load(self): + streamer = StreamerLoad(name="Hayley", id="36").save(db) + team = Team(name="Lakers").save(db) + is_part_of = IsPartOf(_start_node_id=streamer._id, _end_node_id=team._id, date="2021-04-20").save(db) + loaded_is_part_of = IsPartOf(_start_node_id=streamer._id, _end_node_id=team._id).load(db) + + assert loaded_is_part_of._type == "IS_PART_OF" + assert loaded_is_part_of._type == is_part_of._type diff --git a/tests/docs/test_query_builder.py b/tests/docs/test_query_builder.py new file mode 100644 index 00000000..d22bbaaa --- /dev/null +++ b/tests/docs/test_query_builder.py @@ -0,0 +1,176 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest.mock import patch + +from gqlalchemy import match, call, create, merge +from gqlalchemy.memgraph import Memgraph + + +def test_call_procedures_1(memgraph): + query_builder = call("pagerank.get").yield_().return_() + expected_query = " CALL pagerank.get() YIELD * RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_call_procedures_2(memgraph): + query_builder = ( + call("json_util.load_from_url", "https://some-url.com") + .yield_({"objects": "objects"}) + .return_({"objects": "objects"}) + ) + + expected_query = " CALL json_util.load_from_url(https://some-url.com) YIELD objects RETURN objects " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_create_nodes_relationships_1(memgraph): + query_builder = create().node(labels="Person", name="Ron") + + expected_query = " CREATE (:Person {name: 'Ron'})" + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_create_nodes_relationships_2(memgraph): + query_builder = merge().node(labels="Person", name="Leslie") + + expected_query = " MERGE (:Person {name: 'Leslie'})" + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_create_nodes_relationships_3(memgraph): + query_builder = ( + create().node(labels="Person", name="Leslie").to(edge_label="FRIENDS_WITH").node(labels="Person", name="Ron") + ) + + expected_query = " CREATE (:Person {name: 'Leslie'})-[:FRIENDS_WITH]->(:Person {name: 'Ron'})" + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_delete_remove_objects_1(memgraph): + query_builder = match().node("Person", variable="p").delete(["p"]) + + expected_query = " MATCH (p:Person) DELETE p " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_delete_remove_objects_2(memgraph): + query_builder = match().node("Person").to("FRIENDS_WITH", variable="f").node("Person").delete(["f"]) + + expected_query = " MATCH (:Person)-[f:FRIENDS_WITH]->(:Person) DELETE f " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_delete_remove_objects_3(memgraph): + query_builder = match().node("Person", variable="p").remove(["p.name"]) + + expected_query = " MATCH (p:Person) REMOVE p.name " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_filter_data_1(memgraph): + query_builder = ( + match() + .node("Person", variable="p1") + .to("FRIENDS_WITH") + .node("Person", variable="p2") + .where(item="n.name", operator="=", literal="Ron") + .or_where(item="m.id", operator="=", literal=0) + .return_() + ) + + expected_query = " MATCH (p1:Person)-[:FRIENDS_WITH]->(p2:Person) WHERE n.name = 'Ron' OR m.id = 0 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_return_results_1(memgraph): + query_builder = 
match().node(labels="Person", variable="p").return_() + + expected_query = " MATCH (p:Person) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_return_results_2(memgraph): + query_builder = ( + match().node(labels="Person", variable="p1").to().node(labels="Person", variable="p2").return_({"p1": "p1"}) + ) + + expected_query = " MATCH (p1:Person)-[]->(p2:Person) RETURN p1 " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_return_results_3(memgraph): + query_builder = match().node(labels="Person", variable="p").return_().limit(10) + + expected_query = " MATCH (p:Person) RETURN * LIMIT 10 " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_return_results_4(memgraph): + query_builder = match().node(labels="Person", variable="p").return_({"p": "p"}).order_by("p.name DESC") + + expected_query = " MATCH (p:Person) RETURN p ORDER BY p.name DESC " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/intergration/test_constraints.py b/tests/integration/test_constraints.py similarity index 62% rename from tests/intergration/test_constraints.py rename to tests/integration/test_constraints.py index 37b0492c..249c6908 100644 --- a/tests/intergration/test_constraints.py +++ b/tests/integration/test_constraints.py @@ -12,7 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from gqlalchemy import MemgraphConstraintExists, MemgraphConstraintUnique
+from gqlalchemy import Field, MemgraphConstraintExists, MemgraphConstraintUnique, Memgraph, Node
+
+
+db = Memgraph()
+
+
+def test_exists_attr(memgraph_without_dropping_constraints):
+    class Person(Node):
+        first_name: str = Field(index=True, db=db)
+        year: int = Field(exists=True, db=db)
+        person_age: int = Field(unique=True, db=db)
+        hair: str = Field(exists=True, db=db)
+        eyes: str = Field(unique=True, db=db)
+        gender: str = Field(exists=True, unique=True, db=db)
+        height: int = Field(unique=True, db=db)
+        weight: int = Field(unique=True, exists=True, db=db)
+        nationality: str = Field(exists=True, unique=True, db=db)
+        state: str = Field(exists=False, unique=False, db=db)
+
+    exists_constraints = {
+        MemgraphConstraintExists("Person", "year"),
+        MemgraphConstraintExists("Person", "gender"),
+        MemgraphConstraintExists("Person", "hair"),
+        MemgraphConstraintExists("Person", "weight"),
+        MemgraphConstraintExists("Person", "nationality"),
+    }
+    actual_exists_constraints = memgraph_without_dropping_constraints.get_exists_constraints()
+    assert set(actual_exists_constraints) == exists_constraints
+
+
+def test_unique_attr(memgraph_without_dropping_constraints):
+    class Person(Node):
+        first_name: str = Field(index=True, db=db)
+        year: int = Field(exists=True, db=db)
+        person_age: int = Field(unique=True, db=db)
+        hair: str = Field(exists=True, db=db)
+        eyes: str = Field(unique=True, db=db)
+        gender: str = Field(exists=True, unique=True, db=db)
+        height: int = Field(unique=True, db=db)
+        weight: int = Field(unique=True, exists=True, db=db)
+        nationality: str = Field(exists=True, unique=True, db=db)
+        state: str = Field(exists=False, unique=False, db=db)
+
+    unique_constraints = {
+        MemgraphConstraintUnique("Person", ("person_age",)),
+        MemgraphConstraintUnique("Person", ("eyes",)),
+        MemgraphConstraintUnique("Person", ("gender",)),
+        MemgraphConstraintUnique("Person", ("height",)),
+        MemgraphConstraintUnique("Person", ("weight",)),
+        MemgraphConstraintUnique("Person", ("nationality",)),
+    }
+    actual_unique_constraints = memgraph_without_dropping_constraints.get_unique_constraints()
+    assert set(actual_unique_constraints) == unique_constraints
 
 
 def test_create_constraint_exist(memgraph):
diff --git a/tests/intergration/test_index.py b/tests/integration/test_index.py
similarity index 51%
rename from tests/intergration/test_index.py
rename to tests/integration/test_index.py
index 913907b5..d8aff558 100644
--- a/tests/intergration/test_index.py
+++ b/tests/integration/test_index.py
@@ -13,6 +13,92 @@
 # limitations under the License.
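+# The index tests below cover both ways of requesting an index: on the label
+# itself via the class keyword (class Animal(Node, index=True, db=memgraph))
+# and on a single property via Field(index=True); every variant is checked
+# against memgraph.get_indexes().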
from gqlalchemy.models import MemgraphIndex +from gqlalchemy import Field, Node +import pytest +from gqlalchemy.exceptions import GQLAlchemyDatabaseMissingInNodeClassError + + +def test_index_label(memgraph): + class Animal(Node, index=True, db=memgraph): + name: str + + actual_index = memgraph.get_indexes() + + assert set(actual_index) == {MemgraphIndex("Animal")} + + +def test_index_property(memgraph): + class Human(Node): + id: str = Field(index=True, db=memgraph) + + actual_index = memgraph.get_indexes() + + assert set(actual_index) == {MemgraphIndex("Human", "id")} + + +def test_missing_db_in_node_class(memgraph): + with pytest.raises(GQLAlchemyDatabaseMissingInNodeClassError): + + class User(Node, index=True): + id: str + + +def test_db_in_node_class(memgraph): + class User(Node, db=memgraph): + id: str = Field(index=True) + + actual_index = memgraph.get_indexes() + assert set(actual_index) == {MemgraphIndex("User", "id")} + + +def test_db_in_node_and_property(memgraph): + class User(Node, db=memgraph): + id: str = Field(index=True, db=memgraph) + + actual_index = memgraph.get_indexes() + assert set(actual_index) == {MemgraphIndex("User", "id")} + + +def test_index_on_label_and_property(memgraph): + class User(Node, index=True, db=memgraph): + id: str = Field(index=True, db=memgraph) + + actual_index = memgraph.get_indexes() + assert set(actual_index) == {MemgraphIndex("User", "id"), MemgraphIndex("User")} + + +def test_false_index_in_node_class(memgraph): + class User(Node, index=False, db=memgraph): + id: str + + actual_index = memgraph.get_indexes() + assert set(actual_index) == set() + + +def test_false_index_no_db_in_node_class(memgraph): + class User(Node, index=False): + id: str + + actual_index = memgraph.get_indexes() + assert set(actual_index) == set() + + +def test_false_index_with_db_in_node_class(memgraph): + class User(Node, index=False, db=memgraph): + id: str + + actual_index = memgraph.get_indexes() + assert set(actual_index) == set() + + +def test_index_attr(memgraph): + class Example(Node): + first_name: str = Field(index=True, db=memgraph) + last_name: str = Field(index=False, db=memgraph) + + actual_index = memgraph.get_indexes() + + assert set(actual_index) == {MemgraphIndex("Example", "first_name")} def test_no_index(memgraph): diff --git a/tests/intergration/test_memgraph.py b/tests/integration/test_memgraph.py similarity index 100% rename from tests/intergration/test_memgraph.py rename to tests/integration/test_memgraph.py diff --git a/tests/intergration/test_networkx.py b/tests/integration/test_networkx.py similarity index 100% rename from tests/intergration/test_networkx.py rename to tests/integration/test_networkx.py diff --git a/tests/intergration/test_stream.py b/tests/integration/test_stream.py similarity index 86% rename from tests/intergration/test_stream.py rename to tests/integration/test_stream.py index 7662fdda..30fd75a4 100644 --- a/tests/intergration/test_stream.py +++ b/tests/integration/test_stream.py @@ -78,12 +78,25 @@ def test_kafka_stream_extended_cypher(): transform="kafka_stream.transform", consumer_group="my_group", batch_interval="9999", - bootstrap_servers="'localhost:9092'", + bootstrap_servers="localhost:9092", ) query = "CREATE KAFKA STREAM test_stream TOPICS topic TRANSFORM kafka_stream.transform CONSUMER_GROUP my_group BATCH_INTERVAL 9999 BOOTSTRAP_SERVERS 'localhost:9092';" assert kafka_stream.to_cypher() == query +def test_kafka_stream_extended_cypher_list(): + kafka_stream = MemgraphKafkaStream( + name="test_stream", + 
topics=["topic"], + transform="kafka_stream.transform", + consumer_group="my_group", + batch_interval="9999", + bootstrap_servers=["localhost:9092", "localhost:9093", "localhost:9094"], + ) + query = "CREATE KAFKA STREAM test_stream TOPICS topic TRANSFORM kafka_stream.transform CONSUMER_GROUP my_group BATCH_INTERVAL 9999 BOOTSTRAP_SERVERS 'localhost:9092', 'localhost:9093', 'localhost:9094';" + assert kafka_stream.to_cypher() == query + + def test_pulsar_stream_extended_cypher(): pulsar_stream = MemgraphPulsarStream( name="test_stream", diff --git a/tests/integration/test_trigger.py b/tests/integration/test_trigger.py new file mode 100644 index 00000000..d82664bd --- /dev/null +++ b/tests/integration/test_trigger.py @@ -0,0 +1,111 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import pytest +from gqlalchemy import Memgraph, MemgraphTrigger +from gqlalchemy.models import TriggerEventObject, TriggerEventType, TriggerExecutionPhase + + +@pytest.fixture +def cleanup_trigger(): + memgraph = Memgraph() + memgraph.drop_triggers() + yield + memgraph.drop_triggers() + + +@pytest.mark.usefixtures("cleanup_trigger") +def test_create_trigger_without_event_object(memgraph: Memgraph): + trigger = MemgraphTrigger( + name="test_trigger", + event_type=TriggerEventType.CREATE, + execution_phase=TriggerExecutionPhase.BEFORE, + statement="CREATE (:Node)", + ) + + memgraph.create_trigger(trigger) + assert any(map(lambda t: t.name == "test_trigger", memgraph.get_triggers())) + + +def test_drop_trigger(memgraph: Memgraph): + trigger = MemgraphTrigger( + name="test_trigger", + event_type=TriggerEventType.CREATE, + execution_phase=TriggerExecutionPhase.BEFORE, + statement="CREATE (:Node)", + ) + + memgraph.create_trigger(trigger) + memgraph.drop_trigger(trigger) + assert len(memgraph.get_triggers()) == 0 + + +def test_trigger_cypher(): + trigger = MemgraphTrigger( + name="test_trigger", + event_type=TriggerEventType.CREATE, + execution_phase=TriggerExecutionPhase.BEFORE, + statement="CREATE (:Node)", + ) + query = "CREATE TRIGGER test_trigger ON CREATE BEFORE COMMIT EXECUTE CREATE (:Node);" + assert trigger.to_cypher() == query + + +@pytest.mark.usefixtures("cleanup_trigger") +def test_create_trigger_with_event_object(memgraph: Memgraph): + trigger = MemgraphTrigger( + name="test_trigger", + event_type=TriggerEventType.CREATE, + event_object=TriggerEventObject.NODE, + execution_phase=TriggerExecutionPhase.AFTER, + statement="CREATE (:Node)", + ) + + memgraph.create_trigger(trigger) + assert any(map(lambda t: t.name == "test_trigger", memgraph.get_triggers())) + + +def test_trigger_with_event_object_cypher(memgraph: Memgraph): + trigger = MemgraphTrigger( + name="test_trigger", + event_type=TriggerEventType.CREATE, + event_object=TriggerEventObject.NODE, + execution_phase=TriggerExecutionPhase.AFTER, + statement="CREATE (:Node)", + ) + + query = "CREATE TRIGGER test_trigger ON () CREATE AFTER COMMIT EXECUTE CREATE (:Node);" + assert 
trigger.to_cypher() == query + + +@pytest.mark.usefixtures("cleanup_trigger") +def test_create_trigger_without_on(memgraph: Memgraph): + trigger = MemgraphTrigger( + name="test_trigger", + execution_phase=TriggerExecutionPhase.BEFORE, + statement="CREATE (n:Node)", + ) + + memgraph.create_trigger(trigger) + assert any(map(lambda t: t.name == "test_trigger", memgraph.get_triggers())) + + +def test_trigger_without_on_cypher(): + trigger = MemgraphTrigger( + name="test_trigger", + execution_phase=TriggerExecutionPhase.BEFORE, + statement="CREATE (:Node)", + ) + + query = "CREATE TRIGGER test_trigger BEFORE COMMIT EXECUTE CREATE (:Node);" + assert trigger.to_cypher() == query diff --git a/tests/intergration/test_trigger.py b/tests/intergration/test_trigger.py deleted file mode 100644 index cd09011f..00000000 --- a/tests/intergration/test_trigger.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import pytest -from gqlalchemy import Memgraph, MemgraphTrigger -from gqlalchemy.models import TriggerEventType, TriggerEventObject, TriggerExecutionPhase - - -@pytest.fixture -def cleanup_trigger(): - yield - memgraph = Memgraph() - memgraph.execute("DROP TRIGGER test_trigger;") - - -@pytest.mark.usefixtures("cleanup_trigger") -def test_create_get_trigger(memgraph: Memgraph): - trigger = MemgraphTrigger( - name="test_trigger", - event_type=TriggerEventType.CREATE, - event_object=TriggerEventObject.ALL, - execution_phase=TriggerExecutionPhase.BEFORE, - statement="CREATE (:Node)", - ) - - memgraph.create_trigger(trigger) - assert any(map(lambda t: t["trigger name"] == "test_trigger", memgraph.get_triggers())) - - -def test_drop_trigger(memgraph: Memgraph): - trigger = MemgraphTrigger( - name="test_trigger", - event_type=TriggerEventType.CREATE, - event_object=TriggerEventObject.ALL, - execution_phase=TriggerExecutionPhase.BEFORE, - statement="CREATE (:Node)", - ) - - memgraph.create_trigger(trigger) - memgraph.drop_trigger(trigger) - assert len(memgraph.get_triggers()) == 0 - - -def test_trigger_cypher(): - trigger = MemgraphTrigger( - name="test_trigger", - event_type=TriggerEventType.CREATE, - event_object=TriggerEventObject.ALL, - execution_phase=TriggerExecutionPhase.BEFORE, - statement="CREATE (:Node)", - ) - query = "CREATE TRIGGER test_trigger ON CREATE BEFORE COMMIT EXECUTE CREATE (:Node);" - assert trigger.to_cypher() == query diff --git a/tests/loaders/__init__.py b/tests/loaders/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/tests/loaders/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/loaders/data/example.csv b/tests/loaders/data/example.csv new file mode 100644 index 00000000..d39e4f85 --- /dev/null +++ b/tests/loaders/data/example.csv @@ -0,0 +1,4 @@ +name,surname,grade +Ivan,Horvat,4 +Marko,Andric,5 +Luka,Lukic,3 diff --git a/tests/loaders/data/example.feather b/tests/loaders/data/example.feather new file mode 100644 index 00000000..574ffe52 Binary files /dev/null and b/tests/loaders/data/example.feather differ diff --git a/tests/loaders/data/example.orc b/tests/loaders/data/example.orc new file mode 100644 index 00000000..9325f0ac Binary files /dev/null and b/tests/loaders/data/example.orc differ diff --git a/tests/loaders/data/example.parquet b/tests/loaders/data/example.parquet new file mode 100644 index 00000000..a67114a1 Binary files /dev/null and b/tests/loaders/data/example.parquet differ diff --git a/tests/loaders/test_loaders.py b/tests/loaders/test_loaders.py new file mode 100644 index 00000000..eeb28674 --- /dev/null +++ b/tests/loaders/test_loaders.py @@ -0,0 +1,131 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
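+# These tests run against the small fixtures in tests/loaders/data
+# (example.csv with name,surname,grade rows, plus .feather/.orc/.parquet
+# variants assumed to hold the same data); all importers share one
+# data_configuration that maps the "example" table to the PERSON label and
+# indexes its "name" column.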
+
+import platform
+import pytest
+
+from gqlalchemy.loaders import (
+    CSVLocalFileSystemImporter,
+    DataLoader,
+    FeatherLocalFileSystemImporter,
+    FileSystemHandler,
+    NameMapper,
+    ORCLocalFileSystemImporter,
+    ParquetLocalFileSystemImporter,
+)
+
+
+class TestFileSystemHandler(FileSystemHandler):
+    def __init__(self) -> None:
+        super().__init__(fs=None)
+
+    def get_path(self):
+        pass
+
+
+class TestDataLoader(DataLoader):
+    def __init__(self, file_system_handler: FileSystemHandler) -> None:
+        super().__init__(file_extension="none", file_system_handler=file_system_handler)
+        self.num = 5
+
+    def load_data(self, collection_name: str, is_cross_table: bool = False) -> None:
+        self.num = 42
+
+
+@pytest.fixture
+def dummy_loader():
+    return TestDataLoader(TestFileSystemHandler())
+
+
+def test_name_mapper_get_label():
+    """Test get_label from the NameMapper class."""
+    mappings = {"individuals": {"label": "INDIVIDUAL"}, "address": {"label": "ADDRESS"}}
+    name_mapper = NameMapper(mappings)
+    label = name_mapper.get_label("individuals")
+
+    assert label == "INDIVIDUAL"
+
+
+def test_name_mapper_get_property_name():
+    """Test get_property_name from the NameMapper class."""
+    mappings = {"individuals": {"label": "INDIVIDUAL"}, "address": {"label": "ADDRESS"}}
+    name_mapper = NameMapper(mappings)
+    property_name = name_mapper.get_property_name("individuals", "label")
+    assert property_name == "label"
+
+
+def test_custom_data_loader(dummy_loader):
+    """Test a custom DataLoader combined with a custom FileSystemHandler."""
+    dummy_loader.load_data("file")
+    assert dummy_loader.num == 42
+
+
+def test_local_table_to_graph_importer_parquet(memgraph):
+    """End-to-end test: import Parquet data from the local file system into Memgraph."""
+    my_configuration = {
+        "indices": {"example": ["name"]},
+        "name_mappings": {"example": {"label": "PERSON"}},
+        "one_to_many_relations": {"example": []},
+    }
+    importer = ParquetLocalFileSystemImporter(
+        path="./tests/loaders/data", data_configuration=my_configuration, memgraph=memgraph
+    )
+
+    importer.translate(drop_database_on_start=True)
+
+
+def test_local_table_to_graph_importer_csv(memgraph):
+    """End-to-end test: import CSV data from the local file system into Memgraph."""
+    my_configuration = {
+        "indices": {"example": ["name"]},
+        "name_mappings": {"example": {"label": "PERSON"}},
+        "one_to_many_relations": {"example": []},
+    }
+    importer = CSVLocalFileSystemImporter(
+        path="./tests/loaders/data", data_configuration=my_configuration, memgraph=memgraph
+    )
+
+    importer.translate(drop_database_on_start=True)
+
+
+def test_local_table_to_graph_importer_orc(memgraph):
+    """End-to-end test: import ORC data from the local file system into Memgraph; on Windows the importer is expected to raise ValueError."""
+    if platform.system() == "Windows":
+        with pytest.raises(ValueError):
+            ORCLocalFileSystemImporter(path="", data_configuration=None)
+    else:
+        my_configuration = {
+            "indices": {"example": ["name"]},
+            "name_mappings": {"example": {"label": "PERSON"}},
+            "one_to_many_relations": {"example": []},
+        }
+        importer = ORCLocalFileSystemImporter(
+            path="./tests/loaders/data", data_configuration=my_configuration, memgraph=memgraph
+        )
+
+        importer.translate(drop_database_on_start=True)
+
+
+def test_local_table_to_graph_importer_feather(memgraph):
+    """End-to-end test: import Feather data from the local file system into Memgraph."""
+    my_configuration = {
+        "indices": {"example": ["name"]},
+        "name_mappings": {"example": {"label": "PERSON"}},
+        
"one_to_many_relations": {"example": []}, + } + importer = FeatherLocalFileSystemImporter( + path="./tests/loaders/data", data_configuration=my_configuration, memgraph=memgraph + ) + + importer.translate(drop_database_on_start=True) diff --git a/tests/memgraph/test_query_builder.py b/tests/memgraph/test_query_builder.py index e0881a97..e21d0ebf 100644 --- a/tests/memgraph/test_query_builder.py +++ b/tests/memgraph/test_query_builder.py @@ -12,11 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. -from unittest.mock import patch - +from gqlalchemy.exceptions import ( + GQLAlchemyLiteralAndExpressionMissingInWhere, + GQLAlchemyExtraKeywordArgumentsInWhere, +) import pytest -from gqlalchemy import InvalidMatchChainException, QueryBuilder, match, call, unwind, with_ +from gqlalchemy import ( + InvalidMatchChainException, + QueryBuilder, + match, + call, + unwind, + with_, + merge, + Node, + Relationship, + Field, +) from gqlalchemy.memgraph import Memgraph +from typing import Optional +from unittest.mock import patch +from gqlalchemy.exceptions import GQLAlchemyMissingOrder, GQLAlchemyOrderByTypeError +from gqlalchemy.query_builder import Order def test_invalid_match_chain_throws_exception(): @@ -24,439 +41,1302 @@ def test_invalid_match_chain_throws_exception(): QueryBuilder().node(":Label", "n").node(":Label", "m").return_() -class TestMatch: - def test_simple_create(self): - query_builder = QueryBuilder().create().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " CREATE (n:L1)-[:TO]->(:L2) RETURN * " +def test_simple_create(memgraph): + query_builder = QueryBuilder().create().node("L1", variable="n").to("TO").node("L2").return_() + expected_query = " CREATE (n:L1)-[:TO]->(:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - mock.assert_called_with(expected_query) + mock.assert_called_with(expected_query) - def test_simple_match(self): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " MATCH (n:L1)-[:TO]->(:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_simple_match(memgraph): + query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2").return_() + expected_query = " MATCH (n:L1)-[:TO]->(:L2) RETURN * " - mock.assert_called_with(expected_query) + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - def test_simple_merge(self): - query_builder = QueryBuilder().merge().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " MERGE (n:L1)-[:TO]->(:L2) RETURN * " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_simple_with_multiple_labels(memgraph): + query_builder = ( + QueryBuilder().match().node(["L1", "L2", "L3"], variable="n").to("TO").node("L2", variable="m").return_() + ) + expected_query = " MATCH (n:L1:L2:L3)-[:TO]->(m:L2) RETURN * " - def test_simple_create_with_variables(self): - query_builder = ( - QueryBuilder().create().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " CREATE 
(n:L1)-[e:TO]->(m:L2) RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + mock.assert_called_with(expected_query) - mock.assert_called_with(expected_query) - def test_simple_match_with_variables(self): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " MATCH (n:L1)-[e:TO]->(m:L2) RETURN * " +def test_multiple_matches(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node("L1", variable="n") + .to("TO") + .node("L2", variable="m") + .match(True) + .node(variable="n") + .to("TO") + .node("L3") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) OPTIONAL MATCH (n)-[:TO]->(:L3) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - mock.assert_called_with(expected_query) + mock.assert_called_with(expected_query) - def test_simple_merge_with_variables(self): - query_builder = ( - QueryBuilder().merge().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " MERGE (n:L1)-[e:TO]->(m:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_with_empty(memgraph): + query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").with_() + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WITH * " - mock.assert_called_with(expected_query) + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() - def test_simple_with_multiple_labels(self): - query_builder = ( - QueryBuilder().match().node(["L1", "L2", "L3"], variable="n").to("TO").node("L2", variable="m").return_() - ) - expected_query = " MATCH (n:L1:L2:L3)-[:TO]->(m:L2) RETURN * " + mock.assert_called_with(expected_query) + + +def test_with(memgraph): + query_builder = QueryBuilder().match().node(variable="n").with_({"n": ""}) + expected_query = " MATCH (n) WITH n " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + mock.assert_called_with(expected_query) + + +def test_union(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(variable="n1", labels="Node1") + .return_({"n1": ""}) + .union(include_duplicates=False) + .match() + .node(variable="n2", labels="Node2") + .return_({"n2": ""}) + ) + expected_query = " MATCH (n1:Node1) RETURN n1 UNION MATCH (n2:Node2) RETURN n2 " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_union_all(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(variable="n1", labels="Node1") + .return_({"n1": ""}) + .union() + .match() + .node(variable="n2", labels="Node2") + .return_({"n2": ""}) + ) + expected_query = " MATCH (n1:Node1) RETURN n1 UNION ALL MATCH (n2:Node2) RETURN n2 " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_delete(memgraph): + query_builder = 
QueryBuilder().match().node(variable="n1", labels="Node1").delete({"n1"}) + expected_query = " MATCH (n1:Node1) DELETE n1 " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() mock.assert_called_with(expected_query) - def test_multiple_matches(self): - query_builder = ( + +def test_simple_merge(memgraph): + query_builder = merge().node("L1", variable="n").to("TO").node("L2") + expected_query = " MERGE (n:L1)-[:TO]->(:L2)" + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_base_merge(memgraph): + query_builder = QueryBuilder().merge().node("L1", variable="n").to("TO").node("L2").return_() + expected_query = " MERGE (n:L1)-[:TO]->(:L2) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_simple_create_with_variables(memgraph): + query_builder = ( + QueryBuilder().create().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() + ) + expected_query = " CREATE (n:L1)-[e:TO]->(m:L2) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_simple_match_with_variables(memgraph): + query_builder = ( + QueryBuilder().match().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() + ) + expected_query = " MATCH (n:L1)-[e:TO]->(m:L2) RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_simple_merge_with_variables(memgraph): + query_builder = merge().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() + expected_query = " MERGE (n:L1)-[e:TO]->(m:L2) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_base_merge_with_variables(memgraph): + query_builder = ( + QueryBuilder().merge().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() + ) + expected_query = " MERGE (n:L1)-[e:TO]->(m:L2) RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_delete_detach(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(variable="n1", labels="Node1") + .to(edge_label="EDGE") + .node(variable="n2", labels="Node2") + .delete(["n1", "n2"], True) + ) + expected_query = " MATCH (n1:Node1)-[:EDGE]->(n2:Node2) DETACH DELETE n1, n2 " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_remove_property(memgraph): + query_builder = QueryBuilder().match().node(variable="n", labels="Node").remove({"n.name"}) + expected_query = " MATCH (n:Node) REMOVE n.name " + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_multiple_merges(memgraph): + query_builder = ( + QueryBuilder() + .merge() + .node("L1", variable="n") + .to("TO") + .node("L2", variable="m") + .merge() + .node(variable="n") + .to("TO") + .node("L3") + .return_() + ) + expected_query = " MERGE 
(n:L1)-[:TO]->(m:L2) MERGE (n)-[:TO]->(:L3) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_load_csv_with_header(memgraph): + query_builder = QueryBuilder().load_csv("path/to/my/file.csv", True, "row").return_() + expected_query = " LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + mock.assert_called_with(expected_query) + + +def test_load_csv_no_header(memgraph): + query_builder = QueryBuilder().load_csv("path/to/my/file.csv", False, "row").return_() + expected_query = " LOAD CSV FROM 'path/to/my/file.csv' NO HEADER AS row RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + mock.assert_called_with(expected_query) + + +def test_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node("L1", variable="n") + .to("TO") + .node("L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_where_not_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator="=", expression="m.name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n.name = m.name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_where_not_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where_not(item="n", operator=":", expression="Node") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n:Node RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( QueryBuilder() .match() - .node("L1", variable="n") - .to("TO") - 
.node("L2", variable="m") - .match(True) - .node(variable="n") - .to("TO") - .node("L3") + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=") .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) OPTIONAL MATCH (n)-[:TO]->(:L3) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - - def test_multiple_merges(self): - query_builder = ( +def test_where_not_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( QueryBuilder() - .merge() - .node("L1", variable="n") - .to("TO") - .node("L2", variable="m") - .merge() - .node(variable="n") - .to("TO") - .node("L3") + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator="=") .return_() ) - expected_query = " MERGE (n:L1)-[:TO]->(m:L2) MERGE (n)-[:TO]->(:L3) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - def test_where(self): - query_builder = ( +def test_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( QueryBuilder() .match() - .node("L1", variable="n") - .to("TO") - .node("L2", variable="m") - .where("n.name", "=", "best_name") - .or_where("m.id", "<", 4) + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name", expression="Node") .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - def test_get_single(self): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"n": ""}) +def test_where_not_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN n " - with patch.object(Memgraph, "execute_and_fetch", return_value=iter([{"n": None}])) as mock: - query_builder.get_single(retrieve="n") - mock.assert_called_with(expected_query) +def test_or_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .or_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_not_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .or_not_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " 
MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR NOT m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .or_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR m.name = n.last_name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_not_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .or_not_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR NOT m.name = n.last_name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .or_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_not_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .or_not_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR NOT m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_or_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .or_where(item="m.name", operator="=") + .return_() + ) - def test_return_empty(self): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_() - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_or_not_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .or_not_where(item="m.name", operator="=") + .return_() + ) - 
mock.assert_called_with(expected_query) - def test_return_alias(self): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "first"}) +def test_or_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .or_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_or_not_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .or_not_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() + ) + - def test_return_alias_same_as_variable(self): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "L1"}) +def test_and_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .and_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_not_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .and_not_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND NOT m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .and_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND m.name = n.last_name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_not_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .and_not_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND NOT m.name = n.last_name RETURN * " + + with 
patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .and_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_not_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node("L2", variable="m") + .where(item="n", operator=":", expression="Node") + .and_not_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND NOT m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_and_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .and_where(item="m.name", operator="=") + .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_and_not_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .and_not_where(item="m.name", operator="=") + .return_() + ) + - def test_return_alias_empty(self): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": ""}) +def test_and_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .and_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_and_not_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .and_not_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() + ) - def test_call_procedure_pagerank(self): - query_builder = ( + +def test_xor_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + 
.node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .xor_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_not_where_literal(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="best_name") + .xor_not_where(item="m.id", operator="<", literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR NOT m.id < 4 RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .xor_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR m.name = n.last_name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_not_where_property(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", expression="m.name") + .xor_not_where(item="m.name", operator="=", expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR NOT m.name = n.last_name RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .xor_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_not_where_label(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .xor_not_where(item="m", operator=":", expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR NOT m:User RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_xor_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( QueryBuilder() - .call(procedure="pagerank.get") - .yield_({"node": "", "rank": ""}) - .return_({"node": "node", "rank": "rank"}) + .match() + 
.node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .xor_where(item="m.name", operator="=") + .return_() ) - expected_query = " CALL pagerank.get() YIELD node, rank RETURN node, rank " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - def test_call_procedure_node2vec(self): - query_builder = QueryBuilder().call(procedure="node2vec_online.get_embeddings", arguments="False, 2.0, 0.5") - expected_query = " CALL node2vec_online.get_embeddings(False, 2.0, 0.5) " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_xor_not_where_literal_and_expression_missing(memgraph): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator="=", literal="my_name") + .xor_not_where(item="m.name", operator="=") + .return_() + ) - mock.assert_called_with(expected_query) - def test_call_procedure_nxalg_betweenness_centrality(self): - query_builder = ( +def test_xor_and_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( QueryBuilder() - .call(procedure="nxalg.betweenness_centrality", arguments="20, True") - .yield_() - .return_({"node": "", "betweenness": ""}) + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .xor_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() ) - expected_query = " CALL nxalg.betweenness_centrality(20, True) YIELD * RETURN node, betweenness " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - def test_unwind(self): - query_builder = ( - QueryBuilder().unwind(list_expression="[1, 2, 3, null]", variable="x").return_({"x": "", "'val'": "y"}) +def test_xor_not_and_where_extra_values(memgraph): + with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): + ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator="=", literal="best_name") + .xor_not_where(item="n.name", operator="=", literal="best_name", expression="Node") + .return_() ) - expected_query = " UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_and_or_xor_not_where(memgraph): + query_builder = ( + QueryBuilder() + .match() + .node(labels="L1", variable="n") + .to(edge_label="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .and_where(item="n.age", operator=">", literal=5) + .or_where(item="n", operator=":", expression="Node2") + .xor_where(item="n.name", operator="=", expression="m.name") + .xor_not_where(item="m", operator=":", expression="User") + .or_not_where(item="m", operator=":", expression="Node") + .and_not_where(item="m.name", operator="=", literal="John") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND n.age > 5 OR n:Node2 XOR n.name = m.name XOR NOT m:User OR NOT m:Node AND NOT m.name 
= 'John' RETURN * " - def test_with_empty(self): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").with_() - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WITH * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + mock.assert_called_with(expected_query) - mock.assert_called_with(expected_query) - def test_with(self): - query_builder = QueryBuilder().match().node(variable="n").with_({"n": ""}) - expected_query = " MATCH (n) WITH n " +def test_get_single(memgraph): + query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"n": ""}) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN n " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute_and_fetch", return_value=iter([{"n": None}])) as mock: + query_builder.get_single(retrieve="n") - mock.assert_called_with(expected_query) + mock.assert_called_with(expected_query) - def test_union(self): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .return_({"n1": ""}) - .union(include_duplicates=False) - .match() - .node(variable="n2", labels="Node2") - .return_({"n2": ""}) - ) - expected_query = " MATCH (n1:Node1) RETURN n1 UNION MATCH (n2:Node2) RETURN n2 " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_return_empty(memgraph): + query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_() + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN * " - mock.assert_called_with(expected_query) + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - def test_union_all(self): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .return_({"n1": ""}) - .union() - .match() - .node(variable="n2", labels="Node2") - .return_({"n2": ""}) - ) - expected_query = " MATCH (n1:Node1) RETURN n1 UNION ALL MATCH (n2:Node2) RETURN n2 " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_return_alias(memgraph): + query_builder = ( + QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "first"}) + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first " - def test_delete(self): - query_builder = QueryBuilder().match().node(variable="n1", labels="Node1").delete({"n1"}) - expected_query = " MATCH (n1:Node1) DELETE n1 " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + mock.assert_called_with(expected_query) - mock.assert_called_with(expected_query) - def test_delete_detach(self): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .to(edge_label="EDGE") - .node(variable="n2", labels="Node2") - .delete(["n1", "n2"], True) - ) - expected_query = " MATCH (n1:Node1)-[:EDGE]->(n2:Node2) DETACH DELETE n1, n2 " +def test_return_alias_same_as_variable(memgraph): + query_builder = ( + 
QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "L1"}) + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - mock.assert_called_with(expected_query) + mock.assert_called_with(expected_query) - def test_remove_property(self): - query_builder = QueryBuilder().match().node(variable="n", labels="Node").remove({"n.name"}) - expected_query = " MATCH (n:Node) REMOVE n.name " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_return_alias_empty(memgraph): + query_builder = ( + QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": ""}) + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - mock.assert_called_with(expected_query) + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - def test_remove_label(self): - query_builder = QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove({"n:Node2"}) - expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2 " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_call_procedure_pagerank(memgraph): + query_builder = ( + QueryBuilder() + .call(procedure="pagerank.get") + .yield_({"node": "", "rank": ""}) + .return_({"node": "node", "rank": "rank"}) + ) + expected_query = " CALL pagerank.get() YIELD node, rank RETURN node, rank " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - def test_remove_property_and_label(self): - query_builder = ( - QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove(["n:Node2", "n.name"]) - ) - expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2, n.name " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_call_procedure_node2vec(memgraph): + query_builder = QueryBuilder().call(procedure="node2vec_online.get_embeddings", arguments="False, 2.0, 0.5") + expected_query = " CALL node2vec_online.get_embeddings(False, 2.0, 0.5) " + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() - def test_orderby(self): - query_builder = QueryBuilder().match().node(variable="n").order_by("n.id") - expected_query = " MATCH (n) ORDER BY n.id " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_call_procedure_nxalg_betweenness_centrality(memgraph): + query_builder = ( + QueryBuilder() + .call(procedure="nxalg.betweenness_centrality", arguments="20, True") + .yield_() + .return_({"node": "", "betweenness": ""}) + ) + expected_query = " CALL nxalg.betweenness_centrality(20, True) YIELD * RETURN node, betweenness " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - def test_orderby_desc(self): - query_builder = QueryBuilder().match().node(variable="n").order_by("n.id DESC") - expected_query 
= " MATCH (n) ORDER BY n.id DESC " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_unwind(memgraph): + query_builder = ( + QueryBuilder().unwind(list_expression="[1, 2, 3, null]", variable="x").return_({"x": "", "'val'": "y"}) + ) + expected_query = " UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y " - def test_limit(self): - query_builder = QueryBuilder().match().node(variable="n").limit("3") - expected_query = " MATCH (n) LIMIT 3 " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + mock.assert_called_with(expected_query) - mock.assert_called_with(expected_query) - def test_skip(self): - query_builder = QueryBuilder().match().node(variable="n").return_({"n": ""}).skip("1") - expected_query = " MATCH (n) RETURN n SKIP 1 " +def test_remove_label(memgraph): + query_builder = QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove({"n:Node2"}) + expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2 " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() - mock.assert_called_with(expected_query) + mock.assert_called_with(expected_query) - def test_base_class_match(self): - query_builder = match().node(variable="n").return_({"n": ""}) - expected_query = " MATCH (n) RETURN n " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() +def test_remove_property_and_label(memgraph): + query_builder = QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove(["n:Node2", "n.name"]) + expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2, n.name " - mock.assert_called_with(expected_query) + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() - def test_base_class_call(self): - query_builder = call("pagerank.get").yield_().return_() - expected_query = " CALL pagerank.get() YIELD * RETURN * " + mock.assert_called_with(expected_query) - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) +def test_order_by(memgraph): + query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties="n.id") + expected_query = " MATCH (n) RETURN * ORDER BY n.id " - def test_base_class_unwind(self): - query_builder = unwind("[1, 2, 3]", "x").return_({"x": "x"}) - expected_query = " UNWIND [1, 2, 3] AS x RETURN x " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() + mock.assert_called_with(expected_query) - mock.assert_called_with(expected_query) - def test_base_class_with(self): - query_builder = with_({"10": "n"}).return_({"n": ""}) - expected_query = " WITH 10 AS n RETURN n " +def test_order_by_desc(memgraph): + query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", Order.DESC)) + expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - 
-            query_builder.execute()
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
-        mock.assert_called_with(expected_query)
+    mock.assert_called_with(expected_query)
-    def test_from(self):
-        query_builder = match().node("L1", variable="n").from_("TO", variable="e").node("L2", variable="m").return_()
-        expected_query = " MATCH (n:L1)<-[e:TO]-(m:L2) RETURN * "
-        with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
-            query_builder.execute()
+def test_order_by_asc(memgraph):
+    query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", Order.ASC))
+    expected_query = " MATCH (n) RETURN * ORDER BY n.id ASC "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_order_by_wrong_ordering(memgraph):
+    with pytest.raises(GQLAlchemyMissingOrder):
+        QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", "DESCE"))
-        mock.assert_called_with(expected_query)
-    def test_add_string_partial(self):
-        query_builder = (
-            match().node("Node1", variable="n").to("TO", variable="e").add_custom_cypher("(m:L2) ").return_()
+def test_order_by_wrong_type(memgraph):
+    with pytest.raises(GQLAlchemyOrderByTypeError):
+        QueryBuilder().match().node(variable="n").return_().order_by(properties=1)
+
+
+def test_order_by_properties(memgraph):
+    query_builder = (
+        QueryBuilder()
+        .match()
+        .node(variable="n")
+        .return_()
+        .order_by(properties=[("n.id", Order.DESC), "n.name", ("n.last_name", Order.DESC)])
+    )
+    expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC, n.name, n.last_name DESC "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_order_by_asc_desc(memgraph):
+    query_builder = (
+        QueryBuilder()
+        .match()
+        .node(variable="n")
+        .return_()
+        .order_by(
+            properties=[
+                ("n.id", Order.ASC),
+                "n.name",
+                ("n.last_name", Order.DESC),
+                ("n.age", Order.ASCENDING),
+                ("n.middle_name", Order.DESCENDING),
+            ]
         )
-        expected_query = " MATCH (n:Node1)-[e:TO]->(m:L2) RETURN * "
+    )
+    expected_query = (
+        " MATCH (n) RETURN * ORDER BY n.id ASC, n.name, n.last_name DESC, n.age ASCENDING, n.middle_name DESCENDING "
+    )
-        with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
-            query_builder.execute()
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
-        mock.assert_called_with(expected_query)
+    mock.assert_called_with(expected_query)
-    def test_add_string_complete(self):
-        query_builder = QueryBuilder().add_custom_cypher("MATCH (n) RETURN n")
-        expected_query = "MATCH (n) RETURN n"
-        with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
-            query_builder.execute()
+def test_limit(memgraph):
+    query_builder = QueryBuilder().match().node(variable="n").return_().limit("3")
+    expected_query = " MATCH (n) RETURN * LIMIT 3 "
-        mock.assert_called_with(expected_query)
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_skip(memgraph):
+    query_builder = QueryBuilder().match().node(variable="n").return_({"n": ""}).skip("1")
+    expected_query = " MATCH (n) RETURN n SKIP 1 "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_base_class_match(memgraph):
+    query_builder = match().node(variable="n").return_({"n": ""})
+    expected_query = " MATCH (n) RETURN n "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_base_class_call(memgraph):
+    query_builder = call("pagerank.get").yield_().return_()
+    expected_query = " CALL pagerank.get() YIELD * RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_base_class_unwind(memgraph):
+    query_builder = unwind("[1, 2, 3]", "x").return_({"x": "x"})
+    expected_query = " UNWIND [1, 2, 3] AS x RETURN x "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_base_class_with(memgraph):
+    query_builder = with_({"10": "n"}).return_({"n": ""})
+    expected_query = " WITH 10 AS n RETURN n "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_from(memgraph):
+    query_builder = match().node("L1", variable="n").from_("TO", variable="e").node("L2", variable="m").return_()
+    expected_query = " MATCH (n:L1)<-[e:TO]-(m:L2) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_add_string_partial(memgraph):
+    query_builder = match().node("Node1", variable="n").to("TO", variable="e").add_custom_cypher("(m:L2) ").return_()
+    expected_query = " MATCH (n:Node1)-[e:TO]->(m:L2) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_add_string_complete(memgraph):
+    query_builder = QueryBuilder().add_custom_cypher("MATCH (n) RETURN n")
+    expected_query = "MATCH (n) RETURN n"
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_node_instance(memgraph):
+    class User(Node):
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph)
+
+    user = User(name="Ron").save(memgraph)
+    query_builder = QueryBuilder().match().node(node=user, variable="u").return_()
+    expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_unsaved_node_instance(memgraph):
+    class User(Node):
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph)
+
+    user = User(name="Ron")
+    query_builder = QueryBuilder().match().node(node=user, variable="u").return_()
+    expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_node_relationship_instances(memgraph):
+    class User(Node):
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph)
+
+    class Follows_test(Relationship, type="FOLLOWS"):
+        pass
+
+    user_1 = User(name="Ron").save(memgraph)
+    user_2 = User(name="Leslie").save(memgraph)
+    follows = Follows_test(_start_node_id=user_1._id, _end_node_id=user_2._id).save(memgraph)
+    query_builder = (
+        QueryBuilder()
+        .match()
+        .node(node=user_1, variable="user_1")
+        .to(relationship=follows)
+        .node(node=user_2, variable="user_2")
+        .return_()
+    )
+    expected_query = " MATCH (user_1:User {name: 'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
+
+
+def test_unsaved_node_relationship_instances(memgraph):
+    class User(Node):
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph)
+
+    class Follows_test(Relationship, type="FOLLOWS"):
+        pass
+
+    user_1 = User(name="Ron")
+    user_2 = User(name="Leslie")
+    follows = Follows_test()
+    query_builder = (
+        QueryBuilder()
+        .match()
+        .node(node=user_1, variable="user_1")
+        .to(relationship=follows)
+        .node(node=user_2, variable="user_2")
+        .return_()
+    )
+    expected_query = " MATCH (user_1:User {name: 'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * "
+
+    with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
+        query_builder.execute()
+
+    mock.assert_called_with(expected_query)
diff --git a/tests/ogm/class_definition_test.py b/tests/ogm/class_definition_test.py
deleted file mode 100644
index a1679072..00000000
--- a/tests/ogm/class_definition_test.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from gqlalchemy import Memgraph, Node, Field
-from typing import Optional
-
-
-db = Memgraph()
-
-
-class User(Node):
-    name: str = Field(index=True, exists=True, unique=True, db=db)
-
-
-class Stream(User):
-    id: str = Field(index=True, exists=True, unique=True, db=db)
-    followers: Optional[int] = Field()
-
-
-def test_multiple_inheritance():
-    user = User(name="Kate").save(db)
-    streamer = Stream(id=7, name="Ivan", followers=172).save(db)
-    assert "name" in Stream.__fields__
-    assert user.name == "Kate"
-    assert streamer.name == "Ivan"
-    assert streamer.followers == 172
-    assert User.labels == {"User"}
-    assert Stream.labels == {"Streamer", "User"}
-    assert user._labels == {"User"}
-    assert streamer._labels == {"Streamer", "User"}
-    assert "name" in Stream.__fields__
diff --git a/tests/ogm/multiple_inheritance_test.py b/tests/ogm/multiple_inheritance_test.py
deleted file mode 100644
index 5ffa9f43..00000000
--- a/tests/ogm/multiple_inheritance_test.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from gqlalchemy import Memgraph, Node, Relationship, Field
-from typing import Optional
-
-
-db = Memgraph()
-
-
-class User(Node):
-    name: Optional[str] = Field(index=True, unique=True, db=db)
-
-
-class Streamer(User):
-    id: Optional[str] = Field(index=True, unique=True, db=db)
-    name: Optional[str] = Field(index=True, unique=True, db=db, label="User")
-
-
-class Speaks(Relationship, type="SPEAKS"):
-    pass
-
-
-def test_multiple_inheritance():
-    user = User(name="Ivan").save(db)
-    streamer = Streamer(id=7, name="Pero").save(db)
-    assert User.labels == {"User"}
-    assert Streamer.labels == {"Streamer", "User"}
-    assert user._labels == {"User"}
-    assert streamer._labels == {"Streamer", "User"}
diff --git a/tests/ogm/serialisation_test.py b/tests/ogm/serialisation_test.py
deleted file mode 100644
index d63952de..00000000
--- a/tests/ogm/serialisation_test.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Optional
-from gqlalchemy import Memgraph, Node, Relationship, Field
-import pytest
-
-db = Memgraph()
-
-
-class SimpleNode(Node):
-    id: Optional[int] = Field()
-    name: Optional[str] = Field()
-
-
-class NodeWithKey(Node):
-    id: int = Field(exists=True, unique=True, index=True, db=db)
-    name: Optional[str] = Field()
-
-
-class SimpleRelationship(Relationship, type="SIMPLE_RELATIONSHIP"):
-    pass
-
-
-@pytest.fixture
-def clear_db():
-    db = Memgraph()
-    db.drop_database()
-
-
-def test_save_node(clear_db):
-    node1 = SimpleNode(id=1, name="First Simple Node")
-    assert node1._id is None
-    node1.save(db)
-    assert node1._id is not None
-    node2 = SimpleNode(id=1)
-    node2.save(db)
-    assert node1._id != node2._id
-
-
-def test_save_node2(memgraph, clear_db):
-    node1 = NodeWithKey(id=1, name="First NodeWithKey")
-    assert node1._id is None
-    node1.save(db)
-    assert node1._id is not None
-    node2 = NodeWithKey(id=1)
-    node2.save(db)
-    assert node1._id == node2._id
-    assert node1.name == node2.name
-
-
-def test_save_relationship(memgraph, clear_db):
-    node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph)
-    node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph)
-    relationship = SimpleRelationship(
-        _start_node_id=node1._id,
-        _end_node_id=node2._id,
-    )
-    assert SimpleRelationship.type == relationship._type
-    assert SimpleRelationship._type is not None
-    relationship.save(memgraph)
-    assert relationship._id is not None
-
-
-def test_save_relationship2(memgraph, clear_db):
-    node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph)
-    node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph)
-    relationship = SimpleRelationship(
-        _start_node_id=node1._id,
-        _end_node_id=node2._id,
-    )
-    assert SimpleRelationship.type == relationship._type
-    assert SimpleRelationship.type is not None
-    relationship.save(memgraph)
-    assert relationship._id is not None
-    relationship2 = db.load_relationship(relationship)
-    assert relationship2._id == relationship._id
diff --git a/tests/ogm/automatic_deserialisation_test.py b/tests/ogm/test_automatic_deserialisation.py
similarity index 51%
rename from tests/ogm/automatic_deserialisation_test.py
rename to tests/ogm/test_automatic_deserialisation.py
index 4f91edb8..7c386422 100644
--- a/tests/ogm/automatic_deserialisation_test.py
+++ b/tests/ogm/test_automatic_deserialisation.py
@@ -13,66 +13,69 @@
 # limitations under the License.
 
 from typing import Optional
-from gqlalchemy import Memgraph, Node, Relationship, Path
+from gqlalchemy import Node, Relationship, Path
 from gqlalchemy.models import GraphObject
 
 
-class Person(Node):
-    id: Optional[int]
-    name: Optional[str]
-
-
-class Alice(Node):
-    id: Optional[int]
-    name: Optional[str]
-
-
-class Friends(Relationship, _type="FRIENDS"):
-    pass
-
-
 def test_simple_json_deserialisation():
-    person_json = '{"_type":"Person", "id": 9, "name": "person"}'
-    person = GraphObject.parse_raw(person_json)
-    assert person == Person(id=9, name="person")
+    class Person(Node):
+        id: Optional[int]
+        name: Optional[str]
+
+    person_json = '{"labels":"Person", "id": 9, "name": "person"}'
+    person = Person.parse_raw(person_json)
+    assert str(person) == str(Person(id=9, name="person"))
 
 
 def test_json_deserialisation():
-    person_json = '{"_type":"Person", "id": 9, "name": "person", "_node_labels": ["Person"], "_id": 1, "_node_id": 1}'
+    class Person(Node):
+        id: Optional[int]
+        name: Optional[str]
+
+    person_json = '{"id": 9, "name": "person", "_labels": ["Person"], "_id": 1, "_id": 1}'
     person_1 = GraphObject.parse_raw(person_json)
     person_2 = Person(id=9, name="person")
     person_2._id = 1
-    person_2._node_id = 1
-    person_2._node_labels = ["Person"]
-    assert person_1 == person_2
+    person_2._id = 1
+    person_2._labels = ["Person"]
+    assert str(person_1) == str(person_2)
 
 
 def test_dictionary_deserialisation():
     pass
 
 
-def test_automatic_deserialisation_from_database():
-    db = Memgraph()
+def test_automatic_deserialisation_from_database(memgraph):
+    class Person(Node):
+        id: Optional[int]
+        name: Optional[str]
+
+    class Alice(Node):
+        id: Optional[int]
+        name: Optional[str]
+
+    class Friends(Relationship, type="FRIENDS"):
+        pass
 
-    db.execute("create (:Person {id: 1, name: 'person'});")
-    db.execute("create (:Alice {id: 8, name: 'alice'});")
-    db.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);")
+    memgraph.execute("create (:Person {id: 1, name: 'person'});")
+    memgraph.execute("create (:Alice {id: 8, name: 'alice'});")
+    memgraph.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);")
 
-    result = list(db.execute_and_fetch("match (a)-[r]->(b) return a, r, b"))
+    result = list(memgraph.execute_and_fetch("match (a)-[r]->(b) return a, r, b"))
 
     for node in result:
         a = node["a"]
         assert isinstance(a, Alice)
         assert a.id == 8
         assert a.name == "alice"
-        assert a._node_labels == {"Alice"}
-        assert isinstance(a._node_id, int)
+        assert a._labels == {"Alice"}
+        assert isinstance(a._id, int)
         assert a._properties == {"id": 8, "name": "alice"}
         assert isinstance(a._id, int)
 
         r = node["r"]
         assert isinstance(r, Friends)
         assert r._type == "FRIENDS"
-        assert isinstance(r._relationship_id, int)
+        assert isinstance(r._id, int)
         assert isinstance(r._start_node_id, int)
         assert isinstance(r._end_node_id, int)
         assert r._properties == {}
@@ -82,22 +85,18 @@ def test_automatic_deserialisation_from_database():
         assert isinstance(b, Person)
         assert b.id == 1
         assert b.name == "person"
-        assert b._node_labels == {"Person"}
-        assert isinstance(b._node_id, int)
+        assert b._labels == {"Person"}
+        assert isinstance(b._id, int)
         assert b._properties == {"id": 1, "name": "person"}
         assert isinstance(b._id, int)
 
-    db.drop_database()
-
-def test_path_deserialisation():
-    db = Memgraph()
-    db.execute("create (:Person {id: 1, name: 'person'});")
-    db.execute("create (:Alice {id: 8, name: 'alice'});")
-    db.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);")
-    result = list(db.execute_and_fetch("MATCH p = ()-[*1]-() RETURN p"))
+def test_path_deserialisation(memgraph):
+    memgraph.execute("create (:Person {id: 1, name: 'person'});")
+    memgraph.execute("create (:Alice {id: 8, name: 'alice'});")
+    memgraph.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);")
+    result = list(memgraph.execute_and_fetch("MATCH p = ()-[*1]-() RETURN p"))
     path = result[0]["p"]
     assert isinstance(path, Path)
     assert len(path._nodes) == 2
     assert len(path._relationships) == 1
-    db.drop_database()
diff --git a/tests/ogm/test_class_definition.py b/tests/ogm/test_class_definition.py
new file mode 100644
index 00000000..b19b189e
--- /dev/null
+++ b/tests/ogm/test_class_definition.py
@@ -0,0 +1,222 @@
+from gqlalchemy import Node, Field
+from typing import Optional
+
+
+def test_node(memgraph):
+    class User(Node):
+        id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+        name: str = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    user = User(id=0, name="Kate").save(memgraph)
+
+    assert User.label == "User"
+    assert User.labels == {"User"}
+
+    assert "id" in User.__fields__
+    assert "name" in User.__fields__
+
+    assert user.id == 0
+    assert user.name == "Kate"
+
+
+def test_node_inheritance(memgraph):
+    class User(Node):
+        id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+        name: str = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    class Admin(User):
+        admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    user = User(id=0, name="Kate").save(memgraph)
+    admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph)
+
+    assert User.label == "User"
+    assert User.labels == {"User"}
+
+    assert "id" in User.__fields__
+    assert "name" in User.__fields__
+
+    assert user.id == 0
+    assert user.name == "Kate"
+
+    assert Admin.label == "Admin"
+    assert Admin.labels == {"Admin", "User"}
+
+    assert "id" in Admin.__fields__
+    assert "admin_id" in Admin.__fields__
+    assert "name" in Admin.__fields__
+
+    assert admin.id == 1
+    assert admin.admin_id == 0
+    assert admin.name == "Admin"
+    assert admin.label == "Admin"
+    assert admin.labels == {"Admin", "User"}
+    assert admin._label == "Admin:User"
+    assert admin._labels == {"Admin", "User"}
+
+
+def test_node_custom_label(memgraph):
+    class User(Node, label="UserX"):
+        id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+        name: str = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    class Admin(User, label="AdminX"):
+        admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    user = User(id=0, name="Kate").save(memgraph)
+    admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph)
+
+    assert User.label == "UserX"
+    assert User.labels == {"UserX"}
+
+    assert user.label == "UserX"
+    assert user.labels == {"UserX"}
+    assert user._label == "UserX"
+    assert user._labels == {"UserX"}
+
+    assert Admin.label == "AdminX"
+    assert Admin.labels == {"AdminX", "UserX"}
+
+    assert admin.label == "AdminX"
+    assert admin.labels == {"AdminX", "UserX"}
+    assert admin._label == "AdminX:UserX"
+    assert admin._labels == {"AdminX", "UserX"}
+
+
+def test_node_custom_labels(memgraph):
+    class User(Node, labels={"UserX", "UserY"}):
+        id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+        name: str = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    class Admin(User, label="AdminX", labels={"AdminX", "AdminY"}):
+        admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph)
+
+    admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph)
+
assert User.label == "User" + assert User.labels == {"User", "UserX", "UserY"} + + assert Admin.label == "AdminX" + assert Admin.labels == {"AdminX", "AdminY", "User", "UserX", "UserY"} + + assert admin.label == "AdminX" + assert admin.labels == {"AdminX", "AdminY", "User", "UserX", "UserY"} + assert admin._label == "AdminX:AdminY:User:UserX:UserY" + assert admin._labels == {"AdminX", "AdminY", "User", "UserX", "UserY"} + + +def test_node_various_inheritance(memgraph): + class User(Node): + name: str = Field(index=True, exists=True, unique=True, db=memgraph) + + class UserOne(Node, label="User1"): + name: str = Field(index=True, exists=True, unique=True, db=memgraph) + + class UserTwo(User, label="User2", labels={"User3"}): + name: str = Field(index=True, exists=True, unique=True, db=memgraph) + + class Streamer(User): + id: str = Field(index=True, exists=True, unique=True, db=memgraph) + followers: Optional[int] = Field() + + class StreamerOne(User, label="Streamer1"): + id: str = Field(index=True, exists=True, unique=True, db=memgraph) + followers: Optional[int] = Field() + + class StreamerTwo(Streamer, label="Streamer2", labels={"Streamer3", "Streamer4"}): + id: str = Field(index=True, exists=True, unique=True, db=memgraph) + followers: Optional[int] = Field() + + user = User(name="Kate").save(memgraph) + userOne = UserOne(name="Mrma").save(memgraph) + userTwo = UserTwo(name="Boris").save(memgraph) + streamer = Streamer(id=7, name="Ivan", followers=172).save(memgraph) + streamerOne = StreamerOne(id=8, name="Bruno", followers=173).save(memgraph) + streamerTwo = StreamerTwo(id=9, name="Marko", followers=174).save(memgraph) + + assert "name" in Streamer.__fields__ + assert user.name == "Kate" + assert streamer.name == "Ivan" + assert streamer.followers == 172 + + assert User.label == "User" + assert User.labels == {"User"} + + assert UserOne.label == "User1" + assert UserOne.labels == {"User1"} + + assert UserTwo.label == "User2" + assert UserTwo.labels == {"User", "User2", "User3"} + + assert user.label == "User" + assert user.labels == {"User"} + assert user._label == "User" + assert user._labels == {"User"} + + assert userOne.label == "User1" + assert userOne.labels == {"User1"} + assert userOne._label == "User1" + assert userOne._labels == {"User1"} + + assert userTwo.label == "User2" + assert userTwo.labels == {"User", "User2", "User3"} + assert userTwo._label == "User:User2:User3" + assert userTwo._labels == {"User", "User2", "User3"} + + assert Streamer.label == "Streamer" + assert Streamer.labels == {"Streamer", "User"} + + assert StreamerOne.label == "Streamer1" + assert StreamerOne.labels == {"Streamer1", "User"} + + assert StreamerTwo.label == "Streamer2" + assert StreamerTwo.labels == {"User", "Streamer", "Streamer2", "Streamer3", "Streamer4"} + + assert streamer.label == "Streamer" + assert streamer.labels == {"Streamer", "User"} + assert streamer._label == "Streamer:User" + assert streamer._labels == {"Streamer", "User"} + + assert streamerOne.label == "Streamer1" + assert streamerOne.labels == {"Streamer1", "User"} + assert streamerOne._label == "Streamer1:User" + assert streamerOne._labels == {"Streamer1", "User"} + + assert streamerTwo.label == "Streamer2" + assert streamerTwo.labels == {"User", "Streamer", "Streamer2", "Streamer3", "Streamer4"} + assert streamerTwo._label == "Streamer:Streamer2:Streamer3:Streamer4:User" + assert streamerTwo._labels == { + "User", + "Streamer", + "Streamer2", + "Streamer3", + "Streamer4", + } + + +def 
test_node_multiple_inheritence(memgraph): + class User(Node, labels={"UserX"}): + id: int = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(index=True, exists=True, unique=True, db=memgraph) + + class UserOne(Node, labels={"UserOneX"}): + pass + + class UserTwo(Node, label="UserTwoX"): + pass + + class Admin(UserOne, UserTwo, User, label="AdminX", labels={"AdminX", "AdminY"}): + admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph) + + admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph) + + assert UserOne.label == "UserOne" + assert UserTwo.label == "UserTwoX" + + assert Admin.label == "AdminX" + assert Admin.labels == {"AdminX", "AdminY", "User", "UserX", "UserOne", "UserOneX", "UserTwoX"} + + assert admin.label == "AdminX" + assert admin.labels == {"AdminX", "AdminY", "User", "UserX", "UserOne", "UserOneX", "UserTwoX"} + assert admin._label == "AdminX:AdminY:User:UserOne:UserOneX:UserTwoX:UserX" + assert admin._labels == {"AdminX", "AdminY", "User", "UserX", "UserOne", "UserOneX", "UserTwoX"} diff --git a/tests/ogm/custom_fields_test.py b/tests/ogm/test_custom_fields.py similarity index 87% rename from tests/ogm/custom_fields_test.py rename to tests/ogm/test_custom_fields.py index 530d915b..c415ae15 100644 --- a/tests/ogm/custom_fields_test.py +++ b/tests/ogm/test_custom_fields.py @@ -12,7 +12,6 @@ # limitations under the License. from gqlalchemy import ( - Memgraph, MemgraphConstraintExists, MemgraphConstraintUnique, MemgraphIndex, @@ -20,22 +19,11 @@ ) from pydantic import Field -db = Memgraph() - - -class Node1(Node): - id: int = Field(exists=True, db=db) - - -class Node2(Node): - id: int = Field(unique=True, db=db) - - -class Node3(Node): - id: int = Field(index=True, db=db) - def test_create_constraint_exist(memgraph): + class Node1(Node): + id: int = Field(exists=True, db=memgraph) + memgraph_constraint = MemgraphConstraintExists("Node1", "id") memgraph.create_constraint(memgraph_constraint) @@ -45,6 +33,9 @@ def test_create_constraint_exist(memgraph): def test_create_constraint_unique(memgraph): + class Node2(Node): + id: int = Field(unique=True, db=memgraph) + memgraph_constraint = MemgraphConstraintUnique("Node2", ("id",)) memgraph.create_constraint(memgraph_constraint) diff --git a/tests/ogm/test_load_node.py b/tests/ogm/test_load_node.py new file mode 100644 index 00000000..b5ac005c --- /dev/null +++ b/tests/ogm/test_load_node.py @@ -0,0 +1,20 @@ +from gqlalchemy import Node, Field + + +def test_load_node(memgraph): + class User(Node): + name: str = Field(index=True, exists=True, unique=True, db=memgraph) + + class Streamer(User): + name: str = Field(index=True, unique=True, db=memgraph) + id: str = Field(index=True, unique=True, db=memgraph) + followers: int = Field() + totalViewCount: int = Field() + + streamer = Streamer(name="Mislav", id="7", followers=777, totalViewCount=7777).save(memgraph) + loaded_streamer = memgraph.load_node(streamer) + assert loaded_streamer.name == "Mislav" + assert loaded_streamer.id == "7" + assert loaded_streamer.followers == 777 + assert loaded_streamer.totalViewCount == 7777 + assert loaded_streamer._labels == {"Streamer", "User"} diff --git a/tests/ogm/test_loading.py b/tests/ogm/test_loading.py new file mode 100644 index 00000000..4e2ae107 --- /dev/null +++ b/tests/ogm/test_loading.py @@ -0,0 +1,46 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from gqlalchemy import Node
+from pydantic import ValidationError
+
+
+def test_partial_loading(memgraph):
+    class User(Node):
+        id: int
+        name: str = None
+
+    User(id=1, name="Jane").save(memgraph)
+
+    with pytest.raises(ValidationError):
+        memgraph.load_node(User(name="Jane"))
+
+    user_by_id = memgraph.load_node(User(id=1))
+
+    assert user_by_id.id == 1
+    assert user_by_id.name == "Jane"
+    assert user_by_id._label == "User"
+
+
+def test_node_loading(memgraph):
+    class User(Node):
+        id: int
+        name: str
+
+    User(id=1, name="Jane").save(memgraph)
+    user_by_name = memgraph.load_node(User(id=1, name="Jane"))
+
+    assert user_by_name.id == 1
+    assert user_by_name.name == "Jane"
+    assert user_by_name._label == "User"
diff --git a/tests/ogm/test_multiple_inheritance.py b/tests/ogm/test_multiple_inheritance.py
new file mode 100644
index 00000000..8b48e5e7
--- /dev/null
+++ b/tests/ogm/test_multiple_inheritance.py
@@ -0,0 +1,18 @@
+from gqlalchemy import Node, Field
+from typing import Optional
+
+
+def test_multiple_inheritance(memgraph):
+    class User(Node):
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph)
+
+    class Streamer(User):
+        id: Optional[str] = Field(index=True, unique=True, db=memgraph)
+        name: Optional[str] = Field(index=True, unique=True, db=memgraph, label="User")
+
+    user = User(name="Ivan").save(memgraph)
+    streamer = Streamer(id=7, name="Pero").save(memgraph)
+    assert User.labels == {"User"}
+    assert Streamer.labels == {"Streamer", "User"}
+    assert user._labels == {"User"}
+    assert streamer._labels == {"Streamer", "User"}
diff --git a/tests/ogm/test_properties.py b/tests/ogm/test_properties.py
new file mode 100644
index 00000000..fd9b5e01
--- /dev/null
+++ b/tests/ogm/test_properties.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from gqlalchemy import Node
+
+
+def test_properties(memgraph):
+    class User(Node):
+        id: int
+        last_name: str
+        _name: str
+        _age: int
+
+    user = User(id=1, last_name="Smith", _name="Jane").save(memgraph)
+    User(id=2, last_name="Scott").save(memgraph)
+    loaded_user = memgraph.load_node(user)
+    loaded_user._age = 24
+    loaded_user2 = memgraph.load_node(User(id=2, last_name="Scott"))
+
+    assert type(loaded_user) is User
+    assert type(loaded_user2) is User
+    assert hasattr(loaded_user, "_name") is False
+    assert hasattr(loaded_user, "_age") is True
+    assert hasattr(loaded_user2, "_name") is False
+    assert hasattr(loaded_user2, "_age") is False
+    assert "id" in User.__fields__
+    assert "last_name" in User.__fields__
+    assert "_name" not in User.__fields__
+    assert "_age" not in User.__fields__
+    assert loaded_user.id == 1
+    assert loaded_user.last_name == "Smith"
+    assert loaded_user._label == "User"
+    assert loaded_user2.id == 2
+    assert loaded_user2.last_name == "Scott"
+    assert loaded_user2._label == "User"
+    assert user._name == "Jane"
+    assert loaded_user._age == 24
diff --git a/tests/ogm/test_serialisation.py b/tests/ogm/test_serialisation.py
new file mode 100644
index 00000000..dd46f6ea
--- /dev/null
+++ b/tests/ogm/test_serialisation.py
@@ -0,0 +1,148 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional
+from gqlalchemy import Node, Relationship, Field
+
+
+def test_save_node(memgraph):
+    class SimpleNode(Node):
+        id: Optional[int] = Field()
+        name: Optional[str] = Field()
+
+    node1 = SimpleNode(id=1, name="First Simple Node")
+    assert node1._id is None
+    node1.save(memgraph)
+    assert node1._id is not None
+    node2 = SimpleNode(id=1)
+    node2.save(memgraph)
+    assert node1._id != node2._id
+
+
+def test_save_node2(memgraph):
+    class NodeWithKey(Node):
+        id: int = Field(exists=True, unique=True, index=True, db=memgraph)
+        name: Optional[str] = Field()
+
+    node1 = NodeWithKey(id=1, name="First NodeWithKey")
+    assert node1._id is None
+    node1.save(memgraph)
+    assert node1._id is not None
+    node2 = NodeWithKey(id=1)
+    node2.save(memgraph)
+    assert node1._id == node2._id
+    assert node1.name == node2.name
+
+
+def test_save_nodes(memgraph):
+    class SimpleNode(Node):
+        id: Optional[int] = Field()
+        name: Optional[str] = Field()
+
+    node1 = SimpleNode(id=1, name="First Simple Node")
+    node2 = SimpleNode(id=2, name="Second Simple Node")
+    node3 = SimpleNode(id=3, name="Third Simple Node")
+
+    assert node1._id is None
+    assert node2._id is None
+    assert node3._id is None
+
+    memgraph.save_nodes([node1, node2, node3])
+
+    assert node1._id is not None
+    assert node2._id is not None
+    assert node3._id is not None
+
+    node1.name = "1st Simple Node"
+    node2.name = "2nd Simple Node"
+    node3.name = "3rd Simple Node"
+
+    memgraph.save_nodes([node1, node2, node3])
+
+    assert node1.name == "1st Simple Node"
+    assert node2.name == "2nd Simple Node"
+    assert node3.name == "3rd Simple Node"
+
+
+def test_save_relationship(memgraph):
+    class NodeWithKey(Node):
+        id: int = Field(exists=True, unique=True, index=True, db=memgraph)
+        name: Optional[str] = Field()
+
+    class SimpleRelationship(Relationship, type="SIMPLE_RELATIONSHIP"):
+        pass
+
+    node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph)
+    node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph)
+    relationship = SimpleRelationship(
+        _start_node_id=node1._id,
+        _end_node_id=node2._id,
+    )
+    assert SimpleRelationship.type == relationship._type
+    assert SimpleRelationship._type is not None
+    relationship.save(memgraph)
+    assert relationship._id is not None
+
+
+def test_save_relationship2(memgraph):
+    class NodeWithKey(Node):
+        id: int = Field(exists=True, unique=True, index=True, db=memgraph)
+        name: Optional[str] = Field()
+
+    class SimpleRelationship(Relationship, type="SIMPLE_RELATIONSHIP"):
+        pass
+
+    node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph)
+    node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph)
+    relationship = SimpleRelationship(
+        _start_node_id=node1._id,
+        _end_node_id=node2._id,
+    )
+    assert SimpleRelationship.type == relationship._type
+    assert SimpleRelationship.type is not None
+    relationship.save(memgraph)
+    assert relationship._id is not None
+    relationship2 = memgraph.load_relationship(relationship)
+    assert relationship2._id == relationship._id
+
+
+def test_save_relationships(memgraph):
+    class User(Node):
+        id: int = Field(exists=True, unique=True, index=True, db=memgraph)
+        name: Optional[str] = Field()
+
+    class Follows(Relationship, type="FOLLOWS"):
+        pass
+
+    node1 = User(id=1, name="Marin")
+    node2 = User(id=2, name="Marko")
+
+    memgraph.save_nodes([node1, node2])
+    assert node1._id is not None
+    assert node2._id is not None
+
+    relationship1 = Follows(
+        _start_node_id=node1._id,
+        _end_node_id=node2._id,
+    )
+    relationship2 = Follows(
+        _start_node_id=node2._id,
+        _end_node_id=node1._id,
+    )
+
+    assert Follows.type == relationship1._type
+    assert Follows._type is not None
+
+    memgraph.save_relationships([relationship1, relationship2])
+    assert relationship1._id is not None
+    assert relationship2._id is not None
diff --git a/tests/ogm/test_validators.py b/tests/ogm/test_validators.py
new file mode 100644
index 00000000..7d52a270
--- /dev/null
+++ b/tests/ogm/test_validators.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from gqlalchemy import Field, Node, validator
+from typing import List, Optional
+
+
+def test_raise_value_error(memgraph):
+    class User(Node):
+        name: str = Field(index=True, exists=True, unique=True, db=memgraph)
+        age: int = Field()
+        friends: Optional[List[str]] = Field()
+
+        @validator("name")
+        def name_can_not_be_empty(cls, v):
+            if v == "":
+                raise ValueError("name can't be empty")
+
+            return v
+
+        @validator("age")
+        def age_must_be_greater_than_zero(cls, v):
+            if v <= 0:
+                raise ValueError("age must be greater than zero")
+
+            return v
+
+        @validator("friends", each_item=True)
+        def friends_must_be_(cls, v):
+            if v == "":
+                raise ValueError("name can't be empty")
+
+            return v
+
+    with pytest.raises(ValueError):
+        User(name="", age=26).save(memgraph)
+
+    with pytest.raises(ValueError):
+        User(name="Kate", age=0).save(memgraph)
+
+    with pytest.raises(ValueError):
+        User(name="Kate", age=26, friends=["Ema", "Ana", ""]).save(memgraph)
diff --git a/tests/on_disk_property_storage/test_multiprocess.py b/tests/on_disk_property_storage/test_multiprocess.py
index 2c10f0b9..2de5b48c 100644
--- a/tests/on_disk_property_storage/test_multiprocess.py
+++ b/tests/on_disk_property_storage/test_multiprocess.py
@@ -27,11 +27,6 @@
 SQLitePropertyDatabase("./tests/on_disk_storage.db", db)
 
 
-class User(Node):
-    id: Optional[str] = Field(unique=True, index=True, db=db)
-    huge_string: Optional[str] = Field(on_disk=True)
-
-
 @pytest.fixture
 def clear_db():
     db = Memgraph()
@@ -63,6 +58,10 @@ def _run_n_queries(n: int):
 
 
 def _create_n_user_objects(n: int) -> None:
+    class User(Node):
+        id: Optional[str] = Field(unique=True, index=True, db=db)
+        huge_string: Optional[str] = Field(on_disk=True)
+
     db = Memgraph()
     SQLitePropertyDatabase("./tests/on_disk_storage.db", db)
     huge_string = "I LOVE MEMGRAPH" * 1000
diff --git a/tests/on_disk_property_storage/test_query.py b/tests/on_disk_property_storage/test_query.py
index 274f8558..a520d853 100644
--- a/tests/on_disk_property_storage/test_query.py
+++ b/tests/on_disk_property_storage/test_query.py
@@ -22,21 +22,18 @@
 db = SQLitePropertyDatabase("./tests/on_disk_storage.db", memgraph)
 
 
-class User(Node):
-    id: int = Field(unique=True, index=True, db=memgraph)
-    huge_string: Optional[str] = Field(on_disk=True)
-
-
-class FriendTo(Relationship, type="FRIEND_TO"):
-    huge_string: Optional[str] = Field(on_disk=True)
+def drop_data_and_constraints():
+    memgraph.drop_database()
+    db.drop_database()
+    memgraph.ensure_indexes([])
+    memgraph.ensure_constraints([])
 
 
-@pytest.fixture
+@pytest.fixture(scope="module")
 def clear_db():
-    memgraph = Memgraph()
-    db = SQLitePropertyDatabase("./tests/on_disk_storage.db", memgraph)
-    memgraph.drop_database()
-    db.drop_database()
+    drop_data_and_constraints()
+    yield
+    drop_data_and_constraints()
 
 
 def test_add_relationship_property(clear_db):
@@ -77,7 +74,11 @@ def test_delete_relationship_property(clear_db):
     assert result_value is None
 
 
-def test_add_node_with_an_on_disk_property(clear_db):
+def test_add_node_with_an_on_disk_property():
+    class User(Node):
+        id: int = Field(unique=True, index=True, db=memgraph)
+        huge_string: Optional[str] = Field(on_disk=True)
+
     secret = "qwertyuiopasdfghjklzxcvbnm"
     user = User(id=12, huge_string=secret)
     memgraph.save_node(user)
@@ -85,7 +86,14 @@ def test_add_node_with_an_on_disk_property(clear_db):
     assert user_2.huge_string == secret
 
 
-def test_add_relationship_with_an_on_disk_property(clear_db):
+def test_add_relationship_with_an_on_disk_property():
+    class User(Node):
+        id: int = Field(unique=True, index=True, db=memgraph)
+        huge_string: Optional[str] = Field(on_disk=True)
+
+    class FriendTo(Relationship, type="FRIEND_TO"):
+        huge_string: Optional[str] = Field(on_disk=True)
+
     secret = "qwertyuiopasdfghjklzxcvbnm"
     user_1 = User(id=12).save(memgraph)
     user_2 = User(id=11).save(memgraph)
diff --git a/tests/test_instance_runner.py b/tests/test_instance_runner.py
new file mode 100644
index 00000000..9e91a830
--- /dev/null
+++ b/tests/test_instance_runner.py
@@ -0,0 +1,117 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import docker
+import os
+import pathlib
+import pytest
+from gqlalchemy.instance_runner import (
+    DockerImage,
+    MemgraphInstanceBinary,
+    MemgraphInstanceDocker,
+    wait_for_port,
+    wait_for_docker_container,
+)
+
+
+def test_wait_for_port():
+    with pytest.raises(TimeoutError):
+        wait_for_port(port=0000, timeout=1)
+
+
+@pytest.mark.docker
+def test_wait_for_docker_container():
+    container = docker.from_env().containers.create(DockerImage.MEMGRAPH.value)
+    with pytest.raises(TimeoutError):
+        wait_for_docker_container(container, timeout=1)
+
+
+@pytest.mark.docker
+def test_start_and_connect_memgraph_docker():
+    memgraph_instance = MemgraphInstanceDocker(port=7690)
+    memgraph = memgraph_instance.start_and_connect()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    assert memgraph_instance.is_running()
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+
+
+@pytest.mark.docker
+def test_start_and_connect_memgraph_docker_config():
+    memgraph_instance = MemgraphInstanceDocker(port=7691, config={"--log-level": "TRACE"})
+    memgraph = memgraph_instance.start_and_connect()
+    assert memgraph_instance.is_running()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+
+
+@pytest.mark.docker
+def test_start_memgraph_docker_connect():
+    memgraph_instance = MemgraphInstanceDocker(port=7692)
+    memgraph_instance.start()
+    assert memgraph_instance.is_running()
+    memgraph = memgraph_instance.connect()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+
+
+@pytest.mark.ubuntu
+def test_start_and_connect_memgraph_binary():
+    path = pathlib.Path().resolve() / "memgraph_one"
+    os.system(f"mkdir {path}")
+    os.system(f"sudo chown -R memgraph:memgraph {path}")
+    memgraph_instance = MemgraphInstanceBinary(
+        port=7693, config={"--data-directory": str(path)}, binary_path="/usr/lib/memgraph/memgraph", user="memgraph"
+    )
+    memgraph = memgraph_instance.start_and_connect()
+    assert memgraph_instance.is_running()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+    os.system(f"rm -rf {path}")
+
+
+@pytest.mark.ubuntu
+def test_start_and_connect_memgraph_binary_config():
+    path = pathlib.Path().resolve() / "memgraph_two/"
+    os.system(f"mkdir {path}")
+    os.system(f"sudo chown -R memgraph:memgraph {path}")
+    memgraph_instance = MemgraphInstanceBinary(
+        port=7694, config={"--data-directory": str(path)}, binary_path="/usr/lib/memgraph/memgraph", user="memgraph"
+    )
+    memgraph = memgraph_instance.start_and_connect()
+    assert memgraph_instance.is_running()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+    os.system(f"rm -rf {path}")
+
+
+@pytest.mark.ubuntu
+def test_start_memgraph_binary_connect():
+    path = pathlib.Path().resolve() / "memgraph_three"
+    os.system(f"mkdir {path}")
+    os.system(f"sudo chown -R memgraph:memgraph {path}")
+    memgraph_instance = MemgraphInstanceBinary(
+        port=7695, config={"--data-directory": str(path)}, binary_path="/usr/lib/memgraph/memgraph", user="memgraph"
+    )
+    memgraph_instance.start()
+    assert memgraph_instance.is_running()
+    memgraph = memgraph_instance.connect()
+    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
+    memgraph_instance.stop()
+    assert not memgraph_instance.is_running()
+    os.system(f"rm -rf {path}")