diff --git a/Dependencies/Net40/HtmlAgilityPack.dll b/Dependencies/Net40/HtmlAgilityPack.dll index 2a4ac17..ae71981 100644 Binary files a/Dependencies/Net40/HtmlAgilityPack.dll and b/Dependencies/Net40/HtmlAgilityPack.dll differ diff --git a/Dependencies/Net40/VDS.Common.dll b/Dependencies/Net40/VDS.Common.dll index 0b21b80..97b0b6a 100644 Binary files a/Dependencies/Net40/VDS.Common.dll and b/Dependencies/Net40/VDS.Common.dll differ diff --git a/Dependencies/Net40/dotNetRDF.dll b/Dependencies/Net40/dotNetRDF.dll index 46d10fa..5ce6e46 100644 Binary files a/Dependencies/Net40/dotNetRDF.dll and b/Dependencies/Net40/dotNetRDF.dll differ diff --git a/Dependencies/Net40/dotNetRDF.xml b/Dependencies/Net40/dotNetRDF.xml index f4e069d..4c5fd66 100644 --- a/Dependencies/Net40/dotNetRDF.xml +++ b/Dependencies/Net40/dotNetRDF.xml @@ -4,76040 +4,75956 @@ dotNetRDF - + - Object Factory for loading triple and graph collections + Callback for methods that return a SparqlResultSet asynchronously + SPARQL Results + State + + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback + - + - Tries to load a triple/graph collection which is specified in the given configuration graph + Callback for methods that return a IGraph asynchronously - Configuration Graph - Object Node - Target type - Returned Object - + Graph + State + + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback + - + - Gets whether this factory can load objects of the given type + Callback for methods that return a ITripleStore asynchronously - Type - + Triple Store + State + + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback + - + - The Configuration Loader 
is responsible for the loading of Configuration information - and objects based upon information encoded in a Graph but more generally may be used - for the loading of any type of object whose configuration has been loaded in a Graph - and for which a relevant IObjectFactory is available. + Callbacks for methods that process the resulting triples with an RDF Handler asynchronously + RDF Handler + State + + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback + - + - Loads the Object identified by the given URI as an object of the given type based on information from the Configuration Graph + Callbacks for methods that process the results with an SPARQL Results Handler asynchronously + SPARQL Results Handler + State - See remarks under + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + - Loads the Object identified by the given blank node identifier as an object of the given type based on information from the Configuration Graph + Callbacks for methods that may process the results with either an RDF or a SPARQL Results Handler + RDF Handler + SPARQL Results Handler + State - See remarks under + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + - Loads the Object identified by the given blank node identifier as an + Callbacks for methods that perform SPARQL Updates + State - See remarks under + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + - Loads the Object identified by the given URI as an + Callback for methods that return a Namespace Map + Namespace Map + State - 
See remarks under + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + - Factory class for producing Custom SPARQL Expression Factories from Configuration Graphs + Callbacks for methods that return a list of nodes + Node List + State + + In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback + - + - Tries to load a SPARQL Property Function Factory based on information from the Configuration Graph + Marker that will be passed to your callback in the event that an async error occurs, provides access to the error and any state that you passed in originally - Configuration Graph - Object Node - Target Type - Output Object - - + - Gets whether this Factory can load objects of the given Type + Creates new async error - Type - + Exception + State - + - SPARQL Operator factory which is capable of loading any implementation of ISparqlOperator which has a public unparameterized constructor + Gets the error that occurred - + - Tries to load an object of the given type + Gets the original state that was passed in to the async call - Configuration Graph - Object Node - Target Type - Returned Object - - + - Gets whether this factory can load objects of the given type + Callback that occurs when the connection to a Pellet Server instance is ready for use - Type - + Pellet Server + State - + - Class for representing errors with dotNetRDF Configuration + Callback for Pellet Constistency Service - - - Configuration exceptions are thrown when the user tries to load objects using the ConfigurationLoader and their is insufficient/invalid information to load the desired object - - + Whether the Knowledge Base is consistent + State - + - Creates a new dotNetRDF Configuration Exception + Callback for Pellet Search Service - Error Message + Pellet 
Search Results + State - + - Creates a new dotNetRDF Configuration Exception + Callback for Pellet Cluster Service - Error Message - Exception that caused this Exception + Clusters + State - + + + Callback for Pellet Similarity Service + + Similarity Results + State + + - Namespace for Configuration Classes which are used for dynamic loading of Configuration serialized as RDF Graphs. + Namespace which provides a client for interacting with a Pellet Server - This API which provides for encoding dotNetRDF centric configuration in RDF Graphs though it can be extended to serialize and deserialize arbitrary objects if desired. This configuration API is used extensively with our ASP.Net support as it allows for highly expressive and flexible configurations. See the documentation on the main website for many detailed examples. + Due to Pellet Server being a relatively new product it is currently only possible to reason over external knowledge bases on a Pellet Server and not to use Pellet to reason over in-memory data. As Pellet Server is updated in the future this client will be updated to take advantage of those updates and to eventually provide for in-memory reasoning. You may also want to consider using the which is the triple store from the same people who developed Pellet and which integrates some Pellet capabilities. 
- + - Namespace for classes related to configuring Permissions - - - Warning: The API here is experimental and may changed/be removed in future releases + Namespace which provides classes which represent the Services offered by a Pellet Server knowledge base - + - Possible permission models + Represents the Classify Service provided by a Pellet Server - + - If the action appears in the deny list it is denied unless it is in the allow list, otherwise it is allowed + Creates a new Classify Service + Service Name + JSON Object - + - If the action appears in the allow list it is allowed unless it is in the deny list, otherwise it is denied + Extracts the Graph which comprises the class hierarchy - + - All actions are allowed + Extracts the Graph which comprises the class hierarchy + Callback for when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - All actions are denied + Represents the Cluster Service provided by a Pellet Knowledge Base - + - Interface for Permission + Gets a list of lists expressing clusters within the Knowledge Base + Number of Clusters + - + - Gets whether the Permission is for a specific action + Gets a list of lists expressing clusters within the Knowledge Base - Action + Number of Clusters + QName of a Type to cluster around - + - Represents a action that can be allowed/denied + Gets the raw Cluster Graph for the Knowledge Base + Number of Clusters + - + - Creates a new Permission for the given Action + Gets the raw Cluster Graph for the Knowledge Base - Action + Number of Clusters + QName of a Type to Cluster around + - + - Gets whether the Permission is for the given action + Gets a list of lists expressing clusters within the Knowledge Base - Action - + Number of Clusters + Callback to be invoked when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Represents a set of Permissions that can be allowed/denied + Gets a list of lists expressing clusters within the Knowledge Base + Number of Clusters + QName of a Type to cluster around + Callback to be invoked when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Creates a new Permissions Set + Gets the raw Cluster Graph for the Knowledge Base - Action + Number of Clusters + Callback to be invoked when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Creates a new Permissions Set + Gets the raw Cluster Graph for the Knowledge Base - Actions + Number of Clusters + QName of a Type to Cluster around + Callback to be invoked when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Gets whether the Permission is for the given action + Represents the Consistency Service provided by a Pellet Server - Action - - + - Represents a Group of Users and the permissions they have to perform actions + Creates a new Consistency Service + Service Name + JSON Object - + - Creates a new User Group + Returns whether the Knowledge Base is consistent - + - Creates a new User Group which may allow guests + Determines whether the Knowledge Base is consistent - Are guests allowed? + Callback to invoke when the operation completes + State to be passed to the callback - If guests are allowed then this Groups permissions apply to unauthenticated users + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + - Gets/Sets whether Guests are allowed + Represents the Explain Service provided by a Pellet Server - + - Gets/Sets the in-use Permission Model + Base Query for use with the Explain Service - + - Adds a User to the Group + Creates a new Explain Service - User Credentials + Service Name + JSON Object - + - Adds an allow action permission to the Group + Gets a Graph explaining the result of the SPARQL Query - Permission + SPARQL Query + - + - Adds a deny action permission to the Group + Gets a Graph explaining the result of the SPARQL Query - Permission + SPARQL Query + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Returns whether the Group has a member with the given username + Represents the Explan Unsatisfiable Service provided by a Pellet Server - Username - - + - Returns whether the Group has a member with the given credentials + Creates a new Explain Unsatisfiable Service - Username - Password - + Service Name + JSON Object - + - Gets whether the Group permits the action + Gets a Graph explaining why a Class is unsatisfiable - Action + Class - - - Key for Objects that are cached by the Configuration Loader - - - + - Creates a new Cached Object Key + Gets a Graph explaining why a Class is unsatisfiable - Object Node - Configuration Graph + Class + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Gets the Hash Code for the Key + Represents the Explain Instance Service provided by a Pellet Server - - + - Gets whether this Key is equal to the given Object + Creates a new Explain Instance Service - Object - + Service Name + JSON Object - + - Gets whether this Key is equal to the given Key + Gets a Graph explaining why an Instance is of the given Class - Key + Instance + Class - - - Context Class for writing serializing Configuration information - - - + - Configuration Graph being written to + Gets a Graph explaining why an Instance is of the given Class + Instance + Class + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Creates a new Serialization Context + Represents the Explain Subclass Service provided by a Pellet Server - + - Creates a new Serialization Context + Creates a new Explain Subclass Service - Base Configuration Graph + Service Name + JSON Object - + - Gets the Graph to which Configuration information should be written + Gets a Graph explaining why the given Class is a subclass of the given Super Class + Class + Super Class + - + - Gets/Sets the next subject to be used + Gets a Graph explaining why the given Class is a subclass of the given Super Class + Class + Super Class + Callback to invoke when the operation completes + State to pass to the callback - - Always returns a Blank Node if none is currently explicitly specified - - - Used to link objects together when you want some subsidiary object to serialize it's configuration and link that to the configuration you are currently serializing - + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides 
access to the error message and the original state passed in. - + - Factory class for producing Network Credentials + Represents the Explain Inconsistent Service provided by a Pellet Server - + - Tries to load a Network Credential based on information from the Configuration Graph + Creates a new Explain Inconsistent Service - Configuration Graph - Object Node - Target Type - Output Object - + Service Name + JSON Object - + - Gets whether this Factory can load objects of the given Type + Gets a Graph explaining why the Knowledge Base is inconsistent - Type - - - Factory class for producing Graphs from Configuration Graphs - - - + - Tries to load a Graph based on information from the Configuration Graph + Gets a Graph explaining why the Knowledge Base is inconsistent - Configuration Graph - Object Node - Target Type - Output Object - + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Gets whether this Factory can load objects of the given Type + Represents the Explain Property Service provided by a Pellet Server - Type - - + - Factory class for producing Triple Stores from Configuration Graphs + Creates a new Explain Property Service + Service Name + JSON Object - + - Tries to load a Triple Store based on information from the Configuration Graph + Gets a Graph explaining why the given Triple was derived - Configuration Graph - Object Node - Target Type - Output Object + Subject + Predicate + Object - + - Gets whether this Factory can load objects of the given Type + Gets a Graph explaining why the given Triple was derived - Type - + Triple - + - An Object Factory for creating SPARQL Datasets + Gets a Graph explaining why the given Triple was derived + Subject + Predicate + Object + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - - - Tries to load a SPARQL Dataset based on information from the Configuration Graph - - Configuration Graph - Object Node - Target Type - Output Object - - - + - Gets whether this Factory can load objects of the given Type + Gets a Graph explaining why the given Triple was derived - Type - + Triple + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Factory class for producing SPARQL Endpoints from Configuration Graphs + Represents the Integrity Constraint Validation Service provided by a Pellet Knowledge Base - + - Tries to load a SPARQL Endpoint based on information from the Configuration Graph + Creates a new Integrity Constraint Validation Service - Configuration Graph - Object Node - Target Type - Output Object - + Service Name + JSON Object - + - Gets whether this Factory can load objects of the given Type + Extracts an RDF Dataset which details the Constraints violated (if any) and whether Constraints are satisified - Type - + - Factory class for producing Custom SPARQL Expression Factories from Configuration Graphs + Extracts an RDF Dataset which details the Constraints violated (if any) and whether Constraints are satisified + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Tries to load a SPARQL Custom Expression Factory based on information from the Configuration Graph + Represents the Namespace Service provided by a Pellet Server knowledge base - Configuration Graph - Object Node - Target Type - Output Object - - + - Gets whether this Factory can load objects of the given Type + Creates a new Namespace Service - Type - + Service Name + JSON Object - + - An Object Factory that can generate SPARQL Query and Algebra Optimisers + Gets the Namespaces used in the Knowledge Base + - + - Tries to load a SPARQL Query/Algebra Optimiser based on information from the Configuration Graph + Gets the Namespaces used in the Knowledge Base - Configuration Graph - Object Node - Target Type - Output Object - + Callback to invoke when the operation completes + State to be passed to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Gets whether this Factory can load objects of the given Type + Represents the Predict Service of a Pellet Knowledge Base - Type - - + - Object Factory used by the Configuration API to load parsers from configuration graphs + Creates a new Predict Service for a Pellet Knowledge Base + Service Name + JSON Object - + - Tries to load a Parser based on information from the Configuration Graph + Gets the list of Predictions for the given Individual and Property - Configuration Graph - Object Node - Target Type - Output Object + QName of an Inidividual + QName of a Property - + - Gets whether this Factory can load objects of the given Type + Gets the Raw Predictions Graph from the Knowledge Base - Type + QName of an Individual + QName of a Property - + - Object Factory used by the Configuration API to load writers from configuration graphs + Gets the list of Predictions for the given Individual and Property + QName of an Inidividual + QName of a Property + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Tries to load a Writer based on information from the Configuration Graph + Gets the Raw Predictions Graph from the Knowledge Base - Configuration Graph - Object Node - Target Type - Output Object - + QName of an Individual + QName of a Property + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Gets whether this Factory can load objects of the given Type + Represents the SPARQL Query Service provided by a Pellet Server knowledge base - Type - - + - Factory class for producing Permissions from Configuration Graphs + Creates a new SPARQL Query Service + Service Name + JSON Object - + - Tries to load a Permission based on information from the Configuration Graph + Makes a SPARQL Query against the Knowledge Base - Configuration Graph - Object Node - Target Type - Output Object + SPARQL Query - + - Gets whether this Factory can load objects of the given Type + Processes a SPARQL Query against the Knowledge Base passing the results to the RDF or Results handler as appropriate - Type - + RDF Handler + Results Handler + SPARQL Query - + - Factory class for producing User Groups from Configuration Graphs + Makes a SPARQL Query against the Knowledge Base + SPARQL Query + Callback to invoke for queries that return a Graph + Callback to invoke for queries that return a Result Set + State to pass to whichever callback function is invoked + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Tries to load a User Group based on information from the Configuration Graph + Processes a SPARQL Query against the Knowledge Base passing the results to the RDF or Results handler as appropriate - Configuration Graph - Object Node - Target Type - Output Object - + RDF Handler + Results Handler + SPARQL Query + Callback to invoke once handling of results has completed + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Gets whether this Factory can load objects of the given Type + Represents the Realize Service provided by a Pellet Server - Type - - + - Factory class for producing SPARQL Query Processors from Configuration Graphs + Creates a new Realize Service + Service Name + JSON Object - + - Tries to load a SPARQL Query Processor based on information from the Configuration Graph + Gets the Graph which comprises the class hierarchy and individuals of those classes - Configuration Graph - Object Node - Target Type - Output Object - + - Gets whether this Factory can load objects of the given Type + Gets the Graph which comprises the class hierarchy and individuals of those classes - Type - + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - Factory class for producing SPARQL Update Processors from Configuration Graphs + Represents the Search Service provided by a Pellet Server - + - Tries to load a SPARQL Update based on information from the Configuration Graph + Creates a new Search Service - Configuration Graph - Object Node - Target Type - Output Object - + Service Name + JSON Object - + - Gets whether this Factory can load objects of the given Type + Gets the list of Search Results which match the given search term - Type - + Search Term + A list of Search Results representing Nodes in the Knowledge Base that match the search term - + - Factory class for producing SPARQL Graph Store HTTP Protocol Processors from Configuration Graphs + Gets the list of Search Results which match the given search term + Search Term + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. + - + - Tries to load a SPARQL Graph Store HTTP Protocol Processor based on information from the Configuration Graph + Represents a Search Result returned from the - Configuration Graph - Object Node - Target Type - Output Object - - + - Gets whether this Factory can load objects of the given Type + Creates a new Search Service Result - Type - + Result Node + Result Score - + - Factory class for producing IObjectFactory instances from Configuration Graphs + Gets the Node for this Result - - - This essentially reflexive implementation allows for defining additional IObjectFactory instances that can load custom/user defined types based on user definable Configuration. If your Configuration Graphs contain custom object factory definitions then you should call ConfigurationLoader.AutoConfigureObjectFactories() before attempting to load any Configuration. 
- - - + - Tries to load an Object Factory based on information from the Configuration Graph + Gets the Score for this Result - Configuration Graph - Object Node - Target Type - Output Object - - + - Gets whether this Factory can load objects of the given Type + Gets the String representation of the Result - Type - + - Factory class for creating Web Proxies from Configuration Graphs + Represents the Similarity Service provided by a Pellet Knowledge Base - + - Tries to load a Web Proxy based on information from the Configuration Graph + Creates a new Similarity Service for a Pellet Knowledge Base - Configuration Graph - Object Node - Target Type - Output Object - + Service Name + JSON Object - + - Gets whether this Factory can load objects of the given Type + Gets a list of key value pairs listing Similar Individuals and their Similarity scores - Type + Number of Similar Individuals + QName of a Individual to find Similar Individuals to - - - Factory class for producing Reasoners from Configuration Graphs - - - + - Tries to load a Reasoner based on information from the Configuration Graph + Gets the raw Similarity Graph for the Knowledge Base - Configuration Graph - Object Node - Target Type - Output Object + Number of Similar Individuals + QName of a Individual to find Similar Individuals to - + - Gets whether this Factory can load objects of the given Type + Gets a list of key value pairs listing Similar Individuals and their Similarity scores - Type - + Number of Similar Individuals + QName of a Individual to find Similar Individuals to + Callback to invoke when the operation completes + State to pass to the callback + + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
+ - + - The Configuration Loader is responsible for the loading of Configuration information and objects based upon information encoded in a Graph but more generally may be used for the loading of any type of object whose configuration has been loaded in a Graph and for which a relevant IObjectFactory is available. + Gets the raw Similarity Graph for the Knowledge Base + Number of Similar Individuals + QName of a Individual to find Similar Individuals to + Callback to invoke when the operation completes + State to pass to the callback - + If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - + - Configuration Namespace URI + Represents some Service provided by a Pellet Server which the library does not explicitly support - + - Constants for URI Schemes with special meaning within the Configuration API + Creates a new Unsupported Service + Service Name + JSON Object - + - Constants for URI Schemes with special meaning within the Configuration API + Represents a Knowledge Base on a Pellet Server - + - URI Constants for configuration properties + Creates a new Knowledge Base + JSON Token for the Object that represents the Service - + - URI Constants for configuration properties + Gets the Name of the Knowledge Base - + - URI Constants for configuration properties + Gets the Services provided by this Knowledge Base - + - URI Constants for configuration properties + Gets whether a Service is supported by the Knowledge Base + Service Type + - + - URI Constants for configuration properties + Gets whether a Service is supported by the Knowledge Base + Service Type + - + - URI Constants for configuration properties + Gets whether a Service is supported by the Knowledge Base + Service Name + - + - URI Constants for configuration properties + Gets the first available implementation of the given 
Service Type for this Knowledge Base + Service Type + + Either the Service or a Null if the Knowledge Base does not expose a Service of the given Type + - + - URI Constants for configuration properties + Gets the first available implementation of the desired Service Type + Desired Service Type + - + - URI Constants for configuration properties + Gets the first available Service with the given name for this Knowledge Base + Service Name + + Either the Service or a Null if the Knowledge Base does not expose a Service with the given name + - + - URI Constants for configuration properties + Gets all the available implementations of the given Service Type for this Knowledge Base + Service Type + - + - URI Constants for configuration properties + Gets all the available services with the given name for this Knowledge Base + Service Name + - + - URI Constants for configuration properties + Helper class provided constants and helper methods for use with Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may 
be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Constants for Service Names for Services that may be provided by a Pellet Server - + - URI Constants for configuration properties + Represents a Connection to a Pellet Server - + - URI Constants for configuration properties + Preferred MIME Type for the format to retrieve the Server Description in - + - URI Constants for configuration properties + Creates a new connection to a Pellet Server + Server URI - + - URI Constants for configuration properties + Creates a new connection to a Pellet Server + Server URI - + - URI Constants for configuration properties + Connects to a Pellet Server instance asynchronously invoking the callback when the connection is ready + Server URI + Callback to invoke when the connection is 
ready + State to pass to the callback - + - URI Constants for configuration properties + Connects to a Pellet Server instance asynchronously invoking the callback when the connection is ready + Server URI + Callback to invoke when the connection is ready + State to pass to the callback - + - URI Constants for configuration properties + Creates a new connection to a Pellet Server + Server URI + Callback to invoke when the connection is ready + State to pass to the callback - + - URI Constants for configuration properties + Creates a new connection to a Pellet Server + Server URI + Callback to invoke when the connection is ready + State to pass to the callback - + - URI Constants for configuration properties + Discovers the Knowledge Bases on a Server - + - URI Constants for configuration properties + Discovers the Knowledge Bases on a Server asynchronously + Callback to invoke when the operation completes + - + - URI Constants for configuration properties + Gets the Knowledge Bases available from this Pellet Server - + - URI Constants for configuration properties + Gets whether the Server has a Knowledge Base with the given Name + Knowledge Base Name + - + - URI Constants for configuration properties + Gets whether the Server has a Knowledge Base which supports the given Service Type + Service Type + - + - URI Constants for configuration properties + Gets the Knowledge Base with the given Name + Knowledge Base Name + + - + - URI Constants for configuration properties + Gets all the Knowledge Bases which support a given Server + Service Type + - + - URI Constants for configuration properties + Class representing Services provided by a Pellet Server Knowledge Base - + - URI Constants for configuration properties + Creates a new Pellet Service instance + Service Name + JSON Object representing the Service - + - URI Constants for configuration properties + Factory method for generating concrete Pellet Service instances representing different Pellet Services + JSON 
Object representing the Service + - + - URI Constants for configuration properties + Gets the Name of the Service - + - URI Constants for configuration properties + Gets the Endpoint for this Service - + - URI Constants for configuration properties + Gets the Response MIME Types supported by the Service - + - URI Constants for configuration properties + Represents the Service Endpoint for a Service provided by a Pellet Server - + - URI Constants for configuration properties + Creates a new Service Endpoint instance + JSON Object representing the Endpoint - + - URI Constants for configuration properties + Gets the URI of the Endpoint - + - URI Constants for configuration properties + Gets the HTTP Methods supported by the Endpoint - + - URI Constants for configuration properties + + Namespace for Inference Classes which provide Inferencing capabilities on RDF - these features are currently experimental and may not work as expected. + + + Classes which implement reasoning must implement the IInferenceEngine interface, these can then be attached to classes which implement the IInferencingTripleStore interface or they can be used to apply inference to any IGraph implementation with the inferred Triples optionally output to a separate Graph. + + + OWL reasoning currently has extremely limited support, we provide a Pellet client in the Pellet namespace which can be used to connect to a Pellet Server but that currently only provides reasoning on external knowledge bases on the Pellet Server + - + - URI Constants for configuration properties + Interfaces for Inference Engines + + + An Inference Engine is a class that given a Graph can infer extra information from that Graph based on fixed rules or rules computed from the Graphs it is performing inference on + + + In general terms an implementation of an Inference Engine typically provides some form of forward chaining reasoner though implementations may do more advanced reasoning or wrap other kinds of reasoner. 
+ + - + - URI Constants for configuration properties + Applies inference to the given Graph and outputs the inferred information to that Graph + Graph - + - URI Constants for configuration properties + Applies inference to the Input Graph and outputs the inferred information to the Output Graph + Graph to apply inference to + Graph inferred information is output to - + - URI Constants for configuration properties + Initialises the Inference Engine using the given Graph + Graph to initialise from - + - URI Constants for configuration properties + Helper class containing constants and methods for use in implementing OWL support - + - URI Constants for configuration properties + Class containing Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL 
Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration properties + OWL Extraction Mode constants - + - URI Constants for configuration classes + OWL Class and Property Constants - + - URI Constants for configuration classes + Proposed interface for OWL Reasoners - currently incomplete + + + Important: This interface is specifically designed so that it does not require the provision of a Graph to methods unless the method does not make sense without taking an IGraph as a parameter. This is because we envisage the use of this interface for connecting to reasoners which have their own access to the data over which they are reasoning and do not need it providing explicitly to them. + + + Reasoner implementations may throw NotSupportedException for operations they don't support and may throw any other appropriate exceptions as appropriate for operations that encounter errors. + + - + - URI Constants for configuration classes + Adds a Graph to the reasoners knowledge base + Graph + + + A reasoner may choose to do nothing in this method if that reasoner especially if it operates using some pre-defined, remote or otherwise immutable knowledge base. 
+ + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Extract a reasoning enhanced sub-graph from the given Graph rooted at the given Node + Graph + Root Node + - + - URI Constants for configuration classes + Extracts all possible triples using the given extraction mode + Extraction Mode + + + The mode permits for the specification of an extraction mode for reasoners that can extract specific subsets of reasoning. Where this is not supported the reasoner should simply extract all triples that can be inferred by reasoning + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Extracts all possible triples using the given extraction modes + Extraction Modes + + + The modes permits for the specification of an extraction mode for reasoners that can extract specific subsets of reasoning. Where this is not supported the reasoner should simply extract all triples that can be inferred by reasoning + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Extracts the triples which comprise the class hierarchy + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Extracts the triples which comprise the class hierarchy and individuals of those classes + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Returns whether the underlying knowledge base is consistent + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Returns whether the given Graph is consistent with the underlying knowledge base + Graph + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Returns the enumeration of unsatisfiable classes + May be thrown if the Reasoner does not 
support such an operation - + - URI Constants for configuration classes + Interface for OWL Reasoners which have access to their own SPARQL implementations - + - URI Constants for configuration classes + Executes a SPARQL Query using the reasoners SPARQL implementation + SPARQL Query + + May be thrown if the Reasoner does not support such an operation - + - URI Constants for configuration classes + Wrapper around an IOwlReasoner to make it appear like a forward-chaining reasoner + + Essentially all this class does is extract all triples which the underlying reasoner can infer. Currently the input graph and any graph passed to the Initialise() method have no effect on the output of the reasoner + - + - URI Constants for configuration classes + Creates a new OWL Reasoner Wrapper around the given OWL Reasoner + OWL Reasoner - + - URI Constants for configuration classes + Applies the reasoner to the given Graph outputting inferences into the same Graph + Graph - + - URI Constants for configuration classes + Applies the reasoner to the given input Graph outputting inferences into the output Graph + Input Graph + Output Graph - + - URI Constants for configuration classes + Initialises the reasoner + Graph to initialise with - + - URI Constants for configuration classes + Wrapper around an IOwlReasoner to make it appear like a forward-chaining reasoner + + Effectively equivalent to StaticOwlReasonerWrapper except that every Graph reasoning is applied to is added to the reasoners knowledge base (unless the reasoner uses a fixed knowledge base) + - + - URI Constants for configuration classes + Creates a new OWL Reasoner Wrapper around the given OWL Reasoner + OWL Reasoner - + - URI Constants for configuration classes + Applies the reasoner to the given Graph outputting inferences into the same Graph + Graph - + - URI Constants for configuration classes + Applies the reasoner to the given input Graph outputting inferences into the output Graph + Input Graph + Output Graph - 
+ - URI Constants for configuration classes + A Pellet Reasoner which provides OWL 2 capable reasoning using an external knowledge base from a Pellet Server instance + + + Note: Currently this reasoner operates only on a external knowledge base and there is currently no way to introduce new knowledge bases/data through the dotNetRDF API + + - + - URI Constants for configuration classes + Creates a new Pellet Reasoner + Pellet Server + Knowledge Base name - + - URI Constants for configuration classes + Creates a new Pellet Reasoner + Pellet Server URI + Knowledge Base name - + - URI Constants for configuration classes + Gets the Knowledge Base this Reasoner operates over - + - URI Constants for configuration classes + Gets the Pellet Server this Reasoner operates on - + - URI Constants for configuration classes + Executes a SPARQL Query against the underlying Knowledge Base + SPARQL Query + - + - URI Constants for configuration classes + Adds a Graph to the Knowledge Base + Graph + + Currently not supported by Pellet Server + - + - URI Constants for configuration classes + Extract a reasoning enhanced sub-graph from the given Graph rooted at the given Node + Graph + Root Node + + + Currently not supported by Pellet Server + - + - URI Constants for configuration classes + Extracts all possible triples using the given extraction mode + Extraction Mode + + + Currently not supported by Pellet Server + - + - URI Constants for configuration classes + Extracts all possible triples using the given extraction modes + Extraction Modes + + + Currently not supported by Pellet Server + - + - QName Constants for Default Types for some configuration classes + Extracts the triples which comprise the class hierarchy + - + - QName Constants for Default Types for some configuration classes + Extracts the triples which comprise the class hierarchy and individuals of those classes + - + - QName Constants for Default Types for some configuration classes - - - - - QName Constants for 
Default Types for some configuration classes - - - - - QName Constants for Default Types for some configuration classes - - - - - QName Constants for Default Types for some configuration classes + Returns whether the underlying knowledge base is consistent + - + - QName Constants for Default Types for some configuration classes + Returns whether the given Graph is consistent with the underlying knowledge base + Graph + + + Currently not supported by Pellet Server + - + - QName Constants for Default Types for some configuration classes + Returns the enumeration of unsatisfiable classes + + Currently not supported by Pellet Server + - + - Cache for loaded objects + An Inference Engine which uses RDFS reasoning + + + Does basic RDFS inferencing using the schema taken from the Graph(s) which are provided in calls to the reasoners Initialise() method. + + + Types of inference performed are as follows: + +
    +
  • Class hierarchy reasoning - asserts additional types triples for anything that is typed as the subclass of a class.
  • +
  • Property hierarchy reasoning - asserts additional property triples for anything where the predicate is a subproperty of a defined property
  • +
  • Domain & Range reasoning - asserts additional type triples based on the domains and ranges of properties
  • +
+
- + - Set of built-in object factories that are automatically registered and used + Creates a new instance of the Static RdfsReasoner - + - Path resolver + Applies inference to the given Graph and outputs the inferred information to that Graph + Graph - + - Loads a Configuration Graph and applies auto-configuration + Applies inference to the Input Graph and outputs the inferred information to the Output Graph - URI to load from - + Graph to apply inference to + Graph inferred information is output to - + - Loads a Configuration Graph and applies auto-configuration if desired + Imports any Class heirarchy information from the given Graph into the Reasoners Knowledge Base in order to initialise the Reasoner - URI to load from - Whether to apply auto-configuration - + Graph to import from + + Looks for Triples defining things to be classes and those defining that something is a subClass of something + - + - Loads a Configuration Graph and applies auto-configuration + Helper method which applies Class hierarchy inferencing - File to load from - + Triple defining the type for something + Input Graph + Output Graph + List of Inferences - + - Loads a Configuration Graph and applies auto-configuration if desired + An Inference Engine which uses RDFS reasoning - File to load from - Whether to apply auto-configuration - + + Does basic RDFS inferencing as detailed in the remarks for the StaticRdfsReasoner except every Graph that inference is applied to has the potential to alter the schema which is in use. 
+ - + - Loads a Configuration Graph and applies auto-configuration + Applies inference to the Input Graph and outputs the inferred information to the Output Graph - Embedded Resource to load - + Graph to apply inference to + Graph inferred information is output to - + - Loads a Configuration Graph and applies auto-configuration if desired + An Inference Engine that supports simple N3 rules - Embedded Resource to load - Whether to apply auto-configuration - + + + This reasoner should be initialised with a Graph that contains simple N3 rules such as the following: + + + { ?x a ?type } => { ?type a rdfs:Class }. + + + When initialised the reasoner takes account of variables declared with @forAll and @forSome directives though no guarantees that scoping will be correct if you've got multiple @forAll and @forSome directives. + + + When the reasoner is applied to a Graph rules are implemented by generating a SPARQL Update INSERT command like the following and executing it on the given Graph + + + INSERT + { + ?type a rdfs:Class . + } + WHERE + { + ?x a ?type . 
+ } + + - + - Common loader for Configuration Graphs, handles the resolution of dnr:imports and applies the auto-configuration if selected + Applies reasoning to the given Graph materialising the generated Triples in the same Graph - Configuration Graph - Source the graph originated from - Whether to apply auto-configuration - + Graph - + - Common loader for Configuration Graphs, handles the resolution of dnr:imports and applies the auto-configuration if selected + Applies reasoning on the Input Graph materialising the generated Triples in the Output Graph - Configuration Graph - Sources the graph originated from - Whether to apply auto-configuration - + Input Graph + Output Graph - + - Given a Configuration Graph applies all available auto-configuration based on the contents of the graph + Initialises the Reasoner - Configuration Graph + Rules Graph - + - Given a Configuration Graph will detect and configure Object Factories defined in the configuration + Tries to create a Rule - Configuration Graph + Triple - + - Given a Configuration Graph will detect and configure static options that are specified using the dnr:configure property with special <dotnetrdf-configure:Class/Property> subject URIs + An Inference Engine that uses SKOS Concept Hierarchies - Configuration Graph - An example of using this mechanism to configure a static option is as follows: - -
-            <dotnetrdf-configure:VDS.RDF.Options#UsePLinqEvaluation> dnr:configure false .
-            
- - Class and property names must be fully qualified, to specify static options outside of dotNetRDF itself you can add an additional path segment with the assembly name after the initial configure keyword. If the class/property does not exist or the value of the literal cannot be appropriately converted to the type of the property then an exception will be thrown. If there is a problem setting the property (e.g. it does not have a public setter) then an exception will be thrown. + Infers additional values for properties based on SKOS Concept Hierarcies. If there is a Triple whose value is a Concept from the hierarchy then new versions of that Triple will be inferred where the object becomes each concept higher in the hierarchy.
- + - Given a Configuration Graph will detect Readers and Writers for RDF and SPARQL syntaxes and register them with MimeTypesHelper. This will cause the library defaults to be overridden where appropriate. + Namespace for SKOS - Configuration Graph - + - Given a Configuration Graph will detect and configure SPARQL Operators + Creates a new instance of the SKOS Reasoner - Configuration Graph - + - Checks for circular references and throws an error if there is one + Applies inference to the given Graph and outputs the inferred information to that Graph - Object you are attempting to load - Object being referenced - QName for the property that makes the reference - - - If the Object you are trying to load and the Object you need to load are equal then this is a circular reference and an error is thrown - - - The ConfigurationLoader is not currently capable of detecting more subtle circular references - - + Graph - + - Creates a URI Node that refers to some Configuration property/type + Applies inference to the Input Graph and outputs the inferred information to the Output Graph - Configuration Graph - QName of the property/type - - - - The QName provides should be of the form dnr:qname - the dnr prefix will be automatically be considered to be to the Configuration Namespace which is defined by the ConfigurationNamespace constant. - - - This function uses caching to ensure that URI Nodes aren't needlessly recreated in order to save memory. - - + Graph to apply inference to + Graph inferred information is output to - + - Clears the Object Loader cache (this is not recommended) + Imports any Concept heirarchy information from the given Graph into the Reasoners Knowledge Base in order to initialise the Reasoner + Graph to import from - - This method should only be invoked in cases where you have attempted to load an object and some error occurred which was external to dotNetRDF e.g. 
network connectivity problem and - + Looks for Triples defining SKOS concepts and relating them to narrower and broader concepts - + - Gets all the values given for a property of a given Object in the Configuration Graph + An Inference Engine that uses SKOS Concept Hierarchies - Configuration Graph - Object Node - Property Node - - Enumeration of values given for the property for the Object - - + - Gets all the literal values given for a property of a given Object in the Configuration Graph + Applies inference to the Input Graph and outputs the inferred information to the Output Graph - Configuration Graph - Object Node - Property Node - - - - Only returns the value part of Literal Nodes which are given as values for the property i.e. ignores all non-Literals and discards any language/data type from Literals - - + Graph to apply inference to + Graph inferred information is output to - + - Gets the first value given for a property of a given Object in the Configuration Graph + Class for representing errors that occur while querying RDF - Configuration Graph - Object Node - Property Node - - First value given for the property of the Object - - + - Gets the first value given for the first found property of a given Object in the Configuration Graph + Creates a new RDF Query Exception - Configuration Graph - Object Node - Properties - - First value given for the first property of the Object which is matched - + Error Message - + - Gets the String value or null of the first instance of a property for a given Object in the Configuration Graph where the value for the property is a Literal Node + Creates a new RDF Query Exception - Configuration Graph - Object Node - Property Node - - - String value of the first instance of the property or a null if no values or not a literal value - - - If you want the String value regardless of Node type then use the GetConfigurationValue function instead - - + Error Message + Exception that caused this Exception - + - Gets the String 
value or null of the first instance of the first property for a given Object in the Configuration Graph where the value for the property is a Literal Node + Class for representing Timeout errors that occur while querying RDF - Configuration Graph - Object Node - Property Nodes - - - String value of the first instance of the first property or a null if no values or not a literal value - - - If you want the String value regardless of Node type then use the GetConfigurationValue function instead - - - + - Gets the String value or null of the first instance of a property for a given Object in the Configuration Graph + Creates a new RDF Query Timeout Exception - Configuration Graph - Object Node - Property Node - + Error Message - + - Gets the String value or null of the first instance of the first property for a given Object in the Configuration Graph + Class for representing Exceptions occurring in RDF reasoners - Configuration Graph - Object Node - Property Nodes - - + - Gets the Boolean value or a given default of the first instance of a property for a given Object in the Configuration Graph + Creates a new RDF Reasoning Exception - Configuration Graph - Object Node - Property Node - Default Value to return if there is no valid boolean value - - If there is a valid boolean value for the property then that is returned, in any other case the given Default Value is returned - + Error Message - + - Gets the Boolean value or a given default of the first instance of the first property for a given Object in the Configuration Graph + Creates a new RDF Reasoning Exception - Configuration Graph - Object Node - Property Nodes - Default Value to return if there is no valid boolean value - - If there is a valid boolean value for any property then that is returned, in any other case the given Default Value is returned - + Error Message + Exception that caused this exception - + - Gets the 64 bit Integer value or a given default of the first instance of a property for a given 
Object in the Configuration Graph + Class for representing Termination errors - Configuration Graph - Object Node - Property Node - Default Value to return if there is no valid boolean value - - If there is a valid integer value for the property then that is returned, in any other case the given Default Value is returned - - + - Gets the 64 bit Integer value or a given default of the first instance of the first property for a given Object in the Configuration Graph + Creates a new RDF Query Termination Exception - Configuration Graph - Object Node - Property Nodes - Default Value to return if there is no valid boolean value - - If there is a valid integer value for any property then that is returned, in any other case the given Default Value is returned - - + - Gets the 64 bit Integer value or a given default of the first instance of a property for a given Object in the Configuration Graph + Class for representing Path Found terminations - Configuration Graph - Object Node - Property Node - Default Value to return if there is no valid boolean value - - If there is a valid integer value for the property then that is returned, in any other case the given Default Value is returned - - + - Gets the 64 bit Integer value or a given default of the first instance of the first property for a given Object in the Configuration Graph + Creates a new Path Found exception - Configuration Graph - Object Node - Property Nodes - Default Value to return if there is no valid boolean value - - If there is a valid integer value for any property then that is returned, in any other case the given Default Value is returned - - + - Gets the Username and Password specified for a given Object + + Namespace for Query Classes which provide querying capabilities on RDF. + + + Query capabilities are centered around support for the SPARQL standard. You can execute full SPARQL 1.1 queries over in-memory data or submit queries to remote SPARQL endpoints. 
+ - Configuration Graph - Object Node - Whether settings may be specified using the dnr:credentials property - Username - Password - - Username and/or Password will be null if there is no value specified for the relevant properties - - + - Gets whether the given Object has already been loaded and cached + + Namespace for Aggregate classes which implement Aggregate functions for SPARQL + - Configuration Graph - Object Node - - - If this returns true then loading that object again should be essentially instantaneous as it will come from the cache - - + - Loads the Object identified by the given Node as an object of the given type based on information from the Configuration Graph + Namespace for aggregates provided by the Leviathan function library - Configuration Graph - Object Node - Target Type - - - - Callers of this method should be careful to check that the Object returned is of a usable type to them. The Target Type parameter does not guarantee that the return value is of that type it is only used to determine which registered instances of IObjectFactory are potentially capable of creating the desired Object - - - Callers should also take care that any Objects returned from this method are disposed of when the caller no longer has a use for them as otherwise the reference kept in the cache here will cause the Object to remain in-memory consuming resources - - - + - Loads the Object identified by the given Node based on information from the Configuration Graph + A Custom aggregate which requires the Expression to evaluate to true for all Sets in the Group - Configuration Graph - Object Node - - - - Use this overload when you have a Node which identifies an Object and you don't know what the type of that Object is. This function looks up the dnr:type property for the given Object and then calls the other version of this function providing it with the relevant type information. 
- - - + - Attempts to find the Default Type to load an Object as when no explicit dnr:type property has been declared but an rdf:type property has been declared giving a valid Configuration Class + Creates a new All Aggregate - Configuration Graph - Object Node - - - - Note: Only some configuration classes have corresponding default types, in general it is recommended that Configuration Graphs should always use the dnr:type property to explicitly state the intended type of an Object - - + Expression - + - Attempts to return the Default Type to load an Object as when there is no dnr:type property but there is a rdf:type property + Creates a new All Aggregate - Type URI declared by the rdf:type property - + Expression + Whether a DISTINCT modifier applies - + - Attempts to resolve special <appsettings> URIs into actual values + Applies the Aggregate to see if the expression evaluates true for every member of the Group - - + Evaluation Context + Binding IDs - - These special URIs have the form <appsetting:Key> where Key is the key for an appSetting in your applications configuration file. When used these URIs are resolved at load time into the actual values from your configuration file. This allows you to avoid spreading configuration data over multiple files since you can specify things like connection settings in the Application Config file and then simply reference them in the dotNetRDF configuration file. 
- - - Warning: This feature is not supported in the Silverlight build - + Does lazy evaluation - as soon as it encounters a false/error it will return false - + - Creates a new instance of , which - loads an existing configuration graph and applies auto-configuration + Gets the String Representation of the Aggregate + - + - Creates a new instance of , which - loads an existing configuration graph and optionally applies auto-configuration + Gets the Functor of the Aggregate - + - Creates a new instance of , which - loads an existing configuration graph and applies auto-configuration + A Custom aggregate which requires the Expression to evaluate to true for at least one of the Sets in the Group - + - Creates a new instance of , which - loads an existing configuration graph and optionally applies auto-configuration + Creates a new Any Aggregate + Expression - + - Creates a new instance of , which - loads an existing configuration graph from file and applies auto-configuration + Creates a new Any Aggregate + Expression + Whether a DISTINCT modifer applies - + - Creates a new instance of , which - loads an existing configuration graph and optionally applies auto-configuration + Applies the Aggregate to see if the expression evaluates true for any member of the Group + Evaluation Context + Binding IDs + + + Does lazy evaluation - as soon as it encounters a true it will return true + - + - Loads the Object identified by the given blank node identifier as an object of the given type based on information from the Configuration Graph + Gets the String Representation of the Aggregate - - See remarks under - + - + - Loads the Object identified by the given URI as an object of the given type based on information from the Configuration Graph + Gets the Functor of the Aggregate - - See remarks under - - + - Loads the Object identified by the given blank node identifier as an + Class representing MEDIAN Aggregate Functions - - See remarks under - - + - Loads the Object identified 
by the given URI as an + Creates a new MEDIAN Aggregate - - See remarks under - + Variable Expression - + - Registers an Object Factory with the Configuration Loader + Creates a new MEDIAN Aggregate - Object Factory + Expression - + - Gets/Sets the in-use Path Resolver + Creates a new MEDIAN Aggregate + Variable Expression + Whether a DISTINCT modifier applies - + - Resolves a Path using the in-use path-resolver + Creates a new MEDIAN Aggregate - Path to resolve - + Expression + Whether a DISTINCT modifer applies - + - Marker class used in the ConfigurationLoader Object cache to mark objects which are unloadable due to some errors to stop the loader repeatedly trying to load an Object whose configuration is invalid, incomplete or otherwise erroneous. + Applies the Median Aggregate function to the results + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Interface for Object Factories which are factory classes that can create Objects based on configuration information in a Graph + Gets the String representation of the Aggregate + - + - Attempts to load an Object of the given type identified by the given Node and returned as the Type that this loader generates + Gets the Functor of the Aggregate - Configuration Graph - Object Node - Target Type - Created Object - True if the loader succeeded in creating an Object - - - The Factory should not throw an error if some required configuration is missing as another factory further down the processing chain may still be able to create the object. If the factory encounters errors and all the required configuration information is present then that error should be thrown i.e. class instantiation throws an error or a call to load an object that this object requires fails. 
- - - + - Returns whether this Factory is capable of creating objects of the given type + Class representing MODE Aggregate Functions - Target Type - - + - Interface for Objects which can have their configuration serialized to RDF + Creates a new MODE Aggregate + Variable Expression - + - Serializes the Configuration in the given context + Creates a new MODE Aggregate - Configuration Serialization Context + Expression - + - Inteface for Objects which can resolve paths specified for Configuration properties + Creates a new MODE Aggregate + Variable Expression + Whether a DISTINCT modifier applies - + - Resolves a Path + Creates a new MODE Aggregate - Path - + Expression + Whether a DISTINCT modifier applies - + - Factory class for producing IStorageProvider and instances from Configuration Graphs + Applies the Mode Aggregate function to the results + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Tries to load a Generic IO Manager based on information from the Configuration Graph + Gets the String representation of the Aggregate - Configuration Graph - Object Node - Target Type - Output Object - + - Gets whether this Factory can load objects of the given Type + Gets the Functor of the Aggregate - Type - - + - A decorator for graph collections that allows for graphs to be loaded on demand if they don't exist in the underlying graph collection + A Custom aggregate which requires the Expression to evaluate to false/error for all Sets in the Group - + - Creates a new decorator + Creates a new None Aggregate + Expression - + - Creates a new decorator over the given graph collection + Creates a new None Aggregate - Graph Collection + Expression + Whether a DISTINCT modifer applies - + - Checks whether the collection contains a Graph invoking an on-demand load if not present in the underlying collection + Applies the Aggregate to see if the expression evaluates false/error for every member of the Group - Graph URI + Evaluation Context + Binding 
IDs + + Does lazy evaluation - as soon as it encounters a true it will return false + - + - Loads a Graph on demand + Gets the String Representation of the Aggregate - URI of the Graph to load - A Graph if it could be loaded and throws an error otherwise + - + - A decorator for graph collections where graphs not in the underlying graph collection can be loaded on-demand from the Web as needed + Gets the Functor of the Aggregate - + - Creates a new Web Demand Graph Collection which loads Graphs from the Web on demand + Class representing NMAX Aggregate Functions + + Only operates over numeric data which is typed to one of the supported SPARQL Numeric types (integers, decimals and doubles) + - + - Creates a new Web Demand Graph Collection which loads Graphs from the Web on demand + Creates a new NMAX Aggregate - Collection to decorate + Variable Expression - + - Tries to load a Graph on demand from a URI + Creates a new NMAX Aggregate - Graph URI - + Expression - + - A decorator for graph collection where graphs not in the underlying graph collection can be loaded on-demand from the Files on Disk as needed + Creates a new NMAX Aggregate + Variable Expression + Whether a DISTINCT modifier applies - + - Creates a new Disk Demand Graph Collection which loads Graphs from the Web on demand + Creates a new NMAX Aggregate + Expression + Whether a DISTINCT modifier applies - + - Creates a new Disk Demand Graph Collection + Applies the Numeric Max Aggregate function to the results - Collection to decorate + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Tries to load a Graph on demand + Gets the String representation of the Aggregate - - + - Abstract Base Implementation of the IGraph interface + Gets the Functor of the Aggregate - + - Collection of Triples in the Graph + Class representing NMIN Aggregate Functions + + Only operates over numeric data which is typed to one of the supported SPARQL Numeric types (integers, decimals and doubles) + - + - 
Namespace Mapper + Creates a new NMIN Aggregate + Variable Expression - + - Base Uri of the Graph + Creates a new NMIN Aggregate + Expression - + - Blank Node ID Mapper + Creates a new NMIN Aggregate + Variable Expression + Whether a DISTINCT modifier applies - + - Creates a new Base Graph using the given Triple Collection + Creates a new NMIN Aggregate - Triple Collection to use + Expression + Whether a DISTINCT modifier applies - + - Creates a new Base Graph which uses the default as the Triple Collection + Applies the Numeric Min Aggregate function to the results + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Creates a Graph from the given Serialization Information + Gets the String representation of the Aggregate - Serialization Information - Streaming Context + - + - Gets the set of Triples described in this Graph + Gets the Functor of the Aggregate - + - Gets the set of Nodes which make up this Graph + Namespace for the built-in SPARQL aggregates - + - Gets the Namespace Mapper for this Graph which contains all in use Namespace Prefixes and their URIs + Class representing AVG Aggregate Functions - - + - Gets the current Base Uri for the Graph + Creates a new AVG Aggregate - - This value may be changed during Graph population depending on whether the Concrete syntax allows the Base Uri to be changed and how the Parser handles this - + Variable Expression + Whether a DISTINCT modifier applies - + - Gets whether a Graph is Empty ie. 
Contains No Triples or Nodes + Creates a new AVG Aggregate + Expression + Whether a DISTINCT modifier applies - + - Asserts a Triple in the Graph + Creates a new AVG Aggregate - The Triple to add to the Graph + Variable Expression - + - Asserts a List of Triples in the graph + Creates a new AVG Aggregate - List of Triples in the form of an IEnumerable + Expression - + - Retracts a Triple from the Graph + Applies the Average Aggregate function to the results - Triple to Retract - Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted + Evaluation Context + Binding IDs over which the aggregate applies + - + - Retracts a enumeration of Triples from the graph + Gets the String representation of the Aggregate - Enumeration of Triples to retract + - + - Clears all Triples from the Graph + Gets the Functor of the Aggregate - - - The Graph will raise the ClearRequested event at the start of the Clear operation which allows for aborting the operation if the operation is cancelled by an event handler. On completing the Clear the Cleared event will be raised. 
- - - + - Creates a New Blank Node with an auto-generated Blank Node ID + Class representing COUNT Aggregate Function - - + - Creates a New Blank Node with a user-defined Blank Node ID + Creates a new COUNT Aggregate - Node ID to use - + Variable Expression - + - Creates a New Literal Node with the given Value + Creates a new Count Aggregate - String value of the Literal - + Expression - + - Creates a New Literal Node with the given Value and Language Specifier + Counts the results - String value of the Literal - Language Specifier of the Literal + Evaluation Context + Binding IDs over which the Aggregate applies - + - Creates a new Literal Node with the given Value and Data Type + Gets the String representation of the Aggregate - String value of the Literal - URI of the Data Type - + - Creates a new URI Node that refers to the Base Uri of the Graph + Gets the Functor of the Aggregate - - + - Creates a new URI Node with the given URI + Class representing COUNT(*) Aggregate Function - URI for the Node - - Generally we expect to be passed an absolute URI, while relative URIs are permitted the behaviour is less well defined. If there is a Base URI defined for the Graph then relative URIs will be automatically resolved against that Base, if the Base URI is not defined then relative URIs will be left as is. In this case issues may occur when trying to serialize the data or when accurate round tripping is required. 
+ Differs from a COUNT in that it justs counts rows in the results - + - Creates a new URI Node with the given QName + Creates a new COUNT(*) Aggregate - QName for the Node - - Internally the Graph will resolve the QName to a full URI, throws an RDF Exception when this is not possible - + - Creates a new Variable Node + Counts the results - Variable Name + Evaluation Context + Binding IDs over which the Aggregate applies - + - Creates a new Graph Literal Node with its value being an Empty Subgraph + Gets the String representation of the Aggregate - + - Creates a new Graph Literal Node with its value being the given Subgraph + Gets the Functor of the Aggregate - Subgraph this Node represents - - + - Returns the Blank Node with the given Identifier + Gets the Arguments of the Aggregate - The Identifier of the Blank Node to select - Either the Blank Node or null if no Node with the given Identifier exists - + - Returns the LiteralNode with the given Value in the given Language if it exists + Class representing COUNT(DISTINCT *) Aggregate Function - The literal value of the Node to select - The Language Specifier for the Node to select - Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists - + - Returns the LiteralNode with the given Value if it exists + Creates a new COUNT(DISTINCT*) Aggregate - The literal value of the Node to select - Either the LiteralNode Or null if no Node with the given Value exists - The LiteralNode in the Graph must have no Language or DataType set - + - Returns the LiteralNode with the given Value and given Data Type if it exists + Counts the results - The literal value of the Node to select - The Uri for the Data Type of the Literal to select - Either the LiteralNode Or null if no Node with the given Value and Data Type exists + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Returns the UriNode with the given QName if it exists + Gets the String representation of the 
Aggregate - The QName of the Node to select - + - Returns the UriNode with the given Uri if it exists + Gets the Functor of the Aggregate - The Uri of the Node to select - Either the UriNode Or null if no Node with the given Uri exists - + - Gets all the Triples involving the given Uri + Gets the Arguments of the Aggregate - The Uri to find Triples involving - Zero/More Triples - + - Gets all the Triples involving the given Node + Class representing COUNT(DISTINCT ?x) Aggregate Function - The Node to find Triples involving - Zero/More Triples - + - Gets all the Triples with the given Uri as the Object + Creates a new COUNT(DISTINCT ?x) Aggregate - The Uri to find Triples with it as the Object - Zero/More Triples + Variable Expression - + - Gets all the Triples with the given Node as the Object + Creates a new COUNT DISTINCT Aggregate - The Node to find Triples with it as the Object + Expression + + + + Counts the results + + Evaluation Context + Binding IDs over which the Aggregate applies - + - Gets all the Triples with the given Node as the Predicate + Gets the String representation of the Aggregate - The Node to find Triples with it as the Predicate - + - Gets all the Triples with the given Uri as the Predicate + Gets the Functor of the Aggregate - The Uri to find Triples with it as the Predicate - Zero/More Triples - + - Gets all the Triples with the given Node as the Subject + Gets the Arguments of the Aggregate - The Node to find Triples with it as the Subject - Zero/More Triples - + - Gets all the Triples with the given Uri as the Subject + Class representing GROUP_CONCAT Aggregate - The Uri to find Triples with it as the Subject - Zero/More Triples - + - Selects all Triples with the given Subject and Predicate + Creates a new GROUP_CONCAT aggregate - Subject - Predicate - + Expression + Should a distinct modifer be applied - + - Selects all Triples with the given Subject and Object + Creates a new GROUP_CONCAT aggregate - Subject - Object - + Expression - + 
- Selects all Triples with the given Predicate and Object + Creates a new GROUP_CONCAT aggregate - Predicate - Object + Expression + Separator Expression + Should a distinct modifer be applied + + + + Creates a new GROUP_CONCAT aggregate + + Expression + Separator Expression + + + + Applies the aggregate over the given bindings + + Evaluation Context + Binding IDs - + - Gets whether a given Triple exists in this Graph + Gets the String representation of the Aggregate - Triple to test - + - Merges another Graph into the current Graph + Gets the value of the aggregate for the given binding - Graph to Merge into this Graph - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. + Evaluation Context + Binding ID + - + - Merges another Graph into the current Graph + Gets the Functor of the Aggregate - Graph to Merge into this Graph - Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in - - - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - - - The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. 
When the Merge operation is completed the Merged event is raised - - - + - Determines whether a Graph is equal to another Object + Class representing MAX Aggregate Functions - Object to test - - - - A Graph can only be equal to another Object which is an IGraph - - - Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals - - - + - Determines whether this Graph is equal to the given Graph + Creates a new MAX Aggregate - Graph to test for equality - Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes - - - See for documentation of the equality algorithm used. - + Variable Expression + Whether a DISTINCT modifier applies - + - Checks whether this Graph is a sub-graph of the given Graph + Creates a new MAX Aggregate - Graph - + Expression + Whether a DISTINCT modifier applies - + - Checks whether this Graph is a sub-graph of the given Graph + Creates a new MAX Aggregate - Graph - Mapping of Blank Nodes - + Variable Expression - + - Checks whether this Graph has the given Graph as a sub-graph + Creates a new MAX Aggregate - Graph - + Expression - + - Checks whether this Graph has the given Graph as a sub-graph + Creates a new MAX Aggregate - Graph - Mapping of Blank Nodes - + Distinct Modifier + Expression - + - Computes the Difference between this Graph the given Graph + Applies the Max Aggregate function to the results - Graph - - - - Produces a report which shows the changes that must be made to this Graph to produce the given Graph - - + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Helper function for Resolving QNames to URIs + Gets the String representation of the Aggregate - QName to resolve to a Uri - + - Creates a new unused Blank Node ID and returns it + Gets the Functor of the Aggregate - - + - Converts a Graph into a DataTable using the explicit cast operator defined by this class + Class representing MIN Aggregate Functions - - A 
DataTable containing three Columns (Subject, Predicate and Object) all typed as INode with a Row per Triple - - - Warning: Not available under builds which remove the Data Storage layer from dotNetRDF e.g. Silverlight - - + - Casts a Graph to a DataTable with all Columns typed as INode (Column Names are Subject, Predicate and Object + Creates a new MIN Aggregate - Graph to convert - - A DataTable containing three Columns (Subject, Predicate and Object) all typed as INode with a Row per Triple - - - Warning: Not available under builds which remove the Data Storage layer from dotNetRDF e.g. Silverlight - + Variable Expression + Whether a DISTINCT modifier applies - + - Event which is raised when a Triple is asserted in the Graph + Creates a new MIN Aggregate + Expression + Whether a DISTINCT modifier applies - + - Event which is raised when a Triple is retracted from the Graph + Creates a new MIN Aggregate + Variable Expression - + - Event which is raised when the Graph contents change + Creates a new MIN Aggregate + Expression - + - Event which is raised just before the Graph is cleared of its contents + Applies the Min Aggregate function to the results + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Event which is raised after the Graph is cleared of its contents + Gets the String representation of the Aggregate + - + - Event which is raised when a Merge operation is requested on the Graph + Gets the Functor of the Aggregate - + - Event which is raised when a Merge operation is completed on the Graph + Class representing the SAMPLE aggregate - + - Event Handler which handles the Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event + Creates a new SAMPLE Aggregate - Sender - Triple Event Arguments + Expression - + - Helper method for raising the Triple Asserted event manually + Applies the SAMPLE Aggregate - Triple Event Arguments + Evaluation Context + Binding IDs + - + - Helper method for 
raising the Triple Asserted event manually + Gets the String representation - Triple + - + - Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event + Gets the Functor of the Aggregate - Sender - Triple Event Arguments - + - Helper method for raising the Triple Retracted event manually + Class representing SUM Aggregate Functions - - + - Helper method for raising the Triple Retracted event manually + Creates a new SUM Aggregate - Triple + Variable Expression + Whether a DISTINCT modifier applies - + - Helper method for raising the Changed event + Creates a new SUM Aggregate - Triple Event Arguments + Expression + Whether a DISTINCT modifier applies - + - Helper method for raising the Changed event + Creates a new SUM Aggregate + Variable Expression - + - Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + Creates a new SUM Aggregate - True if the operation can continue, false if it should be aborted + Expression - + - Helper method for raising the Cleared event + Applies the Sum Aggregate function to the results + Evaluation Context + Binding IDs over which the Aggregate applies + - + - Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + Gets the String representation of the Aggregate - True if the operation can continue, false if it should be aborted + - + - Helper method for raising the Merged event + Gets the Functor of the Aggregate - + - Helper method for attaching the necessary event Handlers to a Triple Collection + Namespace for aggregates provided by the XPath function library - Triple Collection - - May be useful if you replace the Triple Collection after instantiation e.g. 
as done in SparqlView's - - + - Helper method for detaching the necessary event Handlers from a Triple Collection + Represents the XPath fn:string-join() aggregate - Triple Collection - - May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's - - + - Disposes of a Graph + Separator Expression - + - Gets the Serialization Information for serializing a Graph + Creates a new XPath String Join aggregate which uses no separator - Serialization Information - Streaming Context + Expression - + - Gets the Schema for XML Serialization + Creates a new XPath String Join aggregate - + Expression + Separator Expression - + - Reads the data for XML deserialization + Applies the Aggregate in the given Context over the given Binding IDs - XML Reader + Evaluation Context + Binding IDs + - + - Writes the data for XML serialization + Gets the value of a member of the Group for concatenating as part of the result for the Group - XML Writer + Evaluation Context + Binding ID + - + - Abstract Base Class for Graph Collections + Gets the separator to use in the concatenation - Designed to allow the underlying storage of a Graph Collection to be changed at a later date without affecting classes that use it + Evaluation Context + Binding ID + - + - Checks whether the Graph with the given Uri exists in this Graph Collection + Gets the String representation of the function - Graph Uri to test - - The null URI is used to reference the Default Graph - - + - Adds a Graph to the Collection + Gets the Functor of the Expression - Graph to add - Sets whether the Graph should be merged with an existing Graph of the same Uri if present - + - Removes a Graph from the Collection + Abstract Base Class for Aggregate Functions - Uri of the Graph to remove - - The null URI is used to reference the Default Graph - - + - Gets the number of Graphs in the Collection + Expression that the aggregate operates over - + - Provides access to the Graph URIs of Graphs in the 
Collection + Whether a DISTINCT modifer is applied - + - Gets a Graph from the Collection + Base Constructor for Aggregates - Graph Uri - - - The null URI is used to reference the Default Graph - + Expression that the aggregate is over - + - Disposes of the Graph Collection + Base Constructor for Aggregates - Invokes the Dispose() method of all Graphs contained in the Collection + Expression that the aggregate is over + Whether a Distinct modifer is applied - + - Gets the Enumerator for the Collection + Applies the Aggregate to the Result Binder + Evaluation Context - + - Gets the Enumerator for this Collection + Applies the Aggregate to the Result Binder + Evaluation Context + Enumerable of Binding IDs over which the Aggregate applies - + - Event which is raised when a Graph is added to the Collection + Expression that the Aggregate executes over - + - Event which is raised when a Graph is removed from the Collection + Gets the String representation of the Aggregate + - + - Helper method which raises the Graph Added event manually + Gets the Type of the Expression - Graph - + - Helper method which raises the Graph Removed event manually + Gets the Functor of the Aggregate - Graph - + - Abstract Base Class for a Triple Store + Gets the Arguments of the Expression - + - Collection of Graphs that comprise the Triple Store + Interface for SPARQL Aggregates which can be used to calculate aggregates over Results - + - Event Handler definitions + Applies the Aggregate to the Result Binder and returns a single Node as a Result + Evaluation Context - + - Event Handler definitions + Applies the Aggregate to the Result Binder and returns a single Node as a Result + Evaluation Context + Enumerable of Binding IDs which the aggregate is applied over - + - Event Handler definitions + Gets the Expression that the Aggregate is applied to - + - Event Handler definitions + Gets the Type of the Aggregate - + - Event Handler definitions + Gets the URI/Keyword of the Aggregate - + - 
Creates a new Base Triple Store + Gets the Arguments of the Aggregate - Graph Collection to use - + - Gets whether the Triple Store is empty + + Contains the classes which model the mapping of SPARQL queries into the SPARQL Algebra. This namespace is a key component of the new Leviathan SPARQL engine introduced in the 0.2.x builds of dotNetRDF + - + - Gets the Collection of Graphs that comprise this Triple Store + Static Helper class containing extensions used in the Algebra evaluation process - + - Gets all the Triples in the Triple Store + Calculates the product of two mutlisets asynchronously with a timeout to restrict long running computations + Multiset + Other Multiset + Timeout, if <=0 no timeout is used and product will be computed sychronously + - + - Adds a Graph into the Triple Store + Delegate for generating product of two multisets asynchronously - Graph to add + Multiset + Other Multiset + Mutliset to generate the product in + Stop Token - + - Adds a Graph into the Triple Store using the chosen Merging Behaviour + Method for generating product of two multisets asynchronously - Graph to add - Whether the Graph should be merged with an existing Graph with the same Base Uri + Multiset + Other Multiset + Mutliset to generate the product in + Stop Token - + - Adds a Graph into the Triple Store which is retrieved from the given Uri + Token passed to asynchronous code to allow stop signalling - Uri of the Graph to load - + - Adds a Graph into the Triple Store which is retrieved from the given Uri using the chosen Merging Behaviour + Gets/Sets whether the code should stop - Graph to add - Whether the Graph should be merged with an existing Graph with the same Base Uri - Important: Under Silverlight/Windows Phone 7 this will happen asynchronously so the Graph may not be immediatedly available in the store + Once set to true cannot be reset - + - Removes a Graph from the Triple Store + Represents a LeftJoin predicated on the existence/non-existence of joinable 
sets on the RHS for each item on the LHS - Uri of the Graph to Remove - + - Checks whether a Graph with the given Base Uri exists in the Triple Store + Creates a new Exists Join - Graph Uri - True if the Graph exists in the Triple Store + LHS Pattern + RHS Pattern + Whether a joinable set must exist on the RHS for the LHS set to be preserved - + - Gets the Graph with the given URI + Evaluates an ExistsJoin - Graph URI + Evaluation Context - - - Event which is raised when a Graph is added - - - - - Event which is raised when a Graph is removed - - - + - Event which is raised when a Graphs contents changes + Gets the Variables used in the Algebra - + - Event which is raised when a Graph is cleared + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Event which is raised when a Graph has a merge operation performed on it + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Helper method for raising the Graph Added event manually + Gets the LHS of the Join - Graph - + - Helper method for raising the Graph Added event manually + Gets the RHS of the Join - Graph Event Arguments - + - Event Handler which handles the Graph Added event from the underlying Graph Collection and raises the Triple Store's Graph Added event + Gets whether this is an EXISTS join - Sender - Graph Event Arguments - Override this method if your Triple Store implementation wishes to take additional actions when a Graph is added to the Store - + - Helper method for raising the Graph Removed event manually + Gets the String representation of the Algebra - Graph + - + - Helper method for raising the Graph Removed event manually + Converts the Algebra back to a SPARQL Query - Graph Event Arguments + - + - Event Handler which handles the Graph Removed event from the underlying Graph Collection and raises the Triple Stores's Graph Removed event + Converts the Algebra 
back to a Graph Pattern - Sender - Graph Event Arguments + - + - Helper method for raising the Graph Changed event manually + Transforms both sides of the Join using the given Optimiser - Graph Event Arguments + Optimser + - + - Event Handler which handles the Changed event of the contained Graphs by raising the Triple Store's Graph Changed event + Transforms the LHS of the Join using the given Optimiser - Sender - Graph Event Arguments + Optimser + - + - Helper method for raising the Graph Changed event manually + Transforms the RHS of the Join using the given Optimiser - Graph + Optimser + - + - Helper method for raising the Graph Cleared event manually + Represents a LeftJoin predicated on an arbitrary filter expression - Graph Event Arguments - + - Event Handler which handles the Cleared event of the contained Graphs by raising the Triple Stores's Graph Cleared event + Creates a new LeftJoin where there is no Filter over the join - Sender - Graph Event Arguments + LHS Pattern + RHS Pattern - + - Helper method for raising the Graph Merged event manually + Creates a new LeftJoin where there is a Filter over the join - Graph Event Arguments + LHS Pattern + RHS Pattern + Filter to decide which RHS solutions are valid - + - Event Handler which handles the Merged event of the contained Graphs by raising the Triple Store's Graph Merged event + Evaluates the LeftJoin - Sender - Graph Event Arguments + Evaluation Context + - + - Helper method which attaches the Triple Store's Event Handlers to the relevant events of a Graph + Gets the Variables used in the Algebra - Graph - + - Helper method which detaches the Triple Store's Event Handlers from the relevant events of a Graph + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - - + - Disposes of the Triple Store + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Derived classes must override this to implement required disposal actions - + - A Namespace Mapper which has an explicit notion of Nesting + Gets the Filter that applies across the Join - + - Gets the Nesting Level at which the given Namespace is definition is defined + Gets the LHS of the Join - Prefix - - + - Increments the Nesting Level + Gets the RHS of the Join - + - Decrements the Nesting Level + Gets the String representation of the Algebra - - When the Nesting Level is decremented any Namespaces defined at a greater Nesting Level are now out of scope and so are removed from the Mapper - + - + - Gets the current Nesting Level + Converts the Algebra back to a SPARQL Query + - + - An indexed triple collection that uses our and implementations under the hood for the index structures + Converts the Algebra back to a SPARQL Query - - - A variation on which structures the indexes slightly differently, this may give differing performance and reduced memory usage in some scenarios. 
- - + - + - Creates a new Tree Indexed triple collection + Transforms both sides of the Join using the given Optimiser + Optimser + - + - Indexes a Triple + Transforms the LHS of the Join using the given Optimiser - Triple + Optimser + - + - Helper for indexing triples + Transforms the RHS of the Join using the given Optimiser - Node to index by - Triple - Index to insert into - Comparer for the Index - Hash Function for the Index + Optimser + - + - Unindexes a triple + Represents a Join - Triple - + - Helper for unindexing triples + Creates a new Join - Node to index by - Triple - Index to remove from + Left Hand Side + Right Hand Side - + - Adds a Triple to the collection + Creates either a Join or returns just one of the sides of the Join if one side is the empty BGP - Triple + Left Hand Side + Right Hand Side - + - Checks whether the collection contains a given Triple + Evalutes a Join - Triple + Evaluation Context - + - Gets the count of triples in the collection + Gets the Variables used in the Algebra - + - Deletes a triple from the collection + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Triple - - + - Gets the specific instance of a Triple in the collection + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Triple - - + - Gets all the triples with a given object + Gets the LHS of the Join - Object - - + - Gets all the triples with a given predicate + Gets the RHS of the Join - Predicate - - + - Gets all the triples with a given subject + Gets the String representation of the Join - Subject - + - Gets all the triples with a given predicate and object + Converts the Algebra back to a SPARQL Query - Predicate - Object - + - Gets all the triples with a given subject and object + Converts the Algebra back to a SPARQL Query - Subject - Object - + - Gets all the triples with a given subject and predicate + Transforms both sides of the Join using the given Optimiser - Subject - Predicate + Optimser - + - Gets the Object Nodes + Transforms the LHS of the Join using the given Optimiser + Optimser + - + - Gets the Predicate Nodes + Transforms the RHS of the Join using the given Optimiser + Optimser + - + - Gets the Subject Nodes + Represents a Union - + - Disposes of the collection + Creates a new Union + LHS Pattern + RHS Pattern - + - Gets the enumerator for the collection + Evaluates the Union + - + - Abstract decorator for Graphs to make it easier to layer functionality on top of existing implementations + Gets the Variables used in the Algebra - + - Underlying Graph this is a wrapper around + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Creates a wrapper around the default Graph implementation, primarily required only for deserialization and requires that the caller call to properly wire up event handling + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - + - Creates a new wrapper around the given Graph + Gets the LHS of the Join - Graph - + - Deserialization Constructor + Gets the RHS of the Join - Serialization Information - Streaming Context - + - Gets/Sets the Base URI of the Graph + Gets the String representation of the Algebra + - + - Gets whether the Graph is empty + Converts the Algebra back to a SPARQL Query + - + - Gets the Namespace Map for the Graph + Converts the Algebra back to a SPARQL Query + - + - Gets the Nodes of the Graph + Transforms both sides of the Join using the given Optimiser + Optimser + - + - Gets the Triple Collection for the Graph + Transforms the LHS of the Join using the given Optimiser + Optimser + - + - Asserts a Triple in the Graph + Transforms the RHS of the Join using the given Optimiser - Triple + Optimser + - + - Asserts Triples in the Graph + Represents the Ask step of Query Evaluation - Triples + + Used only for ASK queries. Turns the final Multiset into either an IdentityMultiset if the ASK succeeds or a NullMultiset if the ASK fails + - + - Retracts a Triple from the Graph + Creates a new ASK - Triple + Inner Pattern - + - Retracts Triples from the Graph + Evaluates the ASK by turning the Results of evaluating the Inner Pattern to either an Identity/Null Multiset depending on whether there were any Results - Triples + Evaluation Context + - + - Clears the Graph + Gets the Inner Algebra - + - Creates a new Blank Node with the given Node ID + Gets the Variables used in the Algebra - Node ID - - + - Creates a new Blank Node + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - - + - Gets the next available Blank Node ID + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - - + - Creates a new Graph Literal Node with the given sub-graph + Gets the String representation of the Ask - Sub-graph - + - Creates a new Graph Literal Node + Converts the Algebra back to a SPARQL Query - + - Creates a new Literal Node + Throws an exception since an Ask() cannot be converted to a Graph Pattern - Value + Thrown since an Ask() cannot be converted to a Graph Pattern - + - Creates a new Literal Node with the given Datatype + Transforms the Inner Algebra using the given Optimiser - Value - Datatype URI + Optimiser - + - Creates a new Literal Node with the given Language + Represents a BGP which is a set of Triple Patterns - Value - Language - + + + An Ask BGP differs from a BGP in that rather than evaluating each Triple Pattern in turn it evaluates across all Triple Patterns. This is used for ASK queries where we are only concerned with whether a BGP matches and not in the specific solutions + + + An Ask BGP can only contain concrete Triple Patterns and/or FILTERs and not any of the other specialised Triple Pattern classes + + - + - Creates a new URI Node that references the Graphs Base URI + Creates a Streamed BGP containing a single Triple Pattern - + Triple Pattern - + - Creates a new URI Node from a QName + Creates a Streamed BGP containing a set of Triple Patterns - QName - + Triple Patterns - + - Creates a new URI Node + Determines whether a Triple Pattern can be evaluated using a Lazy ASK approach - URI + Triple Pattern - + - Creates a new Variable Node + Gets the number of Triple Patterns in the BGP - Variable Name - - + - Attempts to get the Blank Node with the given ID + Gets the Triple Patterns in the BGP - Node ID - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value and Language + Gets the Variables used in the Algebra - Value - Language - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value + Gets the 
enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Value - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value and Datatype + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Value - Datatype URI - The Node if it exists or null otherwise - + - Gets all the Triples involving the given URI + Gets whether the BGP is the emtpy BGP - The URI to find Triples involving - Zero/More Triples - + - Gets all the Triples involving the given Node + Evaluates the BGP against the Evaluation Context - The Node to find Triples involving - Zero/More Triples + Evaluation Context + - + - Gets all the Triples with the given URI as the Object + Gets the String representation of the Algebra - The URI to find Triples with it as the Object - Zero/More Triples + - + - Gets all the Triples with the given Node as the Object + Converts the Algebra back to a SPARQL Query - The Node to find Triples with it as the Object - + - Gets all the Triples with the given Node as the Predicate + Converts the BGP back to a Graph Pattern - The Node to find Triples with it as the Predicate - + - Gets all the Triples with the given Uri as the Predicate + Represents a Union - The Uri to find Triples with it as the Predicate - Zero/More Triples + + + An Ask Union differs from a standard Union in that if it finds a solution on the LHS it has no need to evaluate the RHS + + - + - Gets all the Triples with the given Node as the Subject + Creates a new Ask Union - The Node to find Triples with it as the Subject - Zero/More Triples + LHS Pattern + RHS Pattern - + - Gets all the Triples with the given Uri as the Subject + Evaluates the Ask Union - The Uri to find Triples with it as the Subject - Zero/More Triples + Evaluation Context + - + - Selects all Triples with the given Subject and Predicate + Gets the Variables used in the Algebra - Subject - 
Predicate - - + - Selects all Triples with the given Subject and Object + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Subject - Object - - + - Selects all Triples with the given Predicate and Object + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Predicate - Object - - + - Returns the UriNode with the given QName if it exists + Gets the LHS of the Join - The QName of the Node to select - - + - Returns the UriNode with the given Uri if it exists + Gets the RHS of the Join - The Uri of the Node to select - Either the UriNode Or null if no Node with the given Uri exists - + - Gets whether a given Triple exists in this Graph + Gets the String representation of the Algebra - Triple to test - + - Merges another Graph into the current Graph + Converts the Algebra back to a SPARQL Query - Graph to Merge into this Graph - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. + - + - Merges another Graph into the current Graph + Converts the Union back to Graph Patterns - Graph to Merge into this Graph - Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in - - - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - - - The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. 
When the Merge operation is completed the Merged event is raised - - + - + - Determines whether a Graph is equal to another Object + Transforms both sides of the Join using the given Optimiser - Object to test + Optimser - - - A Graph can only be equal to another Object which is an IGraph - - - Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals - - - + - Determines whether this Graph is equal to the given Graph + Transforms the LHS of the Join using the given Optimiser - Graph to test for equality - Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes - - - - The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs - - - Graph Equality is determined according to the following algorithm: - -
    -
  1. If the given Graph is null Graphs are not equal
  2. -
  3. If the given Graph is this Graph (as determined by Reference Equality) then Graphs are equal
  4. -
  5. If the Graphs have a different number of Triples they are not equal
  6. -
  7. Declare a list of Triples which are the Triples of the given Graph called OtherTriples
  8. -
  9. Declare two dictionaries of Nodes to Integers which are called LocalClassification and OtherClassification
  10. -
  11. For Each Triple in this Graph -
      -
    1. If it is a Ground Triple and cannot be found and removed from OtherTriples then Graphs are not equal since the Triple does not exist in both Graphs
    2. -
    3. If it contains Blank Nodes track the number of usages of this Blank Node in LocalClassification
    4. -
    -
  12. -
  13. If there are any Triples remaining in OtherTriples which are Ground Triples then Graphs are not equal since this Graph does not contain them
  14. -
  15. If all the Triples from both Graphs were Ground Triples and there were no Blank Nodes then the Graphs are equal
  16. -
  17. Iterate over the remaining Triples in OtherTriples and populate the OtherClassification
  18. -
  19. If the count of the two classifications is different the Graphs are not equal since there are differing numbers of Blank Nodes in the Graph
  20. -
  21. Now build two additional dictionaries of Integers to Integers which are called LocalDegreeClassification and OtherDegreeClassification. Iterate over LocalClassification and OtherClassification such that the corresponding degree classifications contain a mapping of the number of Blank Nodes with a given degree
  22. -
  23. If the count of the two degree classifications is different the Graphs are not equal since there are not the same range of Blank Node degrees in both Graphs
  24. -
  25. For All classifications in LocalDegreeClassification there must be a matching classification in OtherDegreeClassification else the Graphs are not equal
  26. -
  27. Then build a possible mapping using the following rules: -
      -
    1. Any Blank Node used only once should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    2. -
    3. Any Blank Node with a unique degree should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    4. -
    5. Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    6. -
    7. Build up lists of dependent pairs of Blank Nodes for both Graphs
    8. -
    9. Use these lists to determine if there are any independent nodes not yet mapped. These should be mapped to equivalent Blank Nodes in the other Graph, if this is not possible the Graphs are not equal
    10. -
    11. Use the Dependencies and existing mappings to generate a possible mapping
    12. -
    13. If a Complete Possible Mapping (there is a Mapping for each Blank Node from this Graph to the Other Graph) then test this mapping. If it succeeds then the Graphs are equal
    14. -
    15. Otherwise we now fallback to the Base Mapping and use it as a basis for Brute Forcing the possible solution space and testing every possibility until either a mapping works or we find the Graphs to be non-equal
    16. -
    -
  28. -
-
+ Optimser +
- + - Checks whether this Graph is a sub-graph of the given Graph + Transforms the RHS of the Join using the given Optimiser - Graph + Optimser - + - Checks whether this Graph is a sub-graph of the given Graph + Abstract Base Class for representing Multisets - Graph - Mapping of Blank Nodes - - + - Checks whether this Graph has the given Graph as a sub-graph + List of IDs that is used to return the Sets in order if the Multiset has been sorted - Graph - - + - Checks whether this Graph has the given Graph as a sub-graph + The number of results that would be returned without any limit clause to a query or -1 if not supported. Defaults to the same value as the Count member - Graph - Mapping of Blank Nodes - - + - Computes the Difference between this Graph the given Graph + Joins this Multiset to another Multiset - Graph + Other Multiset - - - Produces a report which shows the changes that must be made to this Graph to produce the given Graph - - - + - Helper function for Resolving QNames to URIs + Does a Left Join of this Multiset to another Multiset where the Join is predicated on the given Expression - QName to resolve to a Uri + Other Multiset + Expression - + - Converts the wrapped graph into a DataTable + Does an Exists Join of this Multiset to another Multiset where the Join is predicated on the existence/non-existence of a joinable solution on the RHS + Other Multiset + Whether a solution must exist in the Other Multiset for the join to be made - + - Event which is raised when a Triple is asserted in the Graph + Does a Minus Join of this Multiset to another Multiset where any joinable results are subtracted from this Multiset to give the resulting Multiset + Other Multiset + - + - Event which is raised when a Triple is retracted from the Graph + Does a Product of this Multiset and another Multiset + Other Multiset + - + - Event which is raised when the Graph contents change + Does a Union of this Multiset and another Multiset + Other Multiset + - + - Event 
which is raised just before the Graph is cleared of its contents + Determines whether the Multiset contains the given Value for the given Variable + Variable + Value + - + - Event which is raised after the Graph is cleared of its contents + Determines whether the Multiset contains the given Variable + Variable + - + - Event which is raised when a Merge operation is requested on the Graph + Determines whether the Mutliset is disjoint with the given Multiset + Multiset + - + - Event which is raised when a Merge operation is completed on the Graph + Adds a Set to the Mutliset + Set to add - + - Event Handler which handles the Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event + Adds a Variable to the Multiset - Sender - Triple Event Arguments + Variable - + - Helper method for raising the Triple Asserted event manually + Sets the variable ordering for the multiset - Triple Event Arguments + Variable Ordering - + - Helper method for raising the Triple Asserted event manually + Removes a Set (by ID) from the Multiset - Triple + ID - + - Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event + Sorts a Set based on the given Comparer - Sender - Triple Event Arguments + Comparer on Sets - + - Helper method for raising the Triple Retracted event manually + Returns whether the Multiset is Empty - - + - Helper method for raising the Triple Retracted event manually + Gets the Count of Sets in the Multiset - Triple - + - Event handler to help propogate Graph events from the underlying graph + Trims the Multiset of Temporary Variables - Sender - Arguments - + - Helper method for raising the Changed event + Trims the Multiset by removing all Values for the given Variable - Triple Event Arguments + Variable - + - Helper method for raising the Changed event + Gets the Variables in the Multiset - + - Event handler to help propogate Graph events from the 
underlying graph + Gets the Sets in the Multiset - Sender - Arguments - + - Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + Gets the IDs of Sets in the Multiset - True if the operation can continue, false if it should be aborted - + - Event handler to help propogate Graph events from the underlying graph + Retrieves the Set with the given ID - Sender - Arguments + ID + - + - Helper method for raising the Cleared event + Gets the string representation of the multiset (intended for debugging only) + - + - Event handler to help propogate Graph events from the underlying graph + Represents a BGP which is a set of Triple Patterns - Sender - Arguments - + - Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + The ordered list of triple patterns that are contained in this BGP - True if the operation can continue, false if it should be aborted - + - Event handler to help propogate Graph events from the underlying graph + Creates a new empty BGP - Sender - Arguments - + - Helper method for raising the Merged event + Creates a BGP containing a single Triple Pattern + Triple Pattern - + - Helper method for attaching the necessary event handlers to the underlying graph + Creates a BGP containing a set of Triple Patterns + Triple Patterns - + - Disposes of the wrapper and in doing so disposes of the underlying graph + Gets the number of Triple Patterns in the BGP - + - Gets the Serialization Information + Gets the Triple Patterns in the BGP - Serialization Information - Streaming Context - + - Gets the Schema for XML serialization + Evaluates the BGP against the Evaluation Context + Evaluation Context - + - Reads the data for XML deserialization + Gets the Variables used in the Algebra - XML Reader - + - Writes the data for XML serialization + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - XML Writer - + - Provides useful Extension Methods for use elsewhere in the Library + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Takes a single item and generates an IEnumerable containing only it + Gets whether the BGP is the emtpy BGP - Type of the enumerable - Item to wrap in an IEnumerable - - - This method taken from Stack Overflow - see here - - + - Determines whether the contents of two enumerables are disjoint + Returns the String representation of the BGP - Type Parameter - An Enumerable - Another Enumerable - + - Gets the Subset of Triples from an existing Enumerable that have a given Subject + Converts the Algebra back to a SPARQL Query - Enumerable of Triples - Subject to match - + - Gets the Subset of Triples from an existing Enumerable that have a given Predicate + Converts the BGP to a Graph Pattern - Enumerable of Triples - Predicate to match - + - Gets the Subset of Triples from an existing Enumerable that have a given Object + Represents a BINDINGS modifier on a SPARQL Query - Enumerable of Triples - Object to match - - + - Gets the Blank Nodes + Creates a new BINDINGS modifier - Nodes - + Bindings - + - Gets the Graph Literal Nodes + Evaluates the BINDINGS modifier - Nodes + Evaluation Context - + - Gets the Literal Nodes + Gets the Variables used in the Algebra - Nodes - - + - Gets the URI Nodes + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Nodes - - + - Gets the Variable Nodes + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Nodes - - + - Gets an Enhanced Hash Code for a Uri + Gets the Bindings + + + + + Gets the String representation of the Algebra - Uri to get Hash Code for - - The .Net Uri class Hash Code ignores the Fragment ID when computing the Hash Code which means that URIs with the same basic URI but different Fragment IDs have identical Hash Codes. This is perfectly acceptable and sensible behaviour for normal URI usage since Fragment IDs are only relevant to the Client and not the Server. But in the case of URIs in RDF the Fragment ID is significant and so we need in some circumstances to compute a Hash Code which includes this information. - - + - Gets an SHA256 Hash for a URI + Converts the Algebra back to a SPARQL Query - URI to get Hash Code for - + - Gets a SHA256 Hash for a String + Convers the Algebra back to a Graph Pattern - String to hash - + - Asserts a new Triple in the Graph + Represents a Distinct modifier on a SPARQL Query - Graph to assert in - Subject - Predicate - Object - Handy method which means you can assert a Triple by specifying the Subject, Predicate and Object without having to explicity declare a new Triple - + - Retracts a Triple from the Graph + Creates a new Distinct Modifier - Graph to retract from - Subject - Predicate - Object - Handy method which means you can retract a Triple by specifying the Subject, Predicate and Object without having to explicity declare a new Triple + Pattern - + - Asserts a list as a RDF collection and returns the node that represents the root of the RDF collection + Creates a new Distinct Modifier - Type of Objects - Graph to assert in - Objects to place in the collection - Mapping from Object Type to INode - - Either the blank node which is the root of the collection or rdf:nil for empty collections - + Inner Algebra + Whether to ignore temporary variables - + - Asserts a list as a RDF collection using an existing node as the list root + Evaluates the Distinct 
Modifier - Type of Objects - Graph to assert in - Root Node for List - Objects to place in the collection - Mapping from Object Type to INode + Evaluation Context + - + - Asserts a list as a RDF collection and returns the node that represents the root of the RDF collection + Gets the Variables used in the Algebra - Graph to assert in - Objects to place in the collection - - Either the blank node which is the root of the collection or rdf:nil for empty collections - - + - Asserts a list as a RDF collection using an existing node as the list root + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Graph to assert in - Root Node for List - Objects to place in the collection - + - Gets all the Triples that make up a list (aka a RDF collection) + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Graph - Root Node for List - Triples that make up the List - + - Gets all the Nodes which are the items of the list (aka the RDF collection) + Gets the Inner Algebra - Graph - Root Node for List - Nodes that are the items in the list - + - Gets all the Nodes which are the intermediate nodes in the list (aka the RDF collection). 
These represents the nodes used to link the actual items of the list together rather than the actual items of the list + Gets the String representation of the Algebra - Graph - Root Node for List - Nodes that are the intermediate nodes of the list + - + - Gets whether a given Node is valid as a List Root, this does not guarantee that the list itself is valid simply that the Node appears to be the root of a list + Converts the Algebra back to a SPARQL Query - Node to check - Graph - - We consider a node to be a list root if there are no incoming rdf:rest triples and only a single outgoing rdf:first triple - - + - Gets the Node that represents the last item in the list + Throws an exception since a Distinct() cannot be converted back to a Graph Pattern - Graph - Root Node for List + Thrown since a Distinct() cannot be converted to a Graph Pattern - + - Retracts a List (aka a RDF collection) + Transforms the Inner Algebra using the given Optimiser - Graph - Root Node for List + Optimiser + - + - Adds new items to the end of a list (aka a RDF collection) + Represents a Reduced modifier on a SPARQL Query - Type of Objects - Graph to assert in - Root Node for the list - Objects to add to the collection - Mapping from Object Type to INode - + - Adds new items to the end of a list (aka a RDF collection) + Creates a new Reduced Modifier - Graph to assert in - Root Node for the list - Objects to add to the collection + Pattern - + - Removes the given items from a list (aka a RDF collection), if an item occurs multiple times in the list all occurrences will be removed + Evaluates the Reduced Modifier - Type of Objects - Graph to retract from - Root Node for the list - Objects to remove from the collection - Mapping from Object Type to INode + Evaluation Context + - + - Removes the given items from a list (aka a RDF collection), if an item occurs multiple times in the list all occurrences will be removed + Gets the Variables used in the Algebra - Graph to retract from - Root 
Node for the list - Objects to remove from the collection - + - Copies a Node to the target Graph + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Node to copy - Target Graph - - Shorthand for the Tools.CopyNode() method - + - Copies a Node to the target Graph + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Node to copy - Target Graph - Indicates whether Nodes should preserve the Graph Uri of the Graph they originated from - - Shorthand for the Tools.CopyNode() method - + - Copies a Triple to the target Graph + Gets the Inner Algebra - Triple to copy - Target Graph - - Shorthand for the Tools.CopyTriple() method - + - Copies a Triple to the target Graph + Gets the String representation of the Algebra - Triple to copy - Target Graph - Indicates whether Nodes should preserve the Graph Uri of the Graph they originated from - Shorthand for the Tools.CopyTriple() method - + - Copies a Triple from one Graph mapping Nodes as appropriate + Converts the Algebra back to a SPARQL Query - Triple to copy - TargetGraph - Mapping of Nodes - + - Gets either the String representation of the Object or the Empty String if the object is null + Throws an exception since a Reduced() cannot be converted back to a Graph Pattern - Object + Thrown since a Reduced() cannot be converted to a Graph Pattern - + - Gets either the String representation of the URI or the Empty String if the URI is null + Transforms the Inner Algebra using the given Optimiser - URI + Optimiser - + - Turns a string into a safe URI + Represents an Extend operation which is the formal algebraic form of the BIND operation - String - Either null if the string is null/empty or a URI otherwise - + - Gets the String representation of the URI formatted using the given Formatter + Creates a new Extend operator - URI - URI Formatter - + Pattern + Expression + Variable to bind to - + - 
Appends a String to the StringBuilder with an indent of spaces + Gets the Variable Name to be bound - String Builder - String to append - Indent - + - Appends a String to the StringBuilder with an indent of spaces + Gets the Assignment Expression - String Builder - String to append - Indent - - Strings containing new lines are split over multiple lines - - + - Takes a String and escapes any backslashes in it which are not followed by a valid escape character + Gets the Inner Algebra - String value - Valid Escape Characters i.e. characters which may follow a backslash - - + - Determines whether a string is ASCII + Transforms the Inner Algebra using the given Optimiser - + Optimiser - + - Determines whether a String is fully escaped + Evaluates the Algebra in the given context - String value - Valid Escape Characters i.e. characters which may follow a backslash - Characters which must be escaped i.e. must be preceded by a backslash + Evaluation Context - + - Escapes all occurrences of a given character in a String + Gets the variables used in the algebra - String - Character to escape - - - Ignores all existing escapes (indicated by a \) and so avoids double escaping as far as possible - - + - Escapes all occurrences of a given character in a String using the given escape character + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - String - Character to escape - Character to escape as - - - Ignores all existing escapes (indicated by a \) and so avoids double escaping as far as possible - - + - Provides useful Extension Methods for working with Graphs + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - + - Turns a Graph into a Triple Store + Converts the Algebra to a Query - Graph - + - Executes a SPARQL Query on a Graph + Converts the Algebra to a Graph Pattern - Graph to query - SPARQL Query - + - Executes a SPARQL Query on a Graph handling the results using the handlers provided + Gets the String representation of the Extend - Graph to query - RDF Handler - SPARQL Results Handler - SPARQL Query + - + - Executes a SPARQL Query on a Graph + Represents a Filter - Graph to query - SPARQL Query - - + - Executes a SPARQL Query on a Graph handling the results using the handlers provided + Creates a new Filter - Graph to query - RDF Handler - SPARQL Results Handler - SPARQL Query + Algebra the Filter applies over + Filter to apply - + - Executes a SPARQL Query on a Graph + Applies the Filter over the results of evaluating the inner pattern - Graph to query - SPARQL Query + Evaluation Context - + - Executes a SPARQL Query on a Graph handling the results using the handlers provided + Gets the Variables used in the Algebra - Graph to query - RDF Handler - SPARQL Results Handler - SPARQL Query - + - Loads RDF data from a file into a Graph + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Graph to load into - File to load from - Parser to use - - - This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace - - - Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. 
In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - If a File URI is assigned it will always be an absolute URI for the file - - - + - Loads RDF data from a file into a Graph + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Graph to load into - File to load from - - - This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace - - - Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - If a File URI is assigned it will always be an absolute URI for the file - - - + - Loads RDF data from a URI into a Graph + Gets the Filter to be used - Graph to load into - URI to load from - Parser to use - - - This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace - - - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. 
- - - + - Loads RDF data from a URI into a Graph + Gets the Inner Algebra - Graph to load into - URI to load from - - - This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace - - - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - + - Loads RDF data from a String into a Graph + Gets the String representation of the FILTER - Graph to load into - Data to load - Parser to use - - This is just a shortcut to using the static Parse() methods from the StringParser class located in the Parsing namespace - + - + - Loads RDF data from a String into a Graph + Converts the Algebra back to a SPARQL Query - Graph to load into - Data to load - - This is just a shortcut to using the static Parse() methods from the StringParser class located in the Parsing namespace - + - + - Loads RDF data from an Embedded Resource into a Graph + Converts the Algebra back to a Graph Pattern - Graph to load into - Assembly qualified name of the resource to load from - - This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace - + - + - Loads RDF data from an Embedded Resource into a Graph + Transforms the Inner Algebra using the given Optimiser - Graph to load into - Assembly qualified name of the resource to load from - Parser to use - - This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace - + Optimiser + - + - Saves a Graph to a File + Algebra 
operator which combines a Filter and a Product into a single operation for improved performance and reduced memory usage - Graph to save - File to save to - Writer to use - + - Saves a Graph to a File + Creates a new Filtered Product - Graph to save - File to save to + LHS Algebra + RHS Algebra + Expression to filter with - + - Provides useful Extension Methods for working with Triple Stores + Gets the LHS Algebra - + - Loads an RDF dataset from a file into a Triple Store + Gets the RHS Algebra - Triple Store to load into - File to load from - Parser to use - - This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace - - + - Loads an RDF dataset from a file into a Triple Store + Transforms the inner algebra with the given optimiser - Triple Store to load into - File to load from - - This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace - + Algebra Optimiser + - + - Loads an RDF dataset from a URI into a Triple Store + Transforms the LHS algebra only with the given optimiser - Triple Store to load into - URI to load from - Parser to use - - This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace - + Algebra Optimiser + - + - Loads an RDF dataset from a URI into a Triple Store + Transforms the RHS algebra only with the given optimiser - Triple Store to load into - URI to load from - - This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace - + Algebra Optimiser + - + - Loads an RDF dataset from a String into a Triple Store + Evaluates the filtered product - Triple Store to load into - Data to load - Parser to use - - This is just a shortcut to using the static ParseDataset() methods from the StringParser class located in the Parsing namespace - + Evaluation Context + - + - Loads an RDF dataset from a String into a Triple 
Store + Gets the Variables used in the Algebra - Triple Store to load into - Data to load - - This is just a shortcut to using the static ParseDataset() methods from the StringParser class located in the Parsing namespace - - + - Loads an RDF dataset from an Embedded Resource into a Triple Store + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Triple Store to load into - Assembly Qualified Name of the Embedded Resource to load from - Parser to use - - This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace - - + - Loads an RDF dataset from an Embedded Resource into a Triple Store + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Triple Store to load into - Assembly Qualified Name of the Embedded Resource to load from - - This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace - - + - Saves a Triple Store to a file + Converts the algebra back into a query - Triple Store to save - File to save to - Writer to use + - + - Saves a Triple Store to a file + Converts the algebra back into a Graph Pattern - Triple Store to save - File to save to + - + - Provides extension methods for converting primitive types into appropriately typed Literal Nodes + Gets the string represenation of the algebra + - + - Creates a new Boolean typed literal + Represents a GRAPH clause - Boolean - Node Factory to use for Node creation - Literal representing the boolean - Thrown if the Factory argument is null - + - Creates a new Byte typed literal + Creates a new Graph clause - Byte - Node Factory to use for Node creation - Literal representing the byte - - Byte in .Net is actually equivalent to Unsigned Byte in XML Schema so depending on the value of the Byte the type will either be xsd:byte if it fits or 
xsd:usignedByte - + Pattern + Graph Specifier - + - Creates a new Byte typed literal + Evaluates the Graph Clause by setting up the dataset, applying the pattern and then generating additional bindings if necessary - Byte - Node Factory to use for Node creation - Literal representing the signed bytes - - SByte in .Net is directly equivalent to Byte in XML Schema so the type will always be xsd:byte - + Evaluation Context + - + - Creates a new Date Time typed literal + Gets the Variables used in the Algebra - Date Time - Node Factory to use for Node creation - Literal representing the date time - Thrown if the Factory argument is null - - - Creates a new Date Time typed literal - - Date Time - Node Factory to use for Node creation - Whether to preserve precisely i.e. include fractional seconds - Literal representing the date time - Thrown if the Factory argument is null + + - + - Creates a new Date Time typed literal + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Date Time - Node Factory to use for Node creation - Literal representing the date time - Thrown if the Factory argument is null - + - Creates a new Date Time typed literal + Gets the Graph Specifier - Date Time - Node Factory to use for Node creation - Whether to preserve precisely i.e. 
include fractional seconds - Literal representing the date time - Thrown if the Factory argument is null - + - Creates a new Date typed literal + Gets the Inner Algebra - Date Time - Node Factory to use for Node creation - - Thrown if the Factory argument is null - + - Creates a new Date typed literal + Gets the String representation of the Algebra - Date Time - Node Factory to use for Node creation - Thrown if the Factory argument is null - + - Creates a new Time typed literal + Converts the Algebra back to a SPARQL Query - Date Time - Node Factory to use for Node creation - Literal representing the time - Thrown if the Factory argument is null + - + - Creates a new Time typed literal + Converts the Algebra back to a Graph Pattern - Date Time - Node Factory to use for Node creation - Whether to preserve precisely i.e. include fractional seconds - Literal representing the time - Thrown if the Factory argument is null + - + - Creates a new duration typed literal + Transforms the Inner Algebra using the given Optimiser - Time Span - Node Factory to use for Node creation - Literal representing the time span + Optimiser + - + - Creates a new Time typed literal + Creates a Graph instance by applying a graph specifier to an algebra - Date Time - Node Factory to use for Node creation - Literal representing the time - Thrown if the Factory argument is null + The algebra to be constrained + A token specifying the graph constraint + A Graph instance representing the application of the graph constraint to the algebra - + - Creates a new Time typed literal + Represents a Grouping - Date Time - Node Factory to use for Node creation - Whether to preserve precisely i.e. 
include fractional seconds - Literal representing the time - Thrown if the Factory argument is null - + - Creates a new Decimal typed literal + Creates a new Group By - Decimal - Node Factory to use for Node creation - Literal representing the decimal - Thrown if the Factory argument is null + Pattern + Grouping to use + Aggregates to calculate - + - Creates a new Double typed literal + Evaluates a Group By by generating a GroupMultiset from the Input Multiset - Double - Node Factory to use for Node creation - Literal representing the double - Thrown if the Factory argument is null + SPARQL Evaluation Context + - + - Creates a new Float typed literal + Gets the Variables used in the Algebra - Float - Node Factory to use for Node creation - Literal representing the float - Thrown if the Factory argument is null - + - Creates a new Integer typed literal + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Integer - Node Factory to use for Node creation - Literal representing the short - Thrown if the Factory argument is null - + - Creates a new Integer typed literal + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Integer - Node Factory to use for Node creation - Literal representing the integer - Thrown if the Factory argument is null - + - Creates a new Integer typed literal + Gets the Inner Algebra - Integer - Node Factory to use for Node creation - Literal representing the integer - Thrown if the Factory argument is null - + - Creates a new String typed literal + Gets the Grouping that is used - String - Node Factory to use for Node creation - Literal representing the string - Thrown if the Graph/String argument is null + + If the Query supplied in the SparqlEvaluationContext is non-null and has a GROUP BY clause then that is applied rather than the clause with which the GroupBy algebra is instantiated + - + - Interface for Reader Classes which parser Sparql Result Set syntaxes into Result Set objects + Gets the Aggregates that will be applied - + - Loads a Result Set from the given Stream + Gets the String representation of the - Stream to read from - Result Set to load into - Should throw an error if the Result Set is not empty - + - Loads a Result Set from the given File + Converts the Algebra back to a SPARQL Query - File containing a Result Set - Result Set to load into - Should throw an error if the Result Set is not empty - + - Loads a Result Set from the given Input + Throws an exception since GroupBy() cannot be converted to a Graph Pattern - Input to read from - Result Set to load into - Should throw an error if the Result Set is not empty + Thrown since GroupBy() cannot be converted to a GraphPattern - + - Loads a Result Set using a Results Handler from the given Stream + Transforms the Inner Algebra using the given Optimiser - Results Handler - Stream to read from + Optimiser + - + - Loads a Result Set using a Results Handler from the given file + Multiset which represents a Grouping of Sets from another Multiset - Results Handler - File to read results from - + - Loads a Result Set using a Results Handler 
from the given Input + Creates a new Group Multiset - Results Handler - Input to read from + Multiset which contains the sets that are being grouped - + - Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + Gets the enumeration of the Groups in the Multiset - + - Interface for Writer classes which serialize Sparql Result Sets into concrete results set syntaxes + Gets the enumeration of the IDs of Sets in the group with the given ID + Group ID + - + - Saves the Result Set to the given File + Gets the Group with the given ID - Result Set to save - File to save to + Group ID + - + - Saves the Result Set to the given Stream + Adds a Group to the Multiset - Result Set to save - Stream to save to + - + - Event raised when a non-fatal issue with the SPARQL Results being written is detected + Adds a Set to the Group Multiset + Set + Thrown since this action is invalid on a Group Multiset - + - Abstract Base Class for Triple Collections + Gets the Multiset which contains the Sets who are the members of the Groups this Multiset represents - - Designed to allow the underlying storage of a Triple Collection to be changed at a later date without affecting classes that use it. 
- - + - Adds a Triple to the Collection + Represents a Having Clause - Triple to add - Adding a Triple that already exists should be permitted though it is not necessary to persist the duplicate to underlying storage - + - Determines whether a given Triple is in the Triple Collection + Creates a new Having Clause - The Triple to test - True if the Triple already exists in the Triple Collection + Pattern + Having Clause - + - Gets the Number of Triples in the Triple Collection + Evaluates the Having Clause + Evaluation Context + - + - Deletes a Triple from the Collection + Gets the Variables used in the Algebra - Triple to remove - Deleting something that doesn't exist should have no effect and give no error - + - Gets the given Triple + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Triple to retrieve - - Thrown if the given Triple doesn't exist - + - Gets all the Nodes which are Objects of Triples in the Triple Collection + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - + - Gets all the Nodes which are Predicates of Triples in the Triple Collection + Gets the Inner Algebra - + - Gets all the Nodes which are Subjects of Triples in the Triple Collection + Gets the HAVING clause used + + If the Query supplied in the SparqlEvaluationContext is non-null and has a HAVING clause then that is applied rather than the clause with which the Having algebra is instantiated + - + - Gets all the Triples with the given Subject + Gets the String representation of the Algebra - ubject to lookup - + - Gets all the Triples with the given Predicate + Converts the Algebra back to a SPARQL Query - Predicate to lookup - + - Gets all the Triples with the given Object + Throws an exception since a Having() cannot be converted back to a Graph Pattern - Object to lookup + Thrown since a Having() cannot be converted to a Graph Pattern - + - Gets all the Triples with the given Subject Predicate pair + Transforms the Inner Algebra using the given Optimiser - Subject to lookup - Predicate to lookup + Optimiser - + - Gets all the Triples with the given Predicate Object pair + Represents the Identity Multiset - Predicate to lookup - Object to lookup - - + - Gets all the Triples with the given Subject Object pair + Joins the Multiset to another Multiset - Subject to lookup - Object to lookup - + Other Multiset + + The other Multiset + - + - Diposes of a Triple Collection + Left Joins the Multiset to another Multiset + Other Multiset + Expression which the Join is predicated on + The other Multiset - + - Gets the typed Enumerator for the Triple Collection + Exists Joins the Multiset to another Multiset + Other Multiset + Whether solutions must exist in the Other Multiset for the Join to suceed - + - Gets the non-generic Enumerator for the Triple Collection + Minus Joins this Multiset to another Multiset + Other Multiset - + - Event which occurs when a Triple is added to the Collection + Generates the Product of 
this Set and another Multiset + Other Multiset + The other Multiset - + - Event which occurs when a Triple is removed from the Collection + Generates the Union of this Set and another Multiset + Other Multiset + The other Multiset - + - Helper method for raising the Triple Added event + Returns True since the Identity Multiset is considered to contain all values - Triple + Variable + Value + - + - Helper method for raising the Triple Removed event + Returns False since the Identity Multiset contains no Variables - Triple + Variable + - + - Possible Literal Equality Mode Settings + Returns False since the Identity Multiset is not disjoint with anything + Other Multiset + - + - Strict Mode compares Literals according to the official W3C RDF Specification + Adds a Set to the Multiset - - This means Literals are equal if and only if: -
    -
  1. The Lexical Values are identical when compared using Ordinal Comparison
  2. -
  3. The Language Tags if present are identical
  4. -
  5. The Datatypes if present are identical
  6. -
-
+ Set + Thrown since this operation is invalid on an Identity Multiset
- + - Loose Mode compares Literals based on values (if they have known Datatypes) + Adds a Variable to the Multiset - - This means Literals can be equal if they have lexically different values which are equivalent when converted to the Datatype. -

- Literals without Datatypes and those whose Datatypes are unknown or not handled by the Library will be compared using lexical equivalence as with Strict mode. -
+ Variable + Thrown since this operation is invalid on an Identity Multiset
- + - Configures Global Static Options for the Library + Sets the variable ordering for the multiset - - Some of these are Debug Build only, please see the Remarks on individual members for more detail - + Variable Ordering - + - Gets/Sets the Mode used to compute Literal Equality (Default is Strict which enforces the W3C RDF Specification) + Removes a Set to the Multiset + Set ID + Thrown since this operation is invalid on an Identity Multiset - + - Gets/Sets whether Literal Values should be normalized + Returns false as the Identity Multiset is not considered empty - + - Gets/Sets the Hard Timeout limit for SPARQL Query Execution (in milliseconds) + Returns an empty enumerable as the Identity Multiset contains no Variables - - This is used to stop SPARQL queries running away and never completing execution, it defaults to 3 mins (180,000 milliseconds) - - + - Gets/Sets whether Query Optimisation should be used + Returns an empty enumerable as the Identity Multiset contains no Sets - + - Gets/Sets whether Algebra Optimisation should be used + Returns an empty enumerable as the Identity Multiset contains no Sets - + - Gets/Sets whether some Optimisations considered unsafe can be used + Gets the Set with the given ID - - - The notion of unsafe optimisations refers to optimisations that can make significant performance improvements to some types of queries but are disabled normally because they may lead to behaviour which does not strictly align with the SPARQL specification. - - - One example of such an optimisation is an implicit join where the optimiser cannot be sure that the variables involved don't represent literals. 
- - + Set ID + + Thrown since the Identity Multiset contains no Sets - + - Gets/Sets the default syntax used for parsing SPARQL queries + Interface for Sets which represents a possible solution during SPARQL evaluation - - The default is SPARQL 1.1 unless you use this property to change it - - + - Gets/Sets whether functions that can't be parsed into Expressions should be represented by the UnknownFunction + Adds a Value for a Variable to the Set - When set to false a Parser Error will be thrown if the Function cannot be parsed into an Expression + Variable + Value - + - Gets/Sets whether to use rigorous query evaluation + Checks whether the Set contains a given Variable - - - Rigorous Query evaluation applies more checks to the triples produced by datasets to ensure they actually match the patterns being scanned. If the underlying index structures are able to guarantee this then rigorous evaluation may be turned off for faster evaluation which it is by default since our default and implementations will guarantee this. - - + Variable + - + - Gets/Sets whether to use strict operators + Gets whether the Set is compatible with a given set based on the given variables - - - Strict Operators refers to the interpretation of certian operators like + and - in SPARQL expression evaluation. If enabled then the operators will function only as specified in the SPARQL specification, if disabled (which is the default) then certain extensions (which the SPARQL specification allows an implementation to provide) will be allowed e.g. date time arithmetic. - - - The only time you may want to disable this is if you are developing queries locally which you want to ensure are portable to other systems or when running the SPARQL compliance tests. 
- - + Set + Variables + - + - Gets/Sets whether the query engine will try to use PLinq where applicable to evaluate suitable SPARQL constructs in parallel + Gets whether the Set is minus compatible with a given set based on the given variables - - For the 0.6.1 release onwards this was an experimental feature and disabled by default, from 0.7.0 onwards this is enabled by default - + Set + Variables + - + - Gets/Sets the Hard Timeout limit for SPARQL Update Execution (in milliseconds) + Gets/Sets the ID of the Set - - This is used to stop SPARQL Updates running away and never completing execution, it defaults to 3 mins (180,000 milliseconds) - - + - Gets/Sets the Default Compression Level used for Writers returned by the MimeTypesHelper class when the writers implement ICompressingWriter + Removes a Value for a Variable from the Set + Variable - + - Controls whether the indexed triple collections will create full indexes for the Triples inserted into it + Retrieves the Value in this set for the given Variable - - - By default indexes triple collections creates indexes on Triples based upon Subjects, Predicates and Objects alone. When full indexing is enabled it also creates indexes based on Subject-Predicate, Predicate-Object and Subject-Object pairs which may improve query speed but will use additional memory. - - - Default setting for Full Indexing is enabled, enabling/disabling it only has an effect on indexed triple collection instances instantiated after full indexing was enabled/disabled i.e. existing Graphs in memory using the indexed triple collections continue to use the full indexing setting that was present when they were instantiated. 
- - + Variable + Either a Node or a null - + - Gets/Sets whether the UriLoader uses caching + Gets the Values in the Set - + - Gets/Sets the Timeout for URI Loader requests (Defaults to 15 seconds) + Gets the Variables in the Set - + - Gets/Sets whether a UTF-8 BOM is used for UTF-8 Streams created by dotNetRDF (this does not affect Streams passed directly to methods as open streams cannot have their encoding changed) + Joins the set to another set + Other Set + - + - Gets/Sets whether IRIs are validated by parsers which support this functionality + Copies the Set - - When enabled certain parsers will validate all IRIs they see to ensure that they are valid and throw a parser error if they are not. Since there is a performance penalty associated with this and many older RDF standards were written pre-IRIs (thus enforcing IRI validity would reject data considered valid by those specifications) this feature is disabled by default. - + - + - Gets/Sets whether Blocking IO should be forced + Abstract Base Class for implementations of the ISet interface - - Blocking IO refers to how the parsing sub-system reads in inputs, it will use Blocking/Non-Blocking IO depending on the input source. In most cases the detection of which to use should never cause an issue but theoretically in some rare cases using non-blocking IO may lead to incorrect parsing errors being thrown (premature end of input detected), if you suspect this is the case try enabling this setting. If you still experience this problem with this setting enabled then there is some other issue with your input. - - + - Gets/Sets whether Basic HTTP authentication should be forced + Adds a Value for a Variable to the Set - - - There have been reported problems where some servers don't cope nicely with the HTTP authentication challenge response procedure resulting in failed HTTP requests. 
If the server only uses Basic HTTP authentication then you can opt to force dotNetRDF to always include the HTTP basic authentication header in requests and thus workaround this problem. - - - Warning: Under Silverlight this will only work correctly if usernames and passwords are composed only of characters within the ASCII range. - - + Variable + Value - + - Gets/Sets whether a DTD should be used for some XML formats to compress output + Checks whether the Set contains a given Variable + Variable + - + - Gets/Sets whether multi-theaded writing is permitted + Gets whether the Set is compatible with a given set based on the given variables - - In some contexts multi-threaded writing may not even work due to restrictions on thread types since we use the WaitAll() method which is only valid in MTA contexts. - + Set + Variables + - + - Gets/Sets whether the library will attempt to intern URIs to reduce memory usage + Gets whether the Set is minus compatible with a given set based on the given variables + Set + Variables + - + - Gets/Sets the default token queue mode used for tokeniser based parsers + Gets/Sets the ID of the Set - + - Gets/Sets whether HTTP Request and Response Information should be output to the Console Standard Out for Debugging purposes + Removes a Value for a Variable from the Set + Variable - + - Gets/Sets whether the HTTP Response Stream should be output to the Console Standard Output for Debugging purposes + Retrieves the Value in this set for the given Variable + Variable + Either a Node or a null - + - Gets/Sets the default culture literal comparison when literals are string or not implicitely comparable (different types, parse/cast error...) 
+ Gets the Values in the Set - - The default is set to the invariant culture to preserve behavioural backwards compatibility with past versions of dotNetRDF - - + - Gets/Sets the default collation for literal comparison when literals are string or not implicitely comparable (different types, parse/cast error...) + Gets the Variables in the Set - - The default is set to to preserve behavioural backwards compatibility with past versions of dotNetRDF - - + - A Thread Safe version of the Graph class + Joins the set to another set - Should be safe for almost any concurrent read and write access scenario, internally managed using a ReaderWriterLockSlim. If you encounter any sort of Threading/Concurrency issue please report to the dotNetRDF Bugs Mailing List - Performance will be marginally worse than a normal Graph but in multi-threaded scenarios this will likely be offset by the benefits of multi-threading. + Other Set + - + - Locking Manager for the Graph + Copies the Set + - + - Creates a new Thread Safe Graph + Gets whether the Set is equal to another set + Set to compare with + - + - Creates a new Thread Safe graph using the given Triple Collection + Gets whether the Set is equal to another object - Triple Collection + Object to compare with + - + - Creates a new Thread Safe graph using a Thread Safe triple collection + Gets the Hash Code of the Set - Thread Safe triple collection + - + - Asserts a Triple in the Graph + Gets the String representation of the Set - The Triple to add to the Graph + - + - Asserts a List of Triples in the graph + Interface for classes that represent the SPARQL Algebra and are used to evaluate queries - List of Triples in the form of an IEnumerable - + - Retracts a Triple from the Graph + Evaluates the Algebra in the given Context - Triple to Retract - Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted + Evaluation Context + - + - Retracts a enumeration of Triples from the graph + Gets the 
enumeration of Variables used in the Algebra - Enumeration of Triples to retract - + - Creates a new Blank Node ID and returns it + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - - + - Disposes of a Graph + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Returns the Blank Node with the given Identifier + Converts the Algebra back to a SPARQL Query - The Identifier of the Blank Node to select - Either the Blank Node or null if no Node with the given Identifier exists + + Thrown if the given Algebra cannot be converted to a SPARQL Query - + - Returns the LiteralNode with the given Value if it exists + Converts the Algebra back to a Graph Pattern - The literal value of the Node to select - Either the LiteralNode Or null if no Node with the given Value exists - The LiteralNode in the Graph must have no Language or DataType set + + Thrown if the given Algebra cannot be converted to a Graph Pattern - + - Returns the LiteralNode with the given Value in the given Language if it exists + Interface for SPARQL Algebra constructs which are unary operators i.e. 
they apply over a single inner Algebra - The literal value of the Node to select - The Language Specifier for the Node to select - Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists - + - Returns the LiteralNode with the given Value and given Data Type if it exists + Gets the Inner Algebra - The literal value of the Node to select - The Uri for the Data Type of the Literal to select - Either the LiteralNode Or null if no Node with the given Value and Data Type exists - + - Returns the UriNode with the given QName if it exists + Transforms the Inner Algebra using the given Optimiser - The QName of the Node to select + Optimiser + + The operator should retain all it's existing properties and just return a new version of itself with the inner algebra having had the given optimiser applied to it + - + - Returns the UriNode with the given Uri if it exists + Marker Interface for SPARQL Algebra constructs which are terminal operators i.e. they contain no inner algebra operators - The Uri of the Node to select - Either the UriNode Or null if no Node with the given Uri exists - + - Gets all the Triples involving the given Node + Represents an Algebra construct which is a BGP - The Node to find Triples involving - Zero/More Triples - + - Gets all the Triples involving the given Uri + Gets the Number of Patterns in the BGP - The Uri to find Triples involving - Zero/More Triples - + - Gets all the Triples with the given Node as the Object + Gets the Triple Patterns in the BGP - The Node to find Triples with it as the Object - - + - Gets all the Triples with the given Uri as the Object + Represents an Algebra construct which is a Filter - The Uri to find Triples with it as the Object - Zero/More Triples - + - Gets all the Triples with the given Node as the Predicate + Gets the Filter - The Node to find Triples with it as the Predicate - - + - Gets all the Triples with the given Uri as the Predicate + Represents an Algebra construct 
which is an Abstract Join (i.e. any kind of Join over two algebra operators) - The Uri to find Triples with it as the Predicate - Zero/More Triples + + Specific sub-interfaces are used to mark specific kinds of Join + - + - Gets all the Triples with the given Node as the Subject + Gets the LHS of the Join - The Node to find Triples with it as the Subject - Zero/More Triples - + - Gets all the Triples with the given Uri as the Subject + Gets the RHS of the Join - The Uri to find Triples with it as the Subject - Zero/More Triples - + - A Thread Safe version of the Graph class + Transforms both sides of the Join using the given Optimiser - Should be safe for almost any concurrent read and write access scenario, internally managed using a ReaderWriterLockSlim. If you encounter any sort of Threading/Concurrency issue please report to the dotNetRDF Bugs Mailing List + Optimser + - - Performance will be marginally worse than a normal Graph but in multi-threaded scenarios this will likely be offset by the benefits of multi-threading. 
- - - Since this is a non-indexed version load performance will be better but query performance better - + The join should retain all it's existing properties and just return a new version of itself with the two sides of the join having had the given optimiser applied to them - + - Creates a new non-indexed Thread Safe Graph + Transforms the LHS of the Join using the given Optimiser + Optimser + + + The join should retain all it's existing properties and just return a new version of itself with LHS side of the join having had the given optimiser applied to them + - + - An indexed triple collection that uses our and implementations under the hood for the index structures + Transforms the RHS of the Join using the given Optimiser + Optimser + + The join should retain all it's existing properties and just return a new version of itself with RHS side of the join having had the given optimiser applied to them - + - Creates a new Tree Indexed triple collection + Represents an Algebra construct which is a Join - + - Creates a new Tree Indexed triple collection + Represents an Algebra construct which is a Left Join - Mode to use for compound indexes - + - Creates a new Tree Indexed triple collection with the given Indexing options + Gets the Filter used on the Join - Whether to create a subject index - Whether to create a predicate index - Whether to create an object index - Whether to create a subject predicate index - Whether to create a subject object index - Whether to create a predicate object index - Mode to use for compound indexes - + - Indexes a Triple + Represents an Algebra construct which is a Union - Triple - + - Helper for indexing triples + Represents an Algebra construct which is a Minus - Node to index by - Triple - Index to insert into - + - Helper for indexing triples + Represents an Algebra construct which is an Exists Join - Triple to index by - Index to insert into - + - Unindexes a triple + Gets whether the Join requires compatible solutions to exist 
on the RHS - Triple - + - Helper for unindexing triples + Represents a BGP which is a set of Triple Patterns - Node to index by - Triple - Index to remove from + + + A Lazy BGP differs from a BGP in that rather than evaluating each Triple Pattern in turn it evaluates across all Triple Patterns. This is used for queries where we are only want to retrieve a limited number of solutions + + + A Lazy BGP can only contain concrete Triple Patterns and/or FILTERs and not any of other the specialised Triple Pattern classes + + - + - Helper for unindexing triples + Creates a Streamed BGP containing a single Triple Pattern - Triple - Index to remove from + Triple Pattern - + - Adds a Triple to the collection + Creates a Streamed BGP containing a set of Triple Patterns - Triple - + Triple Patterns - + - Checks whether the collection contains a given Triple + Creates a Streamed BGP containing a single Triple Pattern - Triple - + Triple Pattern + The number of Results the BGP should attempt to return - + - Gets the count of triples in the collection + Creates a Streamed BGP containing a set of Triple Patterns + Triple Patterns + The number of Results the BGP should attempt to return - + - Deletes a triple from the collection + Evaluates the BGP against the Evaluation Context - Triple + Evaluation Context - + - Gets the specific instance of a Triple in the collection + Gets the String representation of the Algebra - Triple - + - Gets all the triples with a given object + Represents a Union - Object - + + + A Lazy Union differs from a standard Union in that if it finds sufficient solutions on the LHS it has no need to evaluate the RHS + + - + - Gets all the triples with a given predicate + Creates a new Lazy Union - Predicate - + LHS Pattern + RHS Pattern - + - Gets all the triples with a given subject + Creates a new Lazy Union - Subject - + LHS Pattern + RHS Pattern + The number of results that the Union should attempt to return - + - Gets all the triples with a given predicate 
and object + Evaluates the Lazy Union - Predicate - Object + Evaluation Context - + - Gets all the triples with a given subject and object + Gets the Variables used in the Algebra - Subject - Object - - + - Gets all the triples with a given subject and predicate + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Subject - Predicate - - + - Gets the Object Nodes + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Gets the Predicate Nodes + Gets the LHS of the Join - + - Gets the Subject Nodes + Gets the RHS of the Join - + - Disposes of the collection + Gets the String representation of the Algebra + - + - Gets the enumerator for the collection + Converts the Algebra back to a SPARQL Query - + - Abstract decorator for Graph Collections to make it easier to add new functionality to existing implementations + Converts the Union back to Graph Patterns + - + - Underlying Graph Collection + Transforms both sides of the Join using the given Optimiser + Optimser + - + - Creates a decorator around a default instance + Transforms the LHS of the Join using the given Optimiser + Optimser + - + - Creates a decorator around the given graph collection + Transforms the RHS of the Join using the given Optimiser - Graph Collection + Optimser + - + - Adds a Graph to the collection + Represents the Minus join - Graph - Whether to merge into an existing Graph with the same URI - - + - Gets whether the collection contains the given Graph + Creates a new Minus join - + LHS Pattern + RHS Pattern + + + + Evaluates the Minus join by evaluating the LHS and RHS and substracting the RHS results from the LHS + + Evaluation Context - + - Gets the number of Graphs in the collection + Gets the Variables used in the Algebra - + - Disposes of the collection + Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value - + - Gets the enumerator for the collection + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - - + - Gets the URIs of the Graphs in the collection + Gets the LHS of the Join - + - Removes a Graph from the collection + Gets the RHS of the Join - Graph URI - - + - Gets a Graph from the collection + Gets the string representation of the Algebra - Graph URI - + - Abstract decorator for Triple Collections to make it easier to add additional functionality to existing collections + Converts the Algebra back to a SPARQL Query + - + - Underlying Triple Collection + Converts the Minus() back to a MINUS Graph Pattern + - + - Creates a new decorator over the default + Transforms both sides of the Join using the given Optimiser + Optimser + - + - Creates a new decorator around the given triple collection + Transforms the LHS of the Join using the given Optimiser - Triple Collection + Optimser + - + - Adds a triple to the collection + Transforms the RHS of the Join using the given Optimiser - Triple + Optimser - + - Gets whether the collection contains the given Triple + Represents a Multiset of possible solutions - Triple - - + - Counts the triples in the collection + Variables contained in the Multiset - + - Deletes a triple from the collection + Dictionary of Sets in the Multiset - Triple - - + - Gets the specific instance of a Triple from the collection + Counter used to assign Set IDs - Triple - - + - Gets the object nodes + Creates a new Empty Multiset - + - Gets the predicate nodes + Creates a new Empty Mutliset that has the list of given Variables + - + - Gets the subject nodes + Creates a new Multiset from a SPARQL Result Set + Result Set - + - Disposes of the collection + Creates a new Multiset by flattening a Group Multiset + Group Multiset - + - Gets the enumerator for the collection + Determines whether a given Value is present for a given 
Variable in any Set in this Multiset + Variable + Value - + - Gets all the triples with the given object + Returns whether a given Variable is present in any Set in this Multiset - Object + Variable + + + + + Determines whether this Multiset is disjoint with another Multiset + + Other Multiset - + + + Adds a Set to the Multiset + + Set + + - Gets all the triples with the given predicate + Adds a Variable to the list of Variables present in this Multiset - Predicate - + Variable - + - Gets all the triples with the given predicate and object + Sets the variable ordering for the multiset - Predicate - Object - + Variable Ordering - + - Gets all the triples with the given subject + Removes a Set from the Multiset - Subject - + Set ID - + - Gets all the triples with the given subject and object + Trims the Multiset to remove Temporary Variables - Subject - Object - - + - Gets all the triples with the given subject and predicate + Trims the Multiset to remove the given Variable - Subject - Predicate - + Variable - + - Abstract decorator for Triple Stores to make it easier to add new functionality on top of existing implementations + Gets whether the Multiset is empty - + - Underlying store + Gets the number of Sets in the Multiset - + - Event Handler definitions + Gets the Variables in the Multiset - + - Event Handler definitions + Gets the Sets in the Multiset - + - Event Handler definitions + Gets the IDs of Sets in the Multiset - + - Event Handler definitions + Gets a Set from the Multiset + Set ID + - + - Event Handler definitions + Represents a Negated Property Set in the SPARQL Algebra - + - Creates a new triple store decorator that uses a default in-memory + Creates a new Negated Property Set + Path Start + Path End + Negated Properties + Whether this is a set of Inverse Negated Properties - + - Creates a new triple store decorator around the given instance + Creates a new Negated Property Set - Triple Store + Path Start + Path End + Negated Properties - + - Gets 
whether the store is empty + Gets the Path Start - + - Gets the Graphs of the store + Gets the Path End - + - Gets the triples of the store + Gets the Negated Properties - + - Adds a Graph to the store + Gets whether this is a set of Inverse Negated Properties - Graph - - + - Adds a Graph to the store + Evaluates the Negated Property Set - Graph - Whether to merge with an existing graph with the same URI + SPARQL Evaluation Context - + - Adds a Graph to the store from a URI + Gets the Variables used in the Algebra - Graph URI - - + - Adds a Graph to the store from a URI + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Graph URI - Whether to merge with an existing graph with the same URI - - + - Removes a Graph from the store + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Graph URI - - + - Gets whether a Graph exists in the store + Transforms the Algebra back into a SPARQL QUery - Graph URI - + - Gets a Graph from the store + Transforms the Algebra back into a Graph Pattern - Graph URI - + - Event which is raised when a graph is added + Gets the String representation of the Algebra + - + - Events which is raised when a graph is removed + Represents a Multiset when there are no possible Solutions - + - Event which is raised when a graph is changed + Joins another Multiset to this Null Mutliset + Other Multiset + + Results in this Null Multiset since Null joined to anything is Null + - + - Event which is raised when a graph is cleared + Left Joins another Multiset to this Null Mutliset + Other Multiset + Expression the join is predicate upon + + Results in this Null Multiset since Null joined to anything is Null + - + - Event which is raised when a graph is merged + Exists Joins another Multiset to this Null Mutliset + Other Multiset + Whether joinable solutions must exist in the other Multiset for joins to be made + + Results 
in this Null Multiset since Null joined to anything is Null + - + - Helper method for raising the Graph Added event manually + Minus Joins this Multiset to another Multiset - Graph + Other Multiset + - + - Helper method for raising the Graph Added event manually + Computes the Product of this Multiset and another Multiset - Graph Event Arguments + Other Multiset + + Results in the Other Multiset since for Product we consider this Multiset to contain a single empty Set + - + - Event Handler which handles the Graph Added event from the underlying Graph Collection and raises the Triple Store's Graph Added event + Unions this Multiset with another Multiset - Sender - Graph Event Arguments - Override this method if your Triple Store implementation wishes to take additional actions when a Graph is added to the Store + Other Multiset + + Results in the Other Multiset as this is an empty Multiset + - + - Helper method for raising the Graph Removed event manually + Returns false since the Null Multiset contains no values - Graph + Variable + Value + - + - Helper method for raising the Graph Removed event manually + Returns false since the Null Multiset contains no variables - Graph Event Arguments + Variable + - + - Event Handler which handles the Graph Removed event from the underlying Graph Collection and raises the Triple Stores's Graph Removed event + Returns true since the Null Multiset is disjoint with all Multisets - Sender - Graph Event Arguments + Other Multiset + - + - Helper method for raising the Graph Changed event manually + Adds a Set to this Multiset - Graph Event Arguments + Set + Thrown since the operation is invalid on a Null Multiset - + - Event Handler which handles the Changed event of the contained Graphs by raising the Triple Store's Graph Changed event + Adds a Variable to this Multiset - Sender - Graph Event Arguments + Variable + Thrown since the operation is invalid on a Null Multiset - + - Helper method for raising the Graph Changed event 
manually + Sets the variable ordering for the multiset - Graph + Variable Ordering - + - Helper method for raising the Graph Cleared event manually + Removes a Set from a Multiset - Graph Event Arguments + Set ID + Thrown since the operation is invalid on a Null Multiset - + - Event Handler which handles the Cleared event of the contained Graphs by raising the Triple Stores's Graph Cleared event + Returns true since the Null Multiset is always empty - Sender - Graph Event Arguments - + - Helper method for raising the Graph Merged event manually + Returns an empty enumerable as the Null Multiset contains no Variables - Graph Event Arguments - + - Event Handler which handles the Merged event of the contained Graphs by raising the Triple Store's Graph Merged event + Returns an empty enumerable as the Null Multiset contains no Sets - Sender - Graph Event Arguments - + - Disposes of the Triple Store + Returns an empty enumerable as the Null Multiset contains no Sets - + - Class for representing errors with RDF + Gets the Set with the given ID + Set ID + + Thrown since the Null Multiset contains no Sets - + - Creates a new RDF Exception with the given Message + Represents a part of the algebra that has been determined to not return any results in advance and so can be replaced with this operator which always returns null - Error Message + + Primarily intended for use with Algebra Optimisers which are rewriting the algebra to run against an out of memory dataset (e.g. SQL based) where it may be easily possible to determine if a triple pattern will match in advance of actually returning the matches. 
+ - + - Creates a new RDF Exception with the given Message and Inner Exception + Creates a new Null Operator - Error Message - Inner Exception + Variables in the algebra that this null is replacing - + - Class for representing errors with Ontologies + Evaluates the Null operator which of course always returns a NullMultiset + Evaluation Context + - + - Creates a new RDF Ontology Exception with the given message + Gets the variables used in this algebra - Error message - + - Creates a new RDF Ontology Exception with the given message and inner exception + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Error message - Inner Exception - + - - The Ontology Namespace is based upon Jena's Ontology API. It allows for a more ontology-centric way of manipulating RDF graphs within the dotNetRDF API. - - - The OntologyResource is the base class of resources and allows for the retrieval and manipulation of various common properties of a resource. More specialised classes like OntologyClass and OntologyProperty are used to work with classes and properties etc. - + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Represents an Individual i.e. 
an instance of some class in an ontology + Throws an error since a null operator cannot be transformed back into a query - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - + - + - Gets an Individual from the Graph + Throws an error since a null operator cannot be transformed back into a query - Resource that represents the Individual - Graph the Individual is in - - Requires that an individual (a resource which is the subject of at least one triple where the predicate is rdf:type) is already present in the Graph - + - + - Gets/Creates an Individual from the Graph + Gets the string representation of the algebra - Resource that represents the Individual - Class to create/add the Individual to - Graph the Individual is in - - Allows for creating new Individuals in the Graph or adding existing resources to another Class. If the resource for the Individual or the given Class are new then they will be added to the Graph - + - + - Helper method which finds all the Types given for this Resource + Represents a One or More Path (e.g. 
rdf:type+) in the SPARQL Algebra - + - Gets all the Classes that this resource belongs to + Creates a new One or More Path + Path Start + Path End + Path - + - Gets whether the Individual belongs to a specific class + Evaluates the One or More Path - Class + SPARQL Evaluation Context - + - Gets whether the Individual belongs to a class identified by the given resource + Gets the String representation of the Algebra - Class - + - Gets whether the Individual belongs to a class identified by the given URI + Transforms the Algebra back into a Graph Pattern - Class URI - - - Represents the meta-information about an Ontology - - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - - - + - Creates a new Ontology for the given resource + Represents an Order By clause - Resource - Graph - + - Adds a new owl:backwardsCompatibleWith triple for this Ontology + Creates a new Order By clause - Resource - + Pattern + Ordering - + - Adds a new owl:backwardsCompatibleWith triple for this Ontology + Evaluates the Order By clause - Resource + Evaluation Context - + - Adds a new owl:backwardsCompatibleWith triple for this Ontology + Gets the Variables used in the Algebra - Resource - - + - Removes all owl:backwardsCompatibleWith triples for this Ontology + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - - + - Removes a owl:backwardsCompatibleWith triple for this Ontology + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Resource - - + - Removes a owl:backwardsCompatibleWith triple for this Ontology + Gets the Inner Algebra - Resource - - + - Removes a owl:backwardsCompatibleWith triple for this Ontology + Gets the Ordering that is used - Resource - + + If the Query supplied in the SparqlEvaluationContext is non-null and has an ORDER BY clause then that is applied rather than the ordering with which the OrderBy algebra is instantiated + - + - Adds a new owl:incompatibleWith triple for this Ontology + Gets the String representation of the Algebra - Resource - + - Adds a new owl:incompatibleWith triple for this Ontology + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a new owl:incompatibleWith triple for this Ontology + Throws an error since an OrderBy() cannot be converted back to a Graph Pattern - Resource + Thrown since an OrderBy() cannot be converted back to a Graph Pattern - + - Removes all owl:incompatibleWith triples for this Ontology + Transforms the Inner Algebra using the given Optimiser + Optimiser - + - Removes a owl:incompatibleWith triple for this Ontology + Represents a Union which will be evaluated in parallel - Resource - - + - Removes a owl:incompatibleWith triple for this Ontology + Creates a new Union - Resource - + LHS Pattern + RHS Pattern - + - Removes a owl:incompatibleWith triple for this Ontology + Evaluates the Union - Resource + - + - Adds a new owl:imports triple for this Ontology + Gets the Variables used in the Algebra - Resource - - + - Adds a new owl:imports triple for this Ontology + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Resource - - + - Adds a new owl:imports triple for this Ontology + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Resource - - + - Removes all owl:imports triples for this Ontology + Gets the LHS of the Join - - + - Removes a owl:imports triple for this Ontology + Gets the RHS of the Join - Resource - - + - Removes a owl:imports triple for this Ontology + Gets the String representation of the Algebra - Resource - + - Removes a owl:imports triple for this Ontology + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a new owl:priorVersion triple for this Ontology + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a new owl:priorVersion triple for this Ontology + Transforms both sides of the Join using the given Optimiser - Resource + Optimser - + - Adds a new owl:priorVersion triple for this Ontology + Transforms the LHS of the Join using the given Optimiser - Resource + Optimser - + - Removes all owl:priorVersion triples for this Ontology + Transforms the RHS of the Join using the given Optimiser + Optimser - + - Removes a owl:priorVersion triple for this Ontology + Represents a Join which will be evaluated in parallel - Resource - - + - Removes a owl:priorVersion triple for this Ontology + Creates a new Join - Resource - + Left Hand Side + Right Hand Side - + - Removes a owl:priorVersion triple for this Ontology + Evalutes a Join - Resource + Evaluation Context - + - Gets all the Ontologies that this Ontology is backwards compatible with + Gets the Variables used in the Algebra - + - Gets all the Ontologies that this Ontology is incompatible with + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Gets all the Ontologies that this Ontology imports + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - + - Gets all the Ontologies that are prior versions of this Ontology + Gets the LHS of the Join - + - Class for representing a class in an Ontology + Gets the RHS of the Join - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - - + - Creates a new representation of a Class in the given Ontology Mode + Gets the String representation of the Join - Resource - Graph + - + - Adds a new sub-class for this class + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a new sub-class for this class + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a new sub-class for this class + Transforms both sides of the Join using the given Optimiser - Resource + Optimser - + - Adds a new sub-class for this class + Transforms the LHS of the Join using the given Optimiser - Class + Optimser - - This overload also adds this class as a super-class of the given class - - + - Removes all sub-classes for this class + Transforms the RHS of the Join using the given Optimiser + Optimser - + - Removes a sub-class for this class + Implementation of a multiset which is suitable for multiple threads to write to in parallel, useful for parallelizing certain operations - Resource - - + - Removes a sub-class for this class + Creates a new Partionted Multiset - Resource - + Number of partitions + Partition Size - + - Removes a sub-class for this class + Gets the next Base ID to be used - Resource - + - Removes a sub-class for this class + Does a Union of this Multiset and another Multiset - Class + Other Multiset - - This overload also removes this class from being a super-class of the given class - - + - Adds a new super-class for this class + Determines whether a given Value is present for a given Variable in any Set in this Multiset - Resource + Variable + Value - + - Adds a new super-class for this class + Returns whether a given Variable is present in any Set in 
this Multiset - Resource + Variable - + - Adds a new super-class for this class + Determines whether this Multiset is disjoint with another Multiset - Resource + Other Multiset - + - Adds a new super-class for this class + Adds a Set to the multiset - Class - + Set - This overload also adds this class as a sub-class of the given class + Assumes the caller has set the ID of the set appropriately and will use this to determine which partition to add to - + - Removes all super-classes + Adds a Variable to the multiset - + Variable - + - Removes a super-class for this class + Sets the variable ordering for the multiset - Resource - + Variable Ordering - + - Removes a super-class for this class + Removes a Set from the multiset - Resource - + Set ID - + - Removes a super-class for this class + Gets whether the multiset is empty - Resource - - + - Removes a super-class for this class + Gets the number of sets in the multiset - Class - - - This overload also removes this class as a sub-class of the given class - - + - Adds an equivalent class for this class + Gets the variables in the multiset - Resource - - + - Adds an equivalent class for this class + Gets the sets in the multiset - Resource - - + - Adds an equivalent class for this class + Gets the Set IDs in the mutliset - Resource - - + - Adds an equivalent class for this class + Gets a Set from the multiset - Class + - - This overload also adds this class as an equivalent class of the given class - - + - Removes all equivalent classes for this class + Removes temporary variables from all sets the multiset - - + - Removes an equivalent class for this class + Removes a specific variable from all sets in the multiset - Resource - + Variable - + - Removes an equivalent class for this class + Algebra that represents the application of a Property Function - Resource - - + - Removes an equivalent class for this class + Creates a new Property function algebra - Resource - + Inner algebra + Property Function - + - Removes an 
equivalent class for this class + Gets the Inner Algebra - Class - - + - Adds a new disjoint class for this class + Transforms this algebra with the given optimiser - Resource + Optimiser - + - Adds a new disjoint class for this class + Evaluates the algebra in the given context - Resource + Evaluation Context - + - Adds a new disjoint class for this class + Gets the variables used in the algebra - Resource - - + - Adds a new disjoint class for this class + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Class - - - This overload also adds this class as a disjoint class of the given class - - + - Removes all disjoint classes for this class + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - - + - Removes a disjoint class for this class + Throws an error because property functions cannot be converted back to queries - Resource - + - Removes a disjoint class for this class + Throws an error because property functions cannot be converted back to graph patterns - Resource - + - Removes a disjoint class for this class + Gets the string representation of the algebra - Resource - + - Removes a disjoint class for this class + Represents an arbitrary property path in the algebra (only used when strict algebra is generated) - Class - - - This overload also removes this class as a disjoint class of the given class - - + - Gets the sub-classes of this class (both direct and indirect) + Creates a new Property Path operator + Path Start + Path Expression + Path End - + - Gets the direct sub-classes of this class + Evaluates the Path in the given context + Evaluation Context + - + - Gets the indirect sub-classes of this class + Converts the algebra back into a Graph Pattern + - + - Gets the super-classes of this class (both direct and indirect) + Gets the string representation of the algebra + - + - Gets the direct super-classes of this class + 
Interface for Property Path Operators - + - Gets the indirect super-classes of this class + Gets the Path Start - + - Gets the Sibling classes of this class, if this class is the root of the ontology nothing is returned even if there are multiple root classes + Gets the Path End - + - Gets the equivalent classes of this class + Gets the Property Path - + - Gets the disjoint classes of this class + Abstract Base Class for Path Operators - + - Gets the instances (individuals) of this class + Creates a new Path Operator + Path Start + Property Path + Path End - + - Gets the properties which have this class as a domain + Gets the Path Start - + - Gets the properties which have this class as a range + Gets the Path End - + - Gets whether something is a Top Class i.e. has no super classes + Gets the Property Path - + - Gets whether something is a Bottom Class i.e. has no sub classes + Evaluates the Property Path + SPARQL Evaluation Context + - + - Gets/Creates an Individual of this class + Gets the Variables used in the Algebra - Resource identifying the individual - - + - Gets whether this Class is equal to another Class + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Object to test - - + - Represents a Graph with additional methods for extracting ontology based information from it + Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - - + - Creates a new Ontology Graph + Transforms the Algebra back into a Query + - + - Gets/Creates an ontology resource in the Graph + Transforms the Algebra back into a Graph Pattern - Resource - + - Gets/Creates an ontology resource in the Graph + Gets the String representation of the Algebra - Resource - + - Gets/Creates an anonymous ontology resource in the Graph + Abstract Base Class for Arbitrary Length Path Operators - - + - Gets/Creates an ontology class in the Graph + Creates a new Arbitrary Lengh Path Operator - Class Resource - + Path Start + Path End + Property Path - + - Gets/Creates an ontology class in the Graph + Determines the starting points for Path evaluation - Class Resource - + Evaluation Context + Paths + Whether to evaluate Paths in reverse - + - Gets/Creates an anonymous ontology class in the Graph + Evaluates a setp of the Path + Context + Paths + Whether to evaluate Paths in reverse - + - Gets/Creates an ontology property in the Graph + Abstract Base Class for specialised Filters which restrict the value of a variable to some values - Property Resource - - + - Gets/Creates an ontology property in the Graph + Creates a new Variable Restriction Filter - Property Resource - + Algebra the filter applies over + Variable to restrict on + Filter to use - + - Gets an existing individual in the Graph + Evalutes the algebra for the given evaluation context - Individual Resource + Evaluation Context - + - Gets/Creates an individual in the Graph of the given class + Gets the Variable that this filter restricts the value of - Individual Resource - Class - - + - Gets an existing individual in the Graph + Gets the Variables used in the Algebra - Individual Resource - - + - Gets/Creates an individual in the Graph of the given class + Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value - Individual Resource - Class - - + - Get all OWL classes defined in the graph + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Get all the RDFS classes defined in the graph + Gets the Filter to be used - + + + Gets the Inner Algebra + + + + + Gets the String representation of the FILTER + + + + - Gets all classes defined in the graph using the standard rdfs:Class and owl:Class types + Converts the Algebra back to a SPARQL Query + - + - Get all classes defined in the graph where anything of a specific type is considered a class + Converts the Algebra back to a Graph Pattern - Type which represents classes - Enumeration of classes + - + - Gets all RDF properties defined in the graph + Transforms the Inner Algebra using the given Optimiser + Optimiser + - + - Gets all OWL Object properties defined in the graph + Abstract Base Class for specialised Filters which restrict the value of a variable to a single value - + - Gets all OWL Data properties defined in the graph + Creates a new Single Value Restriction Filter + Algebra the filter applies over + Variable to restrict on + Value to restrict to + Filter to use - + - Gets all OWL Annotation properties defined in the graph + Gets the Value Restriction which this filter applies - + - Gets all properties defined in the graph using any of the standard OWL property types (owl:AnnotationProperty, owl:DataProperty, owl:ObjectProperty) + Applies the Filter over the results of evaluating the inner pattern + Evaluation Context + - + - Gets all properties defined in the graph using any of the standard property types (rdf:Property, owl:AnnotationProperty, owl:DataProperty, owl:ObjectProperty) + Represents a special case Filter where the Filter restricts a variable to just one value i.e. 
FILTER(?x = <value>) - + - Get all properties defined in the graph where anything of a specific type is considered a property + Creates a new Identity Filter - Type which represents properties - Enumeration of properties + Algebra the Filter applies over + Variable to restrict on + Expression Term - + - Static Helper class for the Ontology API + Transforms the Inner Algebra using the given Optimiser - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - + Optimiser + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Represents a special case Filter where the Filter is supposed to restrict a variable to just one value i.e. FILTER(SAMETERM(?x, <value>)) - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Creates a new Same Term Filter + Algebra the Filter applies over + Variable to restrict on + Expression Term - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Transforms the Inner Algebra using the given Optimiser + Optimiser + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Represents the Selection step of Query Evaluation + + Selection trims variables from the Multiset that are not needed in the final output. + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Creates a new Select + Inner Pattern + Whether we are selecting all variables + Variables to Select - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Does this operator select all variables? 
- + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the Inner Algebra - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Trims the Results of evaluating the inner pattern to remove Variables which are not Result Variables + Evaluation Context + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the Variables used in the Algebra - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the SPARQL Variables used + + If the Query supplied in the SparqlEvaluationContext is non-null then it's Variables are used rather than these + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Gets the String representation of the Algebra + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Converts the Algebra back to a SPARQL Query + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Throws an error as a Select() cannot be converted back to a Graph Pattern + + Thrown since a Select() cannot be converted back to a Graph Pattern - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Transforms the Inner Algebra using the given Optimiser + Optimiser + - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Represents a Service Clause - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Creates a new Service clause 
with the given Endpoint Specifier and Graph Pattern + Endpoint Specifier + Graph Pattern + Whether Evaluation Errors are suppressed - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Creates a new Service clause with the given Endpoint Specifier and Graph Pattern + Endpoint Specifier + Graph Pattern - + - Constant URIs for properties exposed by OntologyResource and its derived classes + Evaluates the Service Clause by generating instance(s) of SparqlRemoteEndpoint as required and issuing the query to the remote endpoint(s) + Evaluation Context + - + - Constants for URIs for classes in Ontologies + Gets the Variables used in the Algebra - + - Constants for URIs for classes in Ontologies + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Constants for URIs for classes in Ontologies + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Constants for URIs for classes in Ontologies + Gets the Endpoint Specifier - + - Constants for URIs for classes in Ontologies + Gets the Graph Pattern - + - Constants for URIs for classes in Ontologies + Gets the String representation of the Algebra + - + - Constants for URIs for classes in Ontologies + Converts the Algebra back to a SPARQL Query + - + - Constants for URIs for classes in Ontologies + Converts the Algebra into a Graph Pattern + - + - Class for representing a property in an Ontology + Represents one possible set of values which is a solution to the query - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - - + - Creates a new Ontology Property for the given resource in the given Graph + Creates a new Set - Resource - Graph - + - Creates a new RDFS Ontology Property for the given resource in the given Graph + Creates a new Set which is the Join of the two Sets - Resource - Graph + A Set + A Set 
- + - Adds a new domain for the property + Creates a new Set which is a copy of an existing Set - Resource - + Set to copy - + - Adds a new domain for the property + Creates a new Set from a SPARQL Result - Resource - + Result - + - Adds a new domain for the property + Creates a new Set from a Binding Tuple - Resource - + Tuple - + - Clears all domains for the property + Retrieves the Value in this set for the given Variable - + Variable + Either a Node or a null - + - Removes a domain for the property + Adds a Value for a Variable to the Set - Resource - + Variable + Value - + - Removes a domain for the property + Removes a Value for a Variable from the Set - Resource - + Variable - + - Removes a domain for the property + Checks whether the Set contains a given Variable - Resource + Variable - + - Adds a new range for the property + Gets whether the Set is compatible with a given set based on the given variables - Resource + Set + Variables - + - Adds a new range for the property + Gets whether the Set is minus compatible with a given set based on the given variables - Resource + Set + Variables - + - Adds a new range for the property + Gets the Variables in the Set - Resource - - + - Clears all ranges for the property + Gets the Values in the Set - - + - Removes a range for the property + Joins the set to another set - Resource + Other Set - + - Removes a range for the property + Copies the Set - Resource - + - Removes a range for the property + Gets whether the Set is equal to another set - Resource + Set to compare with - + - Adds a new equivalent property for the property + Comparer for checking whether sets are distinct, check may either be using the entire set or by using only a subset of variables - Resource - - + - Adds a new equivalent property for the property + Creates a new comparer that compares across all variables - Resource - - + - Adds a new equivalent property for the property + Creates a new comparer that compare only on the specific variables - 
Resource - + Variables - + - Adds a new equivalent property for the property + Determines whether the given sets are equal - Property - - - This overload also adds this property as an equivalent property of the given property - + First Set + Second Set + True if sets are equal, false otherwise - + - Clears all equivalent properties for this property + Gets the hash code for a set - + Set + Hash Code - + - Removes an equivalent property for the property + Represents the Slice Operation in the SPARQL Algebra - Resource - - + - Removes an equivalent property for the property + Creates a new Slice modifier which will detect LIMIT and OFFSET from the query - Resource - + Pattern - + - Removes an equivalent property for the property + Creates a new Slice modifier which uses a specific LIMIT and OFFSET - Resource - + Pattern + Limit + Offset - + - Removes an equivalent property for the property + Evaluates the Slice by applying the appropriate LIMIT and OFFSET to the Results - Property + Evaluation Context - - This overload also removes this property as an equivalent property of the given property - - + - Adds an inverse property for the property + Gets the Variables used in the Algebra - Resource - - + - Adds an inverse property for the property + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Resource - - + - Adds an inverse property for the property + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Resource - - + - Adds an inverse property for the property + Gets the Limit in use (-1 indicates no Limit) - Property - - - This overload also adds this property as an inverse property of the given property - - + - Removes all inverse properties for this property + Gets the Offset in use (0 indicates no Offset) - - + - Removes an inverse property for the property + Gets whether the Algebra will detect the Limit and Offset to use from the provided query - Resource - - + - Removes an inverse property for the property + Gets the Inner Algebra - Resource - - + - Removes an inverse property for the property + Gets the String representation of the Algebra - Resource - + - Removes an inverse property for the property + Converts the Algebra back to a SPARQL Query - Property - - This overload also removes this property as an inverse property of the given property - - + - Adds a sub-property for the property + Throws an exception since a Slice() cannot be converted back to a Graph Pattern - Resource + Thrown since a Slice() cannot be converted to a Graph Pattern - + - Adds a sub-property for the property + Transforms the Inner Algebra using the given Optimiser - Resource + Optimiser - + - Adds a sub-property for the property + Special Algebra Construct for optimising queries of the form SELECT DISTINCT ?g WHERE {GRAPH ?g {?s ?p ?o}} - Resource - - + - Adds a sub-property for the property + Creates a new Select Distinct algebra - Property - - - This overload also adds this property as a super-property of the given property - + Graph Variable to bind Graph URIs to - + - Clears all sub-properties of this property + Evaluates the Select Distinct Graphs optimisation + Evaluation Context - + - Removes a sub-property for the property + Gets the Variables used in the Algebra - Resource - - + - Removes a sub-property for the property + Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value - Resource - - + - Removes a sub-property for the property + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - Resource - - + - Removes a sub-property for the property + Gets the Graph Variable to which Graph URIs are bound - Property - - This overload also removes this property as a super-property of the given property + If the Query supplied in the SparqlEvaluationContext is non-null then the Variable Name from the Query is used rather than this - - - Adds a super-property for the property - - Resource - - - + - Adds a super-property for the property + Gets the String representation of the Algebra - Resource - + - Adds a super-property for the property + Converts the Algebra back to a SPARQL Query - Resource - + - Adds a super-property for the property + Converts the Algebra to a Graph Pattern - Property - - This overload also adds this property as a sub-property of the given property - - + - Removes all super-properties of this property + Special Algebra Construct for optimising queries of the form ASK WHERE {?s ?p ?o} - - + - Removes a super-property for the property + Evalutes the Ask Any Triples optimisation - Resource + Evaluation Context - + - Removes a super-property for the property + Gets the Variables used in the Algebra - Resource - - + - Removes a super-property for the property + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Resource - - + - Removes a super-property for the property + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Property - - - This overload also removes this property as a sub-property of the given property - - + - Gets all the Classes which are in the properties Domain + Gets the String representation of the Algebra + - + - Gets all the Classes which are in this properties Range + Converts the Algebra back to a SPARQL Query + - + - Gets all the equivalent properties of this property + Converts the Algebra to a Graph Pattern + - + - Gets the sub-properties of this property (both direct and indirect) + Represents a sub-query as an Algebra operator (only used when strict algebra is generated) - + - Gets the direct sub-classes of this class + Creates a new subquery operator + Subquery - + - Gets the indirect sub-classes of this class + Evaluates the subquery in the given context + Evaluation Context + - + - Gets the super-properties of this property (both direct and indirect) + Gets the variables used in the subquery which are projected out of it - + - Gets the direct super-properties of this property + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - Gets the indirect super-properties of this property + Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value - + - Gets whether this is a top property i.e. has no super properties defined + Converts the algebra back into a Query + - + - Gets whether this is a btoom property i.e. 
has no sub properties defined + Converts the algebra back into a Subquery + - + - Gets the Sibling properties of this property, if this property is the root of the ontology nothing is returned even if there are multiple root properties + Gets the string representation of the algebra + - + - Gets all the inverse properties of this property + Represents a fixed set of solutions - + - Gets all the resources that use this property + Creates a new fixed set of solutions + Table - + - Base class for representing a resource in an Ontology + Returns the fixed set of solutions - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - - + Evaluation Context + - + - Storage of Literal Properties + Gets the variables used in the algebra - + - Storage of Resource Properties + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - + - The Node which this Resource is a wrapper around + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - + - The Graph from which this Resource originates + Throws an error as this cannot be converted back into a query + - + - Creates a new Ontology Resource for the given Resource in the given Graph + Throws an error as this cannot be converted back into a graph pattern - Resource - Graph + - + - Creates a new Ontology Resource for the given Resource in the given Graph + Gets the string representation of the algebra - Resource - Graph + - + - Gets the Resource that this Ontology Resource refers to + Represents a Zero Length Path in the SPARQL Algebra - + - Gets the Graph that this Ontology Resource is from + Creates a new Zero Length Path + Path Start + Path End + Property Path - + - Retrieves all the Triples which have the Resource as the subject and the given property URI as the predicate from the Graph and stores the values locally + Evaluates a Zero Length Path - Property URI - Whether only Literal values are acceptable + Evaluation Context + - + - Adds a new literal value for a property + Gets the String representation of the Algebra - Property URI - Literal Value - Whether the new value should be added to the Graph + - + - Adds a new literal value for a property + Transforms the Algebra back into a Graph Pattern - Property URI - Literal Value - Whether the new value should be added to the Graph + - + - Adds a new value for a property + Represents a Zero or More Path in the SPARQL Algebra - Property URI - Literal Value - Whether the new value should be added to the Graph - + - Adds a new value for a property + Creates a new Zero or More Path - Property URI - Literal Value - Whether the new value should be added to the Graph + Path Start + Path End + Property Path - + - Clears all values for a Literal Property + Evaluates a Zero or More Path - Property URI - Whether the removed values are removed from the Graph + Evaluation Context + - + - Clears all values for a Literal Property + Gets the String 
representation of the Algebra - Property URI - Whether the removed values are removed from the Graph + - + - Clears all values for a Resource Property + Transforms the Algebra into a Graph Pattern - Property URI - Whether the removed values are removed from the Graph + - + - Clears all values for a Resource Property + + Namespace for classes used in executing CONSTRUCT queries + - Property URI - Whether the removed values are removed from the Graph - + - Removes a literal value for a property + Context used for Constructing Triples in SPARQL Query/Update - Property URI - Value to remove - Whether the removed value is removed from the Graph - + - Removes a literal value for a property + Creates a new Construct Context - Property URI - Value to remove - Whether the removed value is removed from the Graph + Graph to construct Triples in + Set to construct from + Whether Blank Nodes bound to variables should be preserved as-is + + + Either the Set or Graph parameters may be null if required + + - + - Removes a value for a property + Creates a new Construct Context - Property URI - Value to remove - Whether the removed value is removed from the Graph + Factory to create nodes with + Set to construct from + Whether Blank Nodes bound to variables should be preserved as-is + + + Either the Set or Factory parameters may be null if required + + - + - Removes a value for a property + Gets the Set that this Context pertains to - Property URI - Value to remove - Whether the removed value is removed from the Graph - + - Adds a comment for this resource + Gets the Graph that Triples should be constructed in - Comment - - + - Adds a comment in a specific language for this resource + Gets whether Blank Nodes bound to variables should be preserved - Comment - Language - - + - Removes all comments for this resource + Creates a new Blank Node for this Context + ID + + + If the same Blank Node ID is used multiple times in this Context you will always get the same Blank Node for that ID 
+ + - + - Removes a comment for this resource + Creates a Node for the Context - Comment + Node + + + In effect all this does is ensure that all Nodes end up in the same Graph which may occassionally not happen otherwise when Graph wrappers are involved + + - + - Removes a comment for this resource + + Namespace for classes used to define a Dataset over which SPARQL Queries and Updates evaluated using the Leviathan engine operate + - Comment - - + - Removes a comment in a specific language for this resource + Abstract Base Class for Datasets which provides implementation of Active and Default Graph management - Comment - Language - - + - Adds a new owl:differentFrom triple for the resource + Reference to the Active Graph being used for executing a SPARQL Query - Resource - - + - Adds a new owl:differentFrom triple for the resource + Default Graph for executing SPARQL Queries against - Resource - - + - Adds a new owl:differentFrom triple for the resource + Stack of Default Graph References used for executing a SPARQL Query when a Query may choose to change the Default Graph from the Dataset defined one - Resource - - - This overload also adds this resource as different from the given resource - - + - Clears all owl:differentFrom triples for the resource + Stack of Active Graph References used for executing a SPARQL Query when there are nested GRAPH Clauses - - + - Removes a owl:differentFrom triple for the resource + Creates a new Dataset - Resource - - + - Removes a owl:differentFrom triple for the resource + Creates a new Dataset with the given Union Default Graph setting - Resource - + Whether to use a Union Default Graph - + - Removes a owl:differentFrom triple for the resource + Creates a new Dataset with a fixed Default Graph and without a Union Default Graph - Resource - - - This overload also removes this resource as different from the given resource - + - + - Adds a new rdfs:isDefinedBy triple for the resource + Gets a reference to the actual IGraph that is 
currently treated as the default graph - Resource - - + - Adds a new rdfs:isDefinedBy triple for the resource + Sets the Default Graph for the SPARQL Query - Resource - + - + - Adds a new rdfs:isDefinedBy triple for the resource + Sets the Default Graph - Resource - + Graph URI - + - Removes all rdfs:isDefinedBy triples for the resource + Sets the Default Graph - + Graph URIs - + - Removes a rdfs:isDefinedBy triple for the resource + Sets the Active Graph for the SPARQL Query - Resource - + Active Graph - + - Removes a rdfs:isDefinedBy triple for the resource + Sets the Active Graph for the SPARQL query - Resource - + Uri of the Active Graph + + Helper function used primarily in the execution of GRAPH Clauses + - + - Removes a rdfs:isDefinedBy triple for the resource + Sets the Active Graph for the SPARQL query - Resource - + URIs of the Graphs which form the Active Graph + Helper function used primarily in the execution of GRAPH Clauses - + - Adds a label for the resource + Sets the Active Graph for the SPARQL query to be the previous Active Graph - Label - - + - Adds a label in a specific language for a resource + Sets the Default Graph for the SPARQL Query to be the previous Default Graph - Label - Language - - + - Clears all labels for a resource + Gets the Default Graph URIs - - + - Removes a specific label for a resource + Gets the Active Graph URIs - Label - - + - Removes a label for a resource + Gets whether the Default Graph is treated as being the union of all Graphs in the dataset when no Default Graph is otherwise set - Label - - + - Removes a label in a specific language for a resource + Adds a Graph to the Dataset - Label - Language - + Graph - + - Adds a new owl:sameAs triple for the resource + Removes a Graph from the Dataset - Resource - + Graph URI - + - Adds a new owl:sameAs triple for the resource + Removes a Graph from the Dataset - Resource - + Graph URI - + - Adds a new owl:sameAs triple for the resource + Gets whether a Graph with the given 
URI is the Dataset - Resource + Graph URI - - This overload also adds this resource as an owl:sameAs triple for the given resource - - + - Removes all owl:sameAs triples for the resource + Determines whether a given Graph exists in the Dataset + Graph URI - + - Removes a owl:sameAs triple for the resource + Gets all the Graphs in the Dataset - Resource - - + - Removes a owl:sameAs triple for the resource - - Resource - + Gets all the URIs of Graphs in the Dataset + - + - Removes a owl:sameAs triple for the resource + Gets the Graph with the given URI from the Dataset - Resource + Graph URI - This overload also removes the owl:sameAs triple for the given resource + + This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the underlying storage + - + - Adds a new rdfs:seeAlso triple for the resource + Gets the given Graph from the Dataset - Resource + Graph URI - + - Adds a new rdfs:seeAlso triple for the resource + Gets the Graph with the given URI from the Dataset - Resource + Graph URI + + + Graphs returned from this method must be modifiable and the Dataset must guarantee that when it is Flushed or Disposed of that any changes to the Graph are persisted + + - + - Adds a new rdfs:seeAlso triple for the resource + Gets whether the Dataset has any Triples - Resource - - + - Removes all rdfs:seeAlso triples for the resource + Gets whether the Dataset contains a specific Triple + Triple - + - Removes a rdfs:seeAlso triple for the resource + Determines whether the Dataset contains a specific Triple - Resource + Triple to search for - + - Removes a rdfs:seeAlso triple for the resource + Gets all the Triples in the Dataset - Resource - - + - Removes a rdfs:seeAlso triple for the resource + Abstract method that concrete implementations must implement to return an enumerable of all the Triples in the Dataset - Resource - + - 
Adds a new rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Subject - Resource + Subject - + - Adds a new rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Subject - Resource + Subject - + - Adds a new rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Predicate - Resource + Predicate - + - Removes all rdf:type triples for the resource + Gets all the Triples in the Dataset with the given Predicate + Predicate - + - Removes a rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Object - Resource + Object - + - Removes a rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Object - Resource + Object - + - Removes a rdf:type triple for the resource + Gets all the Triples in the Dataset with the given Subject and Predicate - Resource + Subject + Predicate - + - Adds version information for the resource + Gets all the Triples in the Dataset with the given Subject and Predicate - Version Information + Subject + Predicate - + - Clears version information for the resource + Gets all the Triples in the Dataset with the given Subject and Object + Subject + Object - + - Remove version information for the resource + Gets all the Triples in the Dataset with the given Subject and Object - Version Information + Subject + Object - + - Remove version information for the resource + Gets all the Triples in the Dataset with the given Predicate and Object - Version Information + Predicate + Object - + - Gets the values for a property which is restricted to literals + Gets all the Triples in the Dataset with the given Predicate and Object - Property URI + Predicate + Object - + - Gets the values for a property which is restricted to literals + Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage - Property URI - - + - Gets the values for a property which can be any node type + 
Ensures that any changes to the Dataset (if any) are discarded - Property URI - - + - Gets the values for a property which can be any node type + Abstract Base Class for Immutable Datasets - Property URI - - + - Gets the Version Information for the Resource + Throws an exception since Immutable Datasets cannot be altered + Graph to add - + - Gets the Comment(s) for the Resource + Throws an exception since Immutable Datasets cannot be altered + Graph URI - + - Gets the Label(s) for the Resource + Throws an exception since Immutable Datasets cannot be altered + Graph URI - + - Gets the See Also(s) for the Resource + Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage - + - Gets the Same As('s) for the Resource + Ensures that any changes to the Dataset (if any) are discarded - + - Gets the Is Defined By(s) for the Resource + Abstract Base Class for Mutable Datasets that support Transactions + + + The Transaction implementation of dotNetRDF is based upon a MRSW concurrency model, since only one writer may be active changes are immediately pushed to the dataset and visible within the transaction and they are committed or rolled back when Flush() or Discard() are called. + + + So in practical terms it is perfectly OK for the storage to be updated during a transaction because if the transaction fails the changes will be rolled back because all changes are stored in-memory until the end of the transaction. This may not be an ideal transaction model for all scenarios so you may wish to implement your own version of transactions or code your implementations of the abstract methods accordingly to limit actual persistence to the end of a transaction. 
+ + - + - Gets the Different From(s) for the Resource + Creates a new Transactional Dataset - + - Gets the rdf:type's for the Resource + Creates a new Transactional Dataset with the given Union Default Graph setting + Whether to use a Union Default Graph - + - Gets all the Triples from the Graph where the Resource occurs as the Subject + Creates a new Transactional Dataset with a fixed Default Graph and no Union Default Graph + Default Graph URI - + - Gets all the Triples from the Graph where the Resource occurs as the Object + Adds a Graph to the Dataset + Graph to add - + - Gets all the Triples from the Graph where the Resource occurs as the Predicate + Adds a Graph to the Dataset + Graph to add - + - Gets all the Triples where the Resource occurs in any position + Removes a Graph from the Dataset + Graph URI - + - Gets the String representation of the Resource + Gets a Graph from the Dataset + Graph URI - This is either the first label (if any are declared) or the string representation of the INode that this resource wraps + If the Graph has been modified during the active Transaction the modified version is returned rather than the original version - + - Casts a Resource into an Ontology Class + Gets a Graph from the Dataset that can be modified + Graph URI - - Anything may be cast to a regardless of whether it actually represents a class in the ontology - - + - Casts a Resource into an Ontology Property + Gets a Graph from the Dataset that can be modified transactionally + Graph URI - - Anything may be cast to a regardless of whether it actually represents a property in the ontology - - + - Casts a Resource into a Graph + Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage - Resource - - Equivalent to doing a SPARQL DESCRIBE query on this resource + Commits the Active Transaction - + - Represents a Graph with a reasoner attached + Ensures that any changes to the Dataset (if any) are discarded - - This class wraps an existing 
Graph and applies the given reasoner to it materialising the Triples in this Graph. The original Graph itself is not modified but can be accessed if necessary using the BaseGraph property - - - Any changes to this Graph (via Assert() and Retract()) affect this Graph - specifically the set of materialised Triples - rather than the original Graph around which this Graph is a wrapper - - - See Using the Ontology API for some informal documentation on the use of the Ontology namespace - + Rollsback the Active Transaction - - - Creates a new Reasoner Graph which is a wrapper around an existing Graph with a reasoner applied and the resulting Triples materialised - - Graph - Reasoner - - + - Creates a new Reasoner Graph which is a wrapper around an existing Graph with multiple reasoners applied and the resulting Triples materialised + Allows the derived dataset to take any post-Flush() actions required - Graph - Reasoner - + - Internal method which initialises the Graph by applying the reasoners and setting the Node and Triple collections to be union collections + Allows the derived dataset to take any post-Discard() actions required - + - Gets the Base Graph which the reasoning is based upon + Abstract Dataset wrapper implementation for datasets that can load graphs on demand - + - Class for representing errors in parsing RDF + Creates a new Demand Dataset + Underlying Dataset - + - Creates a new RDF Parse Exception with the given Message + Sees if the underlying dataset has a graph and if not tries to load it on demand - Error Message + Graph URI + - + - Creates a new RDF Parse Exception with the given Message and Inner Exception + Method to be implemented by derived classes which implements the loading of graphs on demand - Error Message - Inner Exception + Graph URI + Graph + - + - Creates a new RDF Parse Exception which contains Position Information taken from the given Token + Abstract Base class of dataset designed around out of memory datasets where you rarely 
wish to load data into memory but simply wish to know which graph to look in for data - Error Message - Token - + - Creates a new RDF Parse Exception which contains Position Information taken from the given Token + Creates a new Quad Dataset - Error Message - Token - Inner Exception - + - Creates a new RDF Parse Exception which contains Position Information + Creates a new Quad Dataset - Error Message - Line the error occurred on - Column Position the error occurred at - Exeception that caused this exception + Whether to make the default graph the union of all graphs - + - Creates a new RDF Parse Exception which contains Position Information + Creates a new Quad Dataset - Error Message - Line the error occurred on - Column Position the error occurred at + URI of the Default Graph - + - Creates a new RDF Parse Exception which contains Position Information + Sets the Active Graph - Error Message - Line the error occurred on - Column Position the error starts at - Column Position the error ends at - Error that caused this exception + Graph URIs - + - Creates a new RDF Parse Exception which contains Position Information + Sets the Active Graph - Error Message - Line the error occurred on - Column Position the error starts at - Column Position the error ends at + Graph URI - + - Creates a new RDF Parse Exception which contains Position Information + Sets the Default Graph - Error Message - Line the error starts on - Line the error ends on - Column Position the error starts at - Column Position the error ends at - Error that caused this exception + Graph URI - + - Creates a new RDF Parse Exception which contains Position Information + Sets the Default Graph - Error Message - Line the error starts on - Line the error ends on - Column Position the error starts at - Column Position the error ends at + Graph URIs - + - Creates a new RDF Parse Exception which contains Position Information + Resets the Active Graph - Error Message - Position Information - Error that caused 
this exception - + - Creates a new RDF Parse Exception which contains Position Information + Resets the Default Graph - Error Message - Position Information - + - Gets whether the Exception has any position information + Gets the Default Graph URIs - + - Gets the Start Line of the Error or -1 if no position information + Gets the Active Graph URIs - + - Gets the End Line of the Error or -1 if no position information + Gets whether this dataset uses a union default graph - + - Gets the Start Column of the Error or -1 if no position information + Gets whether the given URI represents the default graph of the dataset + Graph URI + - + - Gets the End Column of the Error or -1 if no position information + Adds a Graph to the dataset + Graph - + - Class of exceptions that may occur when doing multi-threaded parsing of RDF + Adds a Quad to the Dataset - - - Used when a process may result in multiple errors from different threads - - + Graph URI + Triple - + - Creates a new Threaded RDF Parsing Exception + Removes a Graph from the Dataset - Error Message + Graph URI - + - Adds an Exception to the list of Inner Exceptions + Removes a Quad from the Dataset - Exception + Graph URI + Triple - + - Gets the enumeration of Exceptions + Gets whether a Graph with the given URI is the Dataset + Graph URI + - + - Class for representing errors in selecting an appropriate parser to parse RDF with + Determines whether a given Graph exists in the Dataset + Graph URI + - + - Creates a new RDF Parser Selection Exception with the given Message + Gets the Graphs in the dataset - Error Message - + - Creates a new RDF Parser Selection Exception with the given Message and Inner Exception + Gets the URIs of the graphs in the dataset - Error Message - Inner Exception - + - Class for representing that a parser has been terminated by a IRdfHandler + Gets the Graph with the given URI from the Dataset + Graph URI + - Used internally to help force execution to jump back to the point where we can 
handle by safely discarding this exception and stop parsing + + This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the underlying storage + - - - Creates a new Parsing Terminated exception - - - + - Abstract Base class for RDF parsers which can read GZipped input + Gets a Graph from the dataset - - - While the normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such - - + Graph URI + - + - Creates a new GZipped input parser + Gets a modifiable graph from the dataset - Underlying parser + Graph URI + - + - Loads a Graph from GZipped input + Gets whether the dataset has any triples - Graph to load into - Stream to load from - + - Loads a Graph from GZipped input + Gets whether the dataset contains a triple - Graph to load into - Reader to load from + Triple + - + - Loads a Graph from GZipped input + Gets whether a Triple exists in a specific Graph of the dataset - Graph to load into - File to load from + Graph URI + Triple + - + - Loads RDF using a RDF Handler from GZipped input + Gets all triples from the dataset - RDF Handler to use - Stream to load from - + - Loads RDF using a RDF Handler from GZipped input + Gets all the Triples for a specific graph of the dataset - RDF Handler to use - Reader to load from + Graph URI + - + - Loads RDF using a RDF Handler from GZipped input + Gets all the Triples with a given subject - RDF Handler to use - File to load from + Subject + - + - Helper method for raising warning events + Gets all the Triples with a given subject from a specific graph of the dataset - + Graph URI + Subject + - + - Warning event which is raised when non-fatal errors are encounted parsing RDF + Gets all the Triples with a given predicate + Predicate + - + - Gets the description of the parser + 
Gets all the Triples with a given predicate from a specific graph of the dataset + Graph URI + Predicate - + - Parser for loading GZipped NTriples + Gets all the Triples with a given object + Object + - + - Creates a new GZipped NTriples parser + Gets all the Triples with a given object from a specific graph of the dataset + Graph URI + Object + - + - Parser for loading GZipped Turtle + Gets all the Triples with a given subject and predicate + Subject + Predicate + - + - Creates a new GZipped Turtle parser + Gets all the Triples with a given subject and predicate from a specific graph of the dataset + Graph URI + Subject + Predicate + - + - Creates a new GZipped Turtle parser + Gets all the Triples with a given subject and object - Turtle Syntax + Subject + Object + - + - Parser for loading GZipped Notation 3 + Gets all the Triples with a given subject and object from a specific graph of the dataset + Graph URI + Subject + Object + - + - Creates a new GZipped Notation 3 parser + Gets all the Triples with a given predicate and object + Predicate + Object + - + - Parser for loading GZipped RDF/XML + Gets all the Triples with a given predicate and object from a specific graph of the dataset + Graph URI + Predicate + Object + - + - Creates a new GZipped RDF/XML parser + Flushes any changes to the dataset - + - Creates a new GZipped RDF/XML parser + Discards any changes to the dataset - RDF/XML parser mode - + - Parser for loading GZipped RDF/JSON + Abstract Base class for immutable quad datasets - + - Creates a new GZipped RDF/JSON parser + Throws an error as this dataset is immutable + Graph - + - Parser for loading GZipped RDFa + Throws an error as this dataset is immutable + Graph URI + Triple - + - Creates a new GZipped RDFa parser + Throws an error as this dataset is immutable + Graph URI - + - A subclass of JsonTextReader which automatically ignores all comments + Throws an error as this dataset is immutable + Graph URI + Triple - + - Reads the next non-comment 
Token if one is available + Throws an error as this dataset is immutable - True if a Token was read, False otherwise + Graph URI + - + - Abstract Base Class for parsers that handle GZipped input + Abstract Base class for quad datasets that support transactions - While the normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such + The Transaction implementation of dotNetRDF is based upon a MRSW concurrency model, since only one writer may be active changes are immediately pushed to the dataset and visible within the transaction and they are committed or rolled back when Flush() or Discard() are called. + + + So in practical terms it is perfectly OK for the storage to be updated during a transaction because if the transaction fails the changes will be rolled back because all changes are stored in-memory until the end of the transaction. This may not be an ideal transaction model for all scenarios so you may wish to implement your own version of transactions or code your implementations of the abstract methods accordingly to limit actual persistence to the end of a transaction. 
- + - Creates a new GZipped input parser + Creates a Transactional Quad Dataset - The underlying parser to use - + - Loads a RDF dataset from GZipped input + Creates a Transactional Quad Dataset - Triple Store to load into - File to load from + Sets whether the default graph should be the union of all graphs - + - Loads a RDF dataset from GZipped input + Creates a Transactional Quad Dataset - Triple Store to load into - Input to load from + Default Graph URI - + - Loads a RDF dataset from GZipped input + Adds a Graph to the Dataset - RDF Handler to use - File to load from + Graph to add - + - Loads a RDF dataset from GZipped input + Adds a Graph to the Dataset - RDF Handler to use - Input to load from + Graph to add - + - Warning Event raised on non-fatal errors encountered parsing + Removes a Graph from the Dataset + Graph URI - + - Helper method for raising warning events + Removes a Graph from the dataset - Warning Message + Graph URI - + - Gets the description of the parser + Gets a Graph from the dataset + Graph URI - + - Parser for loading GZipped NQuads + Gets a Graph from the Dataset that can be modified + Graph URI + - + - Creates a new GZipped NQuads Parser + Gets a Graph from the Dataset that can be modified transactionally + Graph URI + - + - Parser for loading GZipped TriG + Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + + Commits the Active Transaction + - + - Creates a new GZipped TriG Parser + Ensures that any changes to the Dataset (if any) are discarded + + Rollsback the Active Transaction + - + - Parser for loading GZipped TriX + Allows the derived dataset to take any post-Flush() actions required - + - Creates a new GZipped TriX Parser + Allows the derived dataset to take any post-Discard() actions required - + - Abstract Base class for Results parser that read GZipped input + A Graph Collection which wraps an ISparqlDataset implementation so it can be used as if it was a Graph Collection - - - While the 
normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such - - - + - Creates a new GZipped results parser + Creates a new Dataset Graph collection - Underlying parser + SPARQL Dataset - + - Loads a Result Set from GZipped input + Gets whether the Collection contains a Graph with the given URI - Result Set to load into - Input to load from + Graph URI + - + - Loads a Result Set from GZipped input + Adds a Graph to the Collection - Result Set to load into - Input to load from + Graph to add + Whether to merge the given Graph with any existing Graph with the same URI + Thrown if a Graph with the given URI already exists and the mergeIfExists is set to false - + - Loads a Result Set from GZipped input + Removes a Graph from the Collection - Result Set to load into - File to load from + URI of the Graph to removed - + - Loads a Result Set from GZipped input + Gets the number of Graphs in the Collection - Results Handler to use - Input to load from - + - Loads a Result Set from GZipped input + Gets the URIs of Graphs in the Collection - Results Handler to use - Input to load from - + - Loads a Result Set from GZipped input + Gets the Graph with the given URI - Results Handler to use - File to load from + Graph URI + - + - Gets the description of the parser + Disposes of the Graph Collection + + + + + Gets the enumeration of Graphs in this Collection - + - Helper method for raising warning events + Represents an in-memory dataset (i.e. 
a InMemoryQueryableStore) for querying and updating using SPARQL - Warning message - + - Event which is raised if non-fatal errors are countered with parsing results + Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage - + - Parser for GZipped SPARQL XML + Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage + Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Creates a new GZipped SPARQL XML parser + Creates a new in-memory dataset containing initially just the given graph and treating the given graph as the default graph of the dataset + Graph - + - Parser for GZipped SPARQL JSON + Creates a new In-Memory dataset + In-Memory queryable store - + - Creates a new GZipped SPARQL JSON parser + Creates a new In-Memory dataset + In-Memory queryable store + Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Parser for GZipped SPARQL CSV + Creates a new In-Memory dataset + In-Memory queryable store + Default Graph URI - + - Creates a new GZipped SPARQL CSV parser + Gets the Lock used to ensure MRSW concurrency on the dataset when available - + - Parser for GZipped SPARQL TSV + Adds a Graph to the Dataset merging it with any existing Graph with the same URI + Graph - + - Creates a new GZipped SPARQL TSV parser + Removes a Graph from the Dataset + Graph URI - + - Parser Context for Turtle parsing + Gets whether a Graph with the given URI is the Dataset + Graph URI + - + - Creates a new Turtle Parser Context with default settings + Gets all the Graphs in the Dataset - Graph to parse into - Tokeniser to use - Turtle Syntax - + - Creates a new Turtle Parser Context with custom settings + Gets all the URIs of Graphs in the Dataset - Graph to parse into - Tokeniser to use - Turtle Syntax - Tokeniser Queue Mode - + - Creates a new Turtle 
Parser Context with custom settings + Gets the Graph with the given URI from the Dataset - Graph to parse into - Tokeniser to use - Turtle Syntax - Whether to trace parsing - Whether to trace tokenisation + Graph URI + + + + For In-Memory datasets the Graph returned from this property is no different from the Graph returned by the GetModifiableGraphInternal() method + + - + - Creates a new Turtle Parser Context with custom settings + Gets a Modifiable wrapper around a Graph in the Dataset - Graph to parse into - Tokeniser to use - Turtle Syntax - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Graph URI + - + - Creates a new Turtle Parser Context with default settings + Gets whether the Dataset contains a specific Triple - RDF Handler - Tokeniser to use - Turtle Syntax + Triple + - + - Creates a new Turtle Parser Context with custom settings + Gets all the Triples in the underlying in-memory store - RDF Handler - Tokeniser to use - Turtle Syntax - Tokeniser Queue Mode + - + - Creates a new Turtle Parser Context with custom settings + Gets all the Triples in the Dataset with the given Subject - RDF Handler - Tokeniser to use - Turtle Syntax - Whether to trace parsing - Whether to trace tokenisation + Subject + - + - Creates a new Turtle Parser Context with custom settings + Gets all the Triples in the Dataset with the given Predicate - RDF Handler - Tokeniser to use - Turtle Syntax - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Predicate + - + - Gets the Turtle Syntax being used + Gets all the Triples in the Dataset with the given Object + Object + - + - Function for unescaping QNames + Gets all the Triples in the Dataset with the given Subject and Predicate + Subject + Predicate + - + - - Namespace for Parser Context classes, these are classes that are used internally by parsers to store their state. 
This allows parsers to be safely used in a multi-threaded scenario so the parsing of one Graph/Store cannot affect the parsing of another. - + Gets all the Triples in the Dataset with the given Subject and Object + Subject + Object + - + - Base Class for Parser Contexts + Gets all the Triples in the Dataset with the given Predicate and Object + Predicate + Object + - + - RDF Handler used to handle the generated RDF + If there have been changes made to the Dataset and the underlying in-memory store is a ITransactionalStore ensures the underlying store is notified to flush those changes - + - Is Parsing Traced? + An in-memory dataset that operates in terms of quads, underlying storage is identical to a InMemoryDataset though this dataset should be more performant for queries that access named graphs frequently - + - Creates a new Base Parser Context + Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage - Graph to parse into - + - Creates a new Base Parser Context + Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage - Graph to parse into - Whether to trace parsing + Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Creates a new Base Parser Context + Creates a new in-memory dataset containing initially just the given graph and treating the given graph as the default graph of the dataset + + Graph + + + + Creates a new In-Memory dataset + + In-Memory queryable store + + + + Creates a new In-Memory dataset + + In-Memory queryable store + Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset + + + + Creates a new In-Memory dataset - RDF Handler + In-Memory queryable store + Default Graph URI - + - Creates a new Base Parser Context + Gets the Lock used to ensure MRSW concurrency on the dataset when available - RDF Handler - Whether to 
trace parsing - + - Gets the Handler used to handle the generated RDF + Adds a Graph to the Dataset merging it with any existing Graph with the same URI + Graph - + - Gets/Sets whether to trace parsing + Removes a Graph from the Dataset + Graph URI - + - Gets the Namespace Map for the parsing context + Gets whether a Graph with the given URI is the Dataset + Graph URI + - + - Gets the Base URI for the parsing context + Gets all the Graphs in the Dataset - + - Class for Parser Contexts for Tokeniser based Parsing + Gets all the URIs of Graphs in the Dataset - + - Tokeniser + Gets the Graph with the given URI from the Dataset + Graph URI + + + + For In-Memory datasets the Graph returned from this property is no different from the Graph returned by the GetModifiableGraphInternal() method + + - + - Is Tokeniser traced? + Gets a Modifiable wrapper around a Graph in the Dataset + Graph URI + - + - Local Tokens + Adds a quad to the dataset + Graph URI + Triple - + - Creates a new Tokenising Parser Context with default settings + Gets whether the dataset contains a given Quad - Graph to parse into - Tokeniser to use + Graph URI + Triple - + - Creates a new Tokenising Parser Context with custom settings + Gets all quads for a given graph - Graph to parse into - Tokeniser to use - Tokeniser Queue Mode + Graph URI + - + - Creates a new Tokenising Parser Context with custom settings + Gets all Quads with a given object - Graph to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Graph URI + Object + - + - Creates a new Tokenising Parser Context with custom settings + Gets all Quads with a given predicate - Graph to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Graph URI + Predicate + - + - Creates a new Tokenising Parser Context with default settings + Gets all Quads with a given predicate and object - RDF Handler - Tokeniser to use + Graph URI + Predicate + Object + - 
+ - Creates a new Tokenising Parser Context with custom settings + Gets all Quads with a given subject - RDF Handler - Tokeniser to use - Tokeniser Queue Mode + Graph URI + Subject + - + - Creates a new Tokenising Parser Context with custom settings + Gets all Quads with a given subject and object - RDF Handler - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Graph URI + Subject + Object + - + - Creates a new Tokenising Parser Context with custom settings + Gets all Quads with a given subject and predicate - RDF Handler - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Graph URI + Subject + Predicate + - + - Gets the Token Queue + Removes a quad from the dataset + Graph URI + Triple - + - Gets the Local Tokens stack + Flushes any changes to the store - + - Gets/Sets whether tokeniser tracing is used + Interfaces for Datasets that SPARQL Queries and Updates can be applied to + + + Note: For all operations that take a Graph URI a null Uri should be considered to refer to the Default Graph of the dataset + +

Default and Active Graph

+ + Leviathan expects that a Query operates over the Dataset in the following order: +
    +
  1. If an Active Graph is set then Queries operate over that
  2. +
  3. Otherwise if a Default Graph is set then Queries operate over that
  4. +
  5. Finally the Queries operate over the all triples, the notion of all triples may be dataset implementation specific i.e. may be union of all graphs, the default unnamed graph only or something else entirely
  6. +
+ Please note that the Query may change the Active and Default Graph over the course of the query depending on the Query e.g. FROM, FROM NAMED and GRAPH all can potentially change these. +
+ + You can limit your queries to use specific portions of your dataset by using the SetActiveGraph() and SetDefaultGraph() methods on your dataset instance before then passing it to the LeviathanQueryProcessor + + + Note: By default the InMemoryDataset uses the Union of all Graphs in the Dataset if no Active/Default Graph is otherwise specified. Use the UsesUnionDefaultGraph property to see whether a Dataset implementation behaves in this way. + +
- + - Base class for SPARQL Results Parser Contexts + Sets the Active Graph to be the merge of the Graphs with the given URIs + Graph URIs - + - Controls parser tracing behaviour + Sets the Active Graph to be the Graph with the given URI + Graph URI - + - Creates a new Results Parser Context + Sets the Default Graph to be the Graph with the given URI - Result Set - Whether to trace parsing + Graph URI - + - Creates a new Results Parser Context + Sets the Default Graph to be the merge of the Graphs with the given URIs - Result Set + Graph URIs - + - Creates a new Parser Context + Resets the Active Graph to the previous Active Graph - Results Handler - Whether to trace parsing - + - Creates a new Results Parser Context + Resets the Default Graph to the previous Default Graph - Results Handler - + - Gets the Results Handler to be used + Gets the enumeration of the Graph URIs that currently make up the default graph - + - Gets the Variables that have been seen + Gets the enumeration of the Graph URIs that currently make up the active graph - + - Gets/Sets whether Parser Tracing is used + Gets whether the Default Graph is treated as being the union of all Graphs in the dataset when no Default Graph is otherwise set - + - Class for Tokenising SPARQL Results Parser Contexts + Adds a Graph to the Dataset + Graph + May be thrown if the Dataset is immutable i.e. Updates not supported /// May be thrown if the Dataset is immutable - + - Tokeniser + Removes a Graph from the Dataset + Graph URI + May be thrown if the Dataset is immutable i.e. Updates not supported /// May be thrown if the Dataset is immutable - + - Is Tokeniser traced? 
+ Gets whether a Graph with the given URI is the Dataset + Graph URI + - + - Local Tokens + Gets all the Graphs in the Dataset - + - Creates a new Tokenising Parser Context with default settings + Gets all the URIs of Graphs in the Dataset - Result Set to parse into - Tokeniser to use - + - Creates a new Tokenising Parser Context with custom settings + Gets the Graph with the given URI from the Dataset - Result Set to parse into - Tokeniser to use - Tokeniser Queue Mode + Graph URI + + + + This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the underlying storage + + - + - Creates a new Tokenising Parser Context with custom settings + Gets the Graph with the given URI from the Dataset - Result Set to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Graph URI + + May be thrown if the Dataset is immutable i.e. 
Updates not supported /// May be thrown if the Dataset is immutable + + + Graphs returned from this method must be modifiable and the Dataset must guarantee that when it is Flushed or Disposed of that any changes to the Graph are persisted + + - + - Creates a new Tokenising Parser Context with custom settings + Gets whether the Dataset has any Triples - Result Set to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new Tokenising Parser Context with default settings + Gets whether the Dataset contains a specific Triple - Results Handler - Tokeniser to use + Triple + - + - Creates a new Tokenising Parser Context with custom settings + Gets all the Triples in the Dataset - Results Handler - Tokeniser to use - Tokeniser Queue Mode + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Creates a new Tokenising Parser Context with custom settings + Gets all the Triples in the Dataset with the given Subject - Results Handler - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Subject + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Creates a new Tokenising Parser Context with custom settings + Gets all the Triples in the Dataset with the given Predicate - Results Handler - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Predicate + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Gets the Token Queue + Gets all the Triples in the 
Dataset with the given Object + Object + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Gets the Local Tokens stack + Gets all the Triples in the Dataset with the given Subject and Predicate + Subject + Predicate + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Gets/Sets whether tokeniser tracing is used + Gets all the Triples in the Dataset with the given Subject and Object + Subject + Object + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Base Class for Store Parser Contexts + Gets all the Triples in the Dataset with the given Predicate and Object + Predicate + Object + + + + Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + + - + - Is Parsing Traced? 
+ Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + + + While partly intended for use in implementations which support transactions though other implementations may wish to use this to ensure that changes to the dataset are persisted properly + + - + - Creates a new Store Parser Context + Ensures that any changes to the Dataset (if any) are discarded - RDF Handler - Whether to trace parsing + + + Primarily intended for use in implementations which support transactions though other implementations may wish to use this to ensure that changes to the dataset are persisted properly + + - + - Creates a new Store Parser Context + Interface for SPARQL Datasets which also provide a Lock by which threading can be controlled - RDF Handler + + Note that there is no guarantees that consuming code will respect the fact that a Dataset is Thread Safe and use the Lock property appropriately. Additionally some datasets may choose to implement thread safety in other ways which don't rely on this interface + - + - Creates a new Base Store Parser Context + Gets the Lock used to ensure MRSW concurrency of the Dataset when used with the Leviathan SPARQL processors - Triple Store - + - Creates a new Base Parser Context + A Triple Collection which is a thin wrapper around a BaseQuadDataset to reduce much of the complexity for ISparqlDataset implementors around returning of Graphs - Triple Store - Whether to trace parsing - + - Gets/Sets whether to trace parsing + Implementation of a dataset wrapper which can load additional graphs from the web on demand - + - Gets the RDF Handler that is in-use + Creates a new Web Demand Dataset + Underlying Dataset - + - Gets the Namespace Map for the parser context + Tries to load graphs from the web + Graph URI + Graph + - + - Gets the Base URI for the parser context + An abstract dataset wrapper that can be used to wrap another dataset and just modify some functionality i.e. 
provides a decorator over an existing dataset - + - Class for Store Parser Contexts for Tokeniser based Parsing + Underlying Dataset - + - Tokeniser + Creates a new wrapped dataset + Dataset - + - Is Tokeniser traced? + Gets the Lock used to ensure MRSW concurrency on the dataset when available - + - Local Tokens + Gets the underlying dataset - + - Creates a new Tokenising Store Parser Context with default settings + Sets the Active Graph for the dataset - Store to parse into - Tokeniser to use + Graph URIs - + - Creates a new Tokenising Store Parser Context with custom settings + Sets the Active Graph for the dataset - Store to parse into - Tokeniser to use - Tokeniser Queue Mode + Graph URI - + - Creates a new Tokenising Store Parser Context with custom settings + Sets the Default Graph for the dataset - Store to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Graph URI - + - Creates a new Tokenising Store Parser Context with custom settings + Sets the Default Graph for the dataset - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Graph URIs - + - Creates a new Tokenising Store Parser Context with default settings + Resets the Active Graph - Store to parse into - Tokeniser to use - + - Creates a new Tokenising Store Parser Context with custom settings + Resets the Default Graph - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - + - Creates a new Tokenising Store Parser Context with custom settings + Gets the Default Graph URIs - Store to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new Tokenising Store Parser Context with custom settings + Gets the Active Graph URIs - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation - + - Gets the Token Queue + Gets whether the default graph is the union of all graphs - 
+ - Gets the Local Tokens stack + Adds a Graph to the dataset + Graph - + - Gets/Sets whether tokeniser tracing is used + Removes a Graph from the dataset + Graph URI - + - Interface for Event Parser contexts + Gets whether the dataset contains a given Graph - Event Type + Graph URI + - + - Queue of Events + Gets the Graphs in the dataset - + - Interface for Parser Contexts + Gets the URIs of Graphs in the dataset - + - Gets the RDF Handler which is used to instantiate Nodes and to handle the generated RDF + Gets a Graph from the dataset + Graph URI + - + - Gets/Sets whether Parser Tracing should be used (if the Parser supports it) + Gets a modifiable graph from the dataset + Graph URI + - + - Gets the Namespace Map for the Handler + Gets whether the dataset has any triples - + - Gets the Base URI for the Handler + Gets whether the dataset contains a given triple + Triple + - + - Interface for Parser Contexts which use Tokeniser based parsing + Gets all triples from the dataset - + - Gets/Sets whether Tokenisation is Traced + Gets triples with a given subject + Subject + - + - Gets the Local Tokens Stack + Gets triples with a given predicate + Predicate + - + - Gets the Token Queue + Gets triples with a given object + Object + - + - Interface for SPARQL Results Parser Contexts + Gets triples with a given subject and predicate + Subject + Predicate + - + - Gets the SPARQL Results Handler to use + Gets triples with a given subject and object + Subject + Object + - + - Interface for Store Parser Contexts + Gets triples with a given predicate and object + Predicate + Object + - + - Gets the RDF Handler which is used to instantiate Nodes and to handle the generated RDF + Flushes any changes to the dataset - + - Gets/Sets whether Parser Tracing should be used (if the Parser supports it) + Discards any changes to the dataset - + - Gets the Namespace Map for the Handler + Serializes the Configuration of the Dataset + Serialization Context - + - Gets the Base URI for the 
Handler + + Namespace for classes which implement algorithms for executing DESCRIBE queries + - + - Parser Context for RDF/JSON Parsers + Abstract Base Class for SPARQL Describe Algorithms which provides BNode rewriting functionality - + - Creates a new JSON Parser Context + Gets the Description Graph based on the Query Results from the given Evaluation Context - Graph to parse into - JSON Text Reader to read from + SPARQL Evaluation Context + - + - Creates a new JSON Parser Context + Gets the Description Graph based on the Query Results from the given Evaluation Context passing the resulting Triples to the given RDF Handler - RDF Handler to use - JSON Text Reader to read from + RDF Handler + SPARQL Evaluation Context - + - Gets the JSON Text Reader which input is read from + Generates the Description for each of the Nodes to be described + RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Gets the Current Position of the JSON Text Reader + Gets the Nodes that the algorithm should generate the descriptions for + Factory to create Nodes in + SPARQL Evaluation Context + - + - Gets the Position range from the given Start Position to the current Position + Helper method which rewrites Blank Node IDs for Describe Queries - Start Position + Triple + Mapping of IDs to new Blank Nodes + Factory to create Nodes in - + - Parser Context for Notation 3 Parsers + Computes a Concise Bounded Description for all the Values resulting from the Query + + + The Description returned is all the Triples for which a Value is a Subject and with any Blank Nodes expanded to include Triples with the Blank Node as the Subject + + - + - Creates a new Notation 3 Parser Context with default settings + Generates the Description for each of the Nodes to be described - Graph to parse into - Tokeniser to use + RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Creates a new Notation 3 Parser Context with custom settings + Interface for classes that implement the 
DESCRIBE functionality of SPARQL - Graph to parse into - Tokeniser to use - Tokeniser Queue Mode + + + This is designed so that developers can introduce their own DESCRIBE algorithms as required + + - + - Creates a new Notation 3 Parser Context with custom settings + Generates a Graph which is the description of the resources resulting from the Query - Graph to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + SPARQL Evaluation Context + - + - Creates a new Notation 3 Parser Context with custom settings + Generates the Description Graph based on the Query Results from the given Evaluation Context passing the resulting Triples to the given RDF Handler - Graph to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + RDF Handler + SPARQL Evaluation Context - + - Creates a new Notation 3 Parser Context with default settings + Computes a Labelled Description for all the Values resulting from the Query - RDF Handler to use - Tokeniser to use + + + The Description returned is all the Triples for which a Value is a Subject and with any Blank Nodes expanded to include their rdfs:label property if present + + - + - Creates a new Notation 3 Parser Context with custom settings + Generates the Description for each of the Nodes to be described - RDF Handler to use - Tokeniser to use - Tokeniser Queue Mode + RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Creates a new Notation 3 Parser Context with custom settings + Computes the merge of the Minimal Spanning Graphs for all the Values resulting from the Query - RDF Handler to use - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new Notation 3 Parser Context with custom settings + Generates the Description for each of the Nodes to be described - RDF Handler to use - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + 
RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Gets/Sets whether Keywords Mode is in use + Computes a Description for all the results such that the description is the merge of all the Graphs named with a resulting URI - + - Gets the list of in-use Keywords + Generates the Description for each of the Nodes to be described + RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Gets the Variable Context for Triples + Computes a Simple Subject Description for all Values resulting from the Query + + + The Description returned is all the Triples for which a Value is the Subject - this description does not expand any Blank Nodes + + - + - Pushes the current in-scope Graph onto the Graph stack and creates a new empty Graph to be the in-scope Graph + Generates the Description for each of the Nodes to be described + + RDF Handler + SPARQL Evaluation Context + Nodes to be described + + + + Computes a Simple Subject Object Description for all Values resulting from the Query - Used for Graph Literal parsing - Base Uri and Namespace Maps of the outermost Graph is propogated to the innermost Graph + + The Description returned is all the Triples for which a Value is the Subject or Object - this description does not expand any Blank Nodes + - + - Pops a Graph from the Graph stack to become the in-scope Graph + Generates the Description for each of the Nodes to be described + + RDF Handler + SPARQL Evaluation Context + Nodes to be described + + + + Computes a Symmetric Concise Bounded Description for all the Values resulting from the Query - Used for Graph Literal parsing + + The Description returned is all the Triples for which a Value is a Subject/Object and with any Blank Nodes expanded to include Triples with the Blank Node as the Subject + - + - Gets the current sub-graph (if any) + Generates the Description for each of the Nodes to be described + RDF Handler + SPARQL Evaluation Context + Nodes to be described - + - Gets whether the 
Context is currently for a Graph Literal + + Namespace containing all the classes related to the execution of expressions in SPARQL queries. Any valid expression should be able to be modelled and executed using these clases. + - + - Parser Context for RDFa Parsers + Namespace containing expression classes pertaining to arithmetic operations - + - Creates a new Parser Context + Class representing Arithmetic Addition expressions - Graph - XML Document - + - Creates a new Parser Context + Creates a new Addition Expression - Graph - HTML Document - Whether to Trace Parsing + Left Hand Expression + Right Hand Expression - + - Creates a new Parser Context + Calculates the Numeric Value of this Expression as evaluated for a given Binding - RDF Handler to use - HTML Document - Whether to Trace Parsing + Evaluation Context + Binding ID + - + - Creates a new Parser Context + Gets the String representation of this Expression - RDF Handler to use - HTML Document + - + - Gets the HTML Document + Gets the Type of the Expression - + - Gets/Sets whether xml:base is allowed in the embedded RDF + Gets the Functor of the Expression - + - Gets/Sets the Default Vocabularly + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets/Sets the RDFa syntax in use + Class representing Arithmetic Division expressions - + - Evaluation Context for RDFa Parsers + Creates a new Division Expression + Left Hand Expression + Right Hand Expression - + - Creates a new RDFa Evaluation Context + Calculates the Numeric Value of this Expression as evaluated for a given Binding - Base URI + Evaluation Context + Binding ID + - + - Creates a new RDFa Evaluation Context + Gets the String representation of this Expression - Base URI - Namepace Map + - + - Gets/Sets the Base URI + Gets the Type of the Expression - + - Gets/Sets the Parent Subject + Gets the Functor of the Expression - + - Gets/Sets the Parent Object + Transforms the Expression using the given Transformer + 
Expression Transformer + - + - Gets the Namespace Map + Class representing Unary Minus expressions (sign of numeric expression is reversed) - + - Gets/Sets the Language + Creates a new Unary Minus Expression + Expression to apply the Minus operator to - + - Gets the list of incomplete Triples + Calculates the Numeric Value of this Expression as evaluated for a given Binding + Evaluation Context + Binding ID + - + - Gets/Sets the Local Vocabulary + Gets the String representation of this Expression + - + - Represents an incomplete Triple as part of the RDFa parsing process + Gets the Type of the Expression - + - Creates a new Incomplete Triple + Gets the Functor of the Expression - Predicate - Direction - + - Gets the Predicate of the Incomplete Triple + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the Direction of the Incomplete Triple + Class representing Arithmetic Multiplication expressions - + - Possible Directions for Incomplete Triples + Creates a new Multiplication Expression + Left Hand Expression + Right Hand Expression - + - Forward + Calculates the Numeric Value of this Expression as evaluated for a given Binding + Evaluation Context + Binding ID + - + - Reverse + Gets the String representation of this Expression + - + - Parser Context for RDF/XML Parser + Gets the Type of the Expression - + - Creates a new Parser Context + Gets the Functor of the Expression - Graph - XML Document - + - Creates a new Parser Context + Transforms the Expression using the given Transformer - Graph - XML Document - Whether to Trace Parsing + Expression Transformer + - + - Creates a new Parser Context + Class representing Arithmetic Subtraction expressions - RDF Handler - XML Document - + - Creates a new Parser Context + Creates a new Subtraction Expression - RDF Handler - XML Document - Whether to Trace Parsing + Left Hand Expression + Right Hand Expression - + - Creates a new Parser Context which uses Streaming parsing + 
Calculates the Numeric Value of this Expression as evaluated for a given Binding - Graph - Stream + Evaluation Context + Binding ID + - + - Creates a new Parser Context which uses Streaming parsing + Gets the String representation of this Expression - RDF Handler - Stream + - + - Creates a new Parser Context which uses Streaming parsing + Gets the Type of the Expression - Graph - Input - + - Creates a new Parser Context which uses Streaming parsing + Gets the Functor of the Expression - RDF Handler - Input - + - Gets the Event Queue + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the Mapping of in-use IDs + Namespace containing expression classes pertaining to comparison operations - + - SPARQL JSON Parser Context + Class representing Relational Equality expressions - + - Creates a new Parser Context + Creates a new Equality Expression - JSON Text Reader - Results Handler + Left Hand Expression + Right Hand Expression - + - Creates a new Parser Context + Evaluates the expression - JSON Text Reader - SPARQL Result Set + Evaluation Context + Binding ID + - + - Gets the JSON Text Reader + Gets the String representation of this Expression + - + - Parser Context for SPARQL Query parser + Gets the Type of the Expression - + - Creates a new SPARQL Query Parser Context with default settings + Gets the Functor of the Expression - Tokeniser to use - + - Creates a new SPARQL Query Parser Context with custom settings + Transforms the Expression using the given Transformer - Tokeniser to use - Tokeniser Queue Mode + Expression Transformer + - + - Creates a new SPARQL Query Parser Context with custom settings + Class representing Relational Greater Than Expressions - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new SPARQL Query Parser Context with custom settings + Creates a new Greater Than Relational Expression - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - 
Whether to trace tokenisation + Left Hand Expression + Right Hand Expression - + - Creates a new SPARQL Query Parser Context for parsing sub-queries + Evaluates the expression - Parent Query Parser Context - Tokens that need parsing to form a subquery + Evaluation Context + Binding ID + - + - Creates a new Query Parser Context from the given Token Queue + Gets the String representation of this Expression - Token Queue + - + - Gets the Query that this Parser Context is populating + Gets the Type of the Expression - + - Gets/Sets whether the Query Verb has been seen + Gets the Functor of the Expression - + - Returns whether this Parser Context is for a sub-query + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets/Sets the Syntax that should be supported + Class representing Relational Greater Than or Equal To Expressions - + - Gets/Sets the default Base Uri to resolve relative URIs against + Creates a new Greater Than or Equal To Relational Expression + Left Hand Expression + Right Hand Expression - + - Gets the Expression Parser + Evaluates the expression + Evaluation Context + Binding ID + - + - Gets the Property Path Parser + Gets the String representation of this Expression + - + - Gets/Sets the current Graph Pattern ID + Gets the Type of the Expression - + - Gets a new Blank Node ID + Gets the Functor of the Expression - - + - Gets the mapping of in use Blank Nodes IDs + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the last Blank Node ID that was issued + Class representing Relational Less Than Expressions - + - Gets/Sets whether Blank Node scoping must be checked + Creates a new Less Than Relational Expression - - If false then only name tracking will be done to prevent auto-generated IDs colliding with user allocated IDs - + Left Hand Expression + Right Hand Expression - + - Gets the Next Available Alias ID for aliasing Project Expressions and Aggregates which don't have 
an Aggregate Specified + Evaluates the expression + Evaluation Context + Binding ID + - + - Gets the Custom Expression Factories valid for this Parser + Gets the String representation of this Expression + - + - Parser Context for SPARQL RDF Parser + Gets the Type of the Expression - + - Creates a new Parser Context + Gets the Functor of the Expression - Graph to parse from - Results Handler - + - Creates a new Parser Context + Transforms the Expression using the given Transformer - Graph to parse from - Results Handler + Expression Transformer + - + - Gets the Graph being parsed from + Class representing Relational Less Than or Equal To Expressions - + - Parser Context for SPARQL Update Parser + Creates a new Less Than or Equal To Relational Expression + Left Hand Expression + Right Hand Expression - + - Creates a new SPARQL Update Parser Context + Evaluates the expression - Tokeniser + Evaluation Context + Binding ID + - + - Creates a new SPARQL Update Parser Context with custom settings + Gets the String representation of this Expression - Tokeniser to use - Tokeniser Queue Mode + - + - Creates a new SPARQL Update Parser Context with custom settings + Gets the Type of the Expression - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new SPARQL Update Parser Context with custom settings + Gets the Functor of the Expression - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation - + - Gets the Update Command Set that is being populated + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the Expression Parser + Class representing Relational Non-Equality expressions - + - Gets the Path Parser + Creates a new Non-Equality Expression + Left Hand Expression + Right Hand Expression - + - Gets the Query Parser + Evaluates the expression + Evaluation Context + Binding ID + - + - Gets the Namespace Map + Gets the String representation of this 
Expression + - + - Gets/Sets the locally scoped custom expression factories + Gets the Type of the Expression - + - Gets the set of BNodes used in INSERT DATA commands so far + Gets the Functor of the Expression - + - Parser Context for SPARQL XML Results parsers + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Parser Context + Namespace containing expression classes pertaining to conditional operations - XML Reader - Results Handler - + - Creates a new Parser Context + Class representing Conditional And expressions - XML Reader - Results Set to load into - + - Gets the XML Reader + Creates a new Conditional And Expression + Left Hand Expression + Right Hand Expression - + - Parser Context class for TriG Parsers + Evaluates the expression + Evaluation Context + Binding ID + - + - Creates a new TriG Parser Context with default settings + Gets the String representation of this Expression - Store to parse into - Tokeniser to use + - + - Creates a new TrigG Parser Context with custom settings + Gets the Type of the Expression - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - + - Creates a new TriG Parser Context with custom settings + Gets the Functor of the Expression - Store to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation - + - Creates a new TriG Parser Context with custom settings + Transforms the Expression using the given Transformer - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + Expression Transformer + - + - Creates a new TriG Parser Context with default settings + Class representing logical Not Expressions - Store to parse into - Tokeniser to use - + - Creates a new TrigG Parser Context with custom settings + Creates a new Negation Expression - Store to parse into - Tokeniser to use - Tokeniser Queue Mode + Expression to Negate - + - Creates a new TriG Parser Context with 
custom settings + Evaluates the expression - Store to parse into - Tokeniser to use - Whether to trace parsing - Whether to trace tokenisation + Evaluation Context + Binding ID + - + - Creates a new TriG Parser Context with custom settings + Gets the String representation of this Expression - Store to parse into - Tokeniser to use - Tokeniser Queue Mode - Whether to trace parsing - Whether to trace tokenisation + - + - Gets/Sets whether the Default Graph exists + Gets the Type of the Expression - + - Gets/Sets the Syntax to be used + Gets the Functor of the Expression - + - RDF Handler which turns triples into rows in a Data Table + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants for Default Column Names + Class representing Conditional Or expressions - + - Constants for Default Column Names + Creates a new Conditional Or Expression + Left Hand Expression + Right Hand Expression - + - Constants for Default Column Names + Evaluates the expression + Evaluation Context + Binding ID + - + - Data Table into which Triples will be converted to rows + Gets the String representation of this Expression + - + - Creates a new Handler for a given Data Table with custom column names + Gets the Type of the Expression - Data Table - Subject Column Name - Predicate Column Name - Object Column Name - + - Creates a new Handler for a given Data Table using the default column names + Gets the Functor of the Expression - Data Table - + - Handles a Triple by turning it into a row in the Data Table + Transforms the Expression using the given Transformer - Triple + Expression Transformer - - To customize how a Triple is converted into a row in the table derive from this class and override this method - - + - Indicates that the Handler accepts all triples + + Namespace containing expression classes which model functions in SPARQL expressions + - + - A Results Handler which extracts URIs from one/more variables in a Result Set + Namespace 
containing expression classes which provide the ARQ function library - + - Creates a new List URIs Handler + Represents the ARQ afn:bnode() function - Variable to build the list from - + - Creates a new List URIs Handler + Creates a new ARQ afn:bnode() function - Variables to build the list from + Expression - + - Gets the URIs + Gets the value of the function in the given Evaluation Context for the given Binding ID + Evaluation Context + Binding ID + - + - Starts handling results + Gets the String representation of the function + - + - Handles boolean results + Gets the Type of the Expression - Result - + - Handles variable declarations + Gets the Functor of the Expression - Variable - - + - Handles results by extracting any URI values from the relevant variables + Transforms the Expression using the given Transformer - Result + Expression Transformer - + - A Results Handler which extracts Literals from one/more variables in a Result Set + Gets whether the expression can be parallelized - + - Creates a new List Strings handler + Gets the arguments of the expression - Variable to build the list from - + - Creates a new List Strings handler + Gets the variables in the expression - Variables to build the list from - + - Gets the Strings + Represents the ARQ e() function - + - Starts handling results + Evaluates the function + Context + Binding ID + - + - Handles boolean results + Gets the String representation of the function - Result + - + - Handles variable declarations + Gets the Functor of the Expression - Variable - - + - Handles results by extracting strings from relevant variables + Gets the Variables used - Result - - + - A decorator for handlers which ensures that all blank nodes get unique IDs even if a blank node identifier is reused + Gets the type of the expression - - - In most parsing scenarios this handler is not suitable for usage as it may unintentionally modify the RDF data being parsed, in non-parsing scenarios where this handler is instead being 
used as a means to generate RDF data from some non-RDF source it may prove very useful. - - - This handler essentially works by redirecting all calls to the argument taking form of with the non-argument form which should always generate a new blank node thus guaranteeing the uniqueness of nodes. - - - + - Creates a new Unique Blank Nodes handler + Gets the arguments of the expression - - + - Gets the inner handler + Gets whether an expression can safely be evaluated in parallel - + - Creates a Blank Node + Applies a transformer to the expressions arguments - Node ID which will be ignored by this Handler + Transformer - + - Starts handling RDF + Represents the ARQ afn:localname() function - + - Ends handling RDF + Creates a new ARQ Local Name function - Whether parsing completed OK + Expression - + - Handles a Base URI declaration + Gets the value of the function in the given Evaluation Context for the given Binding ID - Base URI + Evaluation Context + Binding ID - + - Handles a Namespace declaration + Gets the String representation of the function - Prefix - Namespace URI - + - Handles a Triple + Gets the Type of the Expression - Triple - - + - Gets whether the inner handler accepts all + Gets the Functor of the Expression - + - - Namespace for RDF and SPARQL Results Handlers - - - Handlers are a powerful low level part of the parsers API, they allow you to parse RDF, RDF Datasets and SPARQL Results in such a way that you can take arbitrary actions with the data and choose to end parsing as soon as desired. 
- + Transforms the Expression using the given Transformer + Expression Transformer + - + - A RDF Handler which just determines whether any Triples are present terminating parsing as soon as the first triple is received + Represents the ARQ max() function - + - Creates a new Any Handler + Creates a new ARQ max() function + First Argument + Second Argument - + - Gets whether any Triples have been parsed + Gets the numeric value of the function in the given Evaluation Context for the given Binding ID + Evaluation Context + Binding ID + - + - Starts handling RDF by resetting the Any flag to false + Gets the String representation of the function + - + - Handles Base URIs by ignoring them + Gets the Type of the Expression - Base URI - - + - Handles Namespaces by ignoring them + Gets the Functor of the Expression - Prefix - Namespace URI - - + - Handles Triples by setting the Any flag and terminating parsing + Transforms the Expression using the given Transformer - Triple + Expression Transformer - + - Gets that this handler does not accept all triples since it stops as soon as it sees the first triple + Represents the ARQ min() function - + - A RDF Handler that rewrites the Graph URIs of Triples before passing them to an inner handler + Creates a new ARQ min() function + First Argument + Second Argument - + - Creates a new Graph URI rewriting handler + Gets the numeric value of the function in the given Evaluation Context for the given Binding ID - Handler to wrap - Graph URI to rewrite to + Evaluation Context + Binding ID + - + - Gets the Inner Handler + Gets the String representation of the function + - + - Starts handling of RDF + Gets the Type of the Expression - + - Ends handling of RDF + Gets the Functor of the Expression - Whether parsing completed OK - + - Handles a Base URI declaration + Transforms the Expression using the given Transformer - Base URI + Expression Transformer - + - Handles a Namespace declaration + Represents the ARQ namespace() function - 
Namespace Prefix - Namespace URI - - + - Handles a Triple by rewriting the Graph URI and passing it to the inner handler + Creates a new ARQ Namespace function - Triple + Expression + + + + Gets the value of the function in the given Evaluation Context for the given Binding ID + + Evaluation Context + Binding ID - + - Returns true since this handler accepts all triples + Gets the String representation of the function + - + - Abstract Base Class for Handlers + Gets the Type of the Expression - + - Creates a new Handler + Gets the Functor of the Expression - + - Creates a new Handler using the given Node Factory + Transforms the Expression using the given Transformer - Node Factory + Expression Transformer + - + - Gets/Sets the in-use Node Factory + Represents the ARQ afn:now() function - + - Creates a Blank Node + Gets the value of the function in the given Evaluation Context for the given Binding ID - + Evaluation Context + Binding ID + + Returns a constant Literal Node which is a Date Time typed Literal + - + - Creates a Blank Node with the given ID + Gets the Type of the Expression - Node ID - - + - Creates a Graph Literal Node + Gets the Functor of the Expression - - + - Creates a Graph Literal Node with the given sub-graph + Gets whether an expression can safely be evaluated in parallel - Sub-graph - - + - Creates a Literal Node with the given Datatype + Gets the String representation of the function - Value - Datatype URI - + - Creates a Literal Node + Gets the variables in the expression - Value - - + - Creates a Literal Node with the given Language + Gets the arguments of the expression - Value - Language - - + - Creates a URI Node + Returns the expression as there are no arguments to be transformed - URI + Expression Transformer - + - Creates a Variable Node + Represents the ARQ pi() function - Variable Name - - + - Gets the next available Blank Node ID + Creates a new ARQ Pi function - - + - Abstract Base Class for RDF Handlers + Evaluates the expression + 
Evaluation Context + Binding ID + - + - Creates a new RDF Handler + Gets the String representation of the function + - + - Creates a new RDF Handler using the given Node Factory + Gets the Functor of the Expression - Node Factory - + - Starts the Handling of RDF + Gets the variables in the expression - + - Optionally used by derived Handlers to do additional actions on starting RDF handling + Gets whether an expression can safely be evaluated in parallel - + - Ends the Handling of RDF + Gets the type of the expression - Whether the parsing completed without error - + - Optionally used by derived Handlers to do additional actions on ending RDF handling + Gets the arguments of the expression - Whether the parsing completed without error - + - Handles Namespace declarations + Returns the expression as there are no arguments to be transformed - Prefix - Namespace URI + Expression Transformer - + - Optionally used by derived Handlers to do additional actions on handling namespace declarations + Represents the ARQ afn:sha1sum() function - Prefix - Namespace URI - - + - Handles Base URI declarations + Creates a new ARQ SHA1 Sum function - Base URI - + Expression - + - Optionally used by derived Handlers to do additional actions on handling Base URI declarations + Gets the String representation of the function - Base URI - + - Handles Triples + Gets the Functor of the Expression - Triple - - + - Must be overridden by derived handlers to take appropriate Triple handling action + Transforms the Expression using the given Transformer - Triple + Expression Transformer - + - Gets whether the Handler will accept all Triples i.e. 
it will never abort handling early + Represents the ARQ afn:strjoin() function which is a string concatenation function with a separator - + - Abstract Base Class for SPARQL Results Handlers + Creates a new ARQ String Join function + Separator Expression + Expressions to concatentate - + - Creates a new SPARQL Results Handler + Gets the value of the function in the given Evaluation Context for the given Binding ID - Node Factory + Evaluation Context + Binding ID + - + - Creates a new SPARQL Results Handler + Gets the Variables used in the function - + - Starts Results Handling + Gets the String representation of the function + - + - Optionally used by derived classes to take additional actions on starting Results Handling + Gets the Type of the Expression - + - Ends Results Handling + Gets the Functor of the Expression - Whether parsing completed without error - + - Optionally used by derived classes to take additional actions on ending Results Handling + Gets the Arguments of the Expression - Whether parsing completed without error - + - Handles a Boolean Results + Gets whether an expression can safely be evaluated in parallel - Result - + - Must be overridden by derived handlers to appropriately handle boolean results + Transforms the Expression using the given Transformer - Result + Expression Transformer + - + - Handles a Variable declaration + Represents the ARQ afn:substring() function which is a sub-string with Java semantics - Variable Name - - + - Must be overridden by derived handlers to appropriately handle variable declarations + Creates a new ARQ substring function - Variable Name - + Expression + Expression giving an index at which to start the substring - + - Handlers SPARQL Results + Creates a new ARQ substring function - Result - + Expression + Expression giving an index at which to start the substring + Expression giving an index at which to end the substring - + - Must be overridden by derived handlers to appropriately handler SPARQL Results + 
Gets the value of the function in the given Evaluation Context for the given Binding ID - Result + Evaluation Context + Binding ID - + - A RDF Handler which wraps another Handler allowing handling to be cancelled + Gets the Variables used in the function - + - Creates a new Cancellable Handler + Gets the String representation of the function - + - + - Gets the Inner Handler wrapped by this Handler + Gets the Type of the Expression - + - Starts RDF Handling on the inner Handler + Gets the Functor of the Expression - + - Ends RDF Handling on the inner Handler + Gets the Arguments of the Expression - Indicates whether parsing completed without error - + - Handles Base URIs by passing them to the inner handler and cancelling handling if it has been requested + Gets whether an expression can safely be evaluated in parallel - Base URI - - + - Handles Namespace Declarations by passing them to the inner handler and cancelling handling if it has been requested + Transforms the Expression using the given Transformer - Namespace Prefix - Namespace URI + Expression Transformer - + - Handles Triples by passing them to the inner handler and cancelling handling if it has been requested + Namespace containing expression classes which provide the Leviathan function library - Triple - - + - Gets that this Handler does not accept all Triples + Namespace containing expression classes which provide the hash functions from the Leviathan function library - + - Informs the Handler that it should cancel handling at the next point possible assuming handling has not already completed + Represents the Leviathan lfn:md5hash() function - + - Static Class of extension methods for use with Handler classes + Creates a new Leviathan MD5 Hash function + Expression - + - Gets the Base URI from the RDF Handler + Gets the String representation of the function - RDF Handler - + - Applies the triples of a Graph to an RDF Handler + Gets the Functor of the Expression - RDF Handler - Graph - + - Applies the 
triples to an RDF Handler + Transforms the Expression using the given Transformer - RDF Handler - Triples + Expression Transformer + - + - Applies the result set to a Results Handler + Represents the Leviathan lfn:sha256hash() function - Results Handler - Result Set - + - A SPARQL Results Handler which loads directly into a Multiset + Creates a new Leviathan SHA 256 Hash function - - Primarily intended for internal usage for future optimisation of some SPARQL evaluation - + Expression - + - Creates a new Multiset Handler + Gets the String representation of the function - Multiset + - + - Handles a Boolean Result by doing nothing + Gets the Functor of the Expression - Boolean Result - + - Handles a Variable by adding it to the Multiset + Transforms the Expression using the given Transformer - Variable + Expression Transformer - + - Handles a Result by adding it to the Multiset - - Result - + Namespace containing expression classes which provide the numeric functions from the Leviathan function library + - + - A SPARQL Results Handler which just counts Results + Namespace containing expression classes which provide the trigonometric functions from the Leviathan function library - - Note: For a Boolean Result Set the counter will either be 1 for true or 0 for false - - + - Creates a new Result Count Handler + Abstract Base Class for Unary Trigonometric Functions in the Leviathan Function Library - + - Starts Results Handling and resets the counter to zero + Trigonometric function - + - Handles a Boolean Result + Creates a new Unary Trigonometric Function - Result + Expression - + - Handles a Variable Declaration + Creates a new Unary Trigonometric Function - Variable Name - + Expression + Trigonometric Function - + - Handles a SPARQL Result by incrementing the counter + Evaluates the expression - Result + Evaluation Context + Binding ID - + - Gets the Count of Results + Gets the expression type - - For Boolean Results counter will be either 1 or 0 depending on whether 
the result was True/False - - + - A SPARQL Results Handler which loads Results into a SparqlResultSet + Gets the string representation of the Function + - + - Creates a new Result Set Handler + Represents the Leviathan lfn:cosec() or lfn:cosec-1 function - Result Set - + - Starts Results Handling + Creates a new Leviathan Cosecant Function + Expression - + - Handles a Boolean Result by setting the Result property of the Result Set + Creates a new Leviathan Cosecant Function - Result + Expression + Whether this should be the inverse function - + - Handles a Variable Declaration by adding the Variable to the Result Set + Gets the String representation of the function - Variable Name - + - Handles a Result by adding it to the Result Set + Gets the Functor of the Expression - Result - - + - A SPARQL Results Handler which allows you to load multiple Result Sets into a single SparqlResultSet which the standard ResultSetHandler does not permit + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Merging Result Set Handler + Represents the Leviathan lfn:cos() or lfn:cos-1 function - Result Set - + - Overrides the base classes logic to avoid the empty check on the Result Set thus allowing multiple result sets to be merged + Creates a new Leviathan Cosine Function + Expression - + - A RDF Handler which simply counts the Triples and Graphs + Creates a new Leviathan Cosine Function + Expression + Whether this should be the inverse function - + - Creates a new Store Count Handler + Gets the String representation of the function + - + - Starts RDF Handling by reseting the counters + Gets the Functor of the Expression - + - Handles Triples/Quads by counting the Triples and distinct Graph URIs + Transforms the Expression using the given Transformer - Triple + Expression Transformer - + - Gets the count of Triples + Represents the Leviathan lfn:cot() or lfn:cot-1 function - + - Gets the count of distinct Graph URIs + Creates a new 
Leviathan Cotangent Function + Expression - + - Gets that this Handler accepts all Triples + Creates a new Leviathan Cotangent Function + Expression + Whether this should be the inverse function - + - A Handler which passes the RDF to be handled to multiple Handlers where Handling terminates in the handling request where one of the Handlers returns false + Gets the String representation of the function - - - This differs from ChainedHandler in that even if one Handler indicates that handling should stop by returning false all the Handlers still have a chance to handle the Base URI/Namespace/Triple before handling is terminated. All Handlers will always have their StartRdf and EndRdf methods called - - + - + - Creates a new Multi Handler + Gets the Functor of the Expression - Inner Handlers for this Handler - + - Gets the Inner Handlers used by this Handler + Transforms the Expression using the given Transformer + Expression Transformer + - + - Starts RDF Handling by starting handling on all inner handlers + Represents the Leviathan lfn:degrees-to-radians() function - + - Ends RDF Handling by ending handling on all inner handlers + Creates a new Leviathan Degrees to Radians Function - Whether parsing completed without error + Expression - + - Handles Base URIs by getting all inner handlers to handle the Base URI + Evaluates the expression - Base URI + Evaluation Context + Binding ID - - Handling ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first - - + - Handles Namespace Declarations by getting all inner handlers to handle it + Gets the String representation of the function - Namespace Prefix - Namespace URI - - Handling ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first - - + - Handles Triples by getting all inner handlers to handler it + Gets the Functor of the Expression - Triple - - - Handling 
ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first - - + - Gets whether this Handler accepts all Triples based on whether all inner handlers do so + Gets the Type of this expression - + - A Handler which passes the RDF to be handled through a sequence of Handlers where Handling is terminated as soon as any Handler returns false + Transforms the Expression using the given Transformer - - - This differs from the MultiHandler in that as soon as any Handler indicates that handling should stop by returning false handling is immediately terminated. All Handlers will always have their StartRdf and EndRdf methods called - - + Expression Transformer + - + - Creates a new Chained Handler + Represents the Leviathan lfn:radians-to-degrees() function - Inner Handlers to use - + - Gets the Inner Handlers used by this Handler + Creates a new Leviathan Radians to Degrees Function + Expression - + - Starts the Handling of RDF for each inner handler + Evaluates the expression + Evaluation Context + Binding ID + - + - Ends the Handling of RDF for each inner handler + Gets the String representation of the function - Whether parsing completed without errors + - + - Handles Base URIs by getting each inner handler to attempt to handle it + Gets the Functor of the Expression - Base URI - - - Handling terminates at the first Handler which indicates handling should stop - - + - Handles Namespaces by getting each inner handler to attempt to handle it + Gets the type of the expression - Namespace Prefix - Namespace URI - - - Handling terminates at the first Handler which indicates handling should stop - - + - Handles Triples by getting each inner handler to attempt to handle it + Transforms the Expression using the given Transformer - Triple + Expression Transformer - - Handling terminates at the first Handler which indicates handling should stop - - + - Gets that this Handler accepts all Triples if all inner 
handlers do so + Represents the Leviathan lfn:sec() or lfn:sec-1 function - + - A RDF Handler that ignores everything it handles + Creates a new Leviathan Secant Function - - Useful if you simply want to parse some RDF to see if it parses and don't care about the actual data being parsed - + Expression - + - Creates a new Null Handler + Creates a new Leviathan Secant Function + Expression + Whether this should be the inverse function - + - Handles a Triple by doing nothing + Gets the String representation of the function - Triple - + - Indicates that the Handler accepts all Triples + Gets the Functor of the Expression - + - A RDF Handler which simply counts the Triples + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a Handler which counts Triples + Represents the Leviathan lfn:sin() or lfn:sin-1 function - + - Resets the current count to zero + Creates a new Leviathan Sine Function + Expression - + - Handles the Triple by incrementing the Triple count + Creates a new Leviathan Sine Function - Triple - + Expression + Whether this should be the inverse function - + - Gets the Count of Triples handled in the most recent parsing operation + Gets the String representation of the function - - Note that each time you reuse the handler the count is reset to 0 - + - + - Gets that the Handler accepts all Triples + Gets the Functor of the Expression - + - A RDF Handler which asserts Triples into a Graph + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Graph Handler + Represents the Leviathan lfn:tan() or lfn:tan-1 function - Graph - + - Gets the Base URI of the Graph currently being parsed into + Creates a new Leviathan Tangent Function + Expression - + - Gets the Graph that this handler wraps + Creates a new Leviathan Tangent Function + Expression + Whether this should be the inverse function - + - Starts Handling RDF ensuring that if the target Graph is non-empty RDF is 
handling into a temporary Graph until parsing completes successfully + Gets the String representation of the function + - + - Ends Handling RDF discarding the handled Triples if parsing failed (indicated by false for the ok parameter) and otherwise merging the handled triples from the temporary graph into the target graph if necessary + Gets the Functor of the Expression - Indicates whether parsing completed OK - + - Handles Namespace Declarations by adding them to the Graphs Namespace Map + Transforms the Expression using the given Transformer - Namespace Prefix - Namespace URI + Expression Transformer - + - Handles Base URI Declarations by setting the Graphs Base URI + Represents the Leviathan lfn:cartesian() function - Base URI - - + - Handles Triples by asserting them in the Graph + Creates a new 2D Cartesian Function - - + Expression for X Coordinate of 1st point + Expression for Y Coordinate of 1st point + Expression for X Coordinate of 2nd point + Expression for Y Coordinate of 2nd point - + - Gets that this Handler accepts all Triples + Creates a new 3D Cartesian Function + Expression for X Coordinate of 1st point + Expression for Y Coordinate of 1st point + Expression for Z Coordiante of 1st point + Expression for X Coordinate of 2nd point + Expression for Y Coordinate of 2nd point + Expression for Z Coordinate of 2nd point - + - A RDF Handler which wraps another handler passing only the chunk of triples falling within a given limit and offset to the underlying Handler + Evaluates the expression - - This handler does not guarantee that you will receive exactly the chunk specified by the limit and offset for two reasons: -
    -
  1. It does not perform any sort of data de-duplication so it is possible that if this handler receives duplicate triples and the underlying handler performs de-duplication then you may see less triples than you expect in your final output since although the underlying handler will receive at most the specified chunk size of triples it may not retain them all
  2. -
  3. If there are fewer triples than the chunk size or if the chunk exceeds the bounds of the data then you will only receive the triples that fall within the chunk (if any)
  4. -
-
+ Evaluation Context + Binding ID +
- + - Creates a new Paging Handler + Internal helper for calculating 2D Cartesian Distance - Inner Handler to use - Limit - Offset - - If you just want to use an offset and not apply a limit then set limit to be less than zero - + Evaluation Context + Binding ID + - + - Creates a new Paging Handler + Internal helper for calculating 3D Cartesian Distance - Inner Handler to use - Limit + Evaluation Context + Binding ID + - + - Gets the Inner Handler wrapped by this Handler + Gets the Variables used in the function - + - Starts RDF Handler + Gets the String representation of the function + - + - Ends RDF Handler + Gets the Type of the Expression - Indicated whether parsing completed without error - + - Handles a Triple by passing it to the Inner Handler only if the Offset has been passed and the Limit has yet to be reached + Gets the Functor of the Expression - Triple - - - Terminates handling immediately upon the reaching of the limit - - + - Handles Namespace Declarations by allowing the inner handler to handle it + Gets the Arguments of the Expression - Namespace Prefix - Namespace URI - - + - Handles Base URI Declarations by allowing the inner handler to handle it + Gets whether an expression can safely be evaluated in parallel - Base URI - + - Gets whether the Handler will accept all Triples based on its Limit setting + Transforms the Expression using the given Transformer + Expression Transformer + - + - A RDF Handler that loads Quads into a ITripleStore instance + Represents the Leviathan lfn:cube() function - + - Creates a new Store Handler + Creates a new Leviathan Cube Function - Triple Store + Expression - + - Gets the Triple Store that this Handler is populating + Evaluates the expression + Evaluation Context + Binding ID + - + - Handles namespaces by adding them to each graph + Gets the String representation of the function - Namespace Prefix - Namespace URI - + - Handles Triples by asserting them into the appropriate Graph creating the Graph if necessary 
+ Gets the Type of this expression - Triple - - + - Starts handling RDF + Gets the Functor of the Expression - + - Ends RDF handling and propogates all discovered namespaces to all discovered graphs + Transforms the Expression using the given Transformer - Whether parsing completed successfully + Expression Transformer + - + - Gets that the Handler accepts all Triples + Represents the Leviathan lfn:e() function - + - A RDF Handler which writes the handled Triples out to a TextWriter using a provided ITripleFormatter + Creates a new Leviathan E Function + + Expression + + + + Evaluates the expression + Evaluation Context + + - + - Creates a new Write-Through Handler + Gets the String representation of the function - Triple Formatter to use - Text Writer to write to - Whether to close the writer at the end of RDF handling + - + - Creates a new Write-Through Handler + Gets the Functor of the Expression - Triple Formatter to use - Text Writer to write to - + - Creates a new Write-Through Handler + Gets the type of the expression - Type of the formatter to create - Text Writer to write to - Whether to close the writer at the end of RDF handling - + - Creates a new Write-Through Handler + Transforms the Expression using the given Transformer - Type of the formatter to create - Text Writer to write to + Expression Transformer + - + - Starts RDF Handling instantiating a Triple Formatter if necessary + Represents the Leviathan lfn:factorial() function - + - Ends RDF Handling closing the TextWriter being used if the setting is enabled + Creates a new Leviathan Factorial Function - Indicates whether parsing completed without error + Expression - + - Handles Namespace Declarations passing them to the underlying formatter if applicable + Evaluates the expression - Namespace Prefix - Namespace URI + Evaluation Context + Binding ID - + - Handles Base URI Declarations passing them to the underlying formatter if applicable + Gets the String representation of the function - Base URI 
- + - Handles Triples by writing them using the underlying formatter + Gets the Functor of the Expression - Triple - - + - Gets that the Handler accepts all Triples + Gets the type of the expression - + - A Results Handler which writes the handled Results out to a TextWriter using a provided IResultFormatter + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Write-Through Handler + Represents the Leviathan lfn:log() function - Triple Formatter to use - Text Writer to write to - Whether to close the writer at the end of RDF handling - + - Creates a new Write-Through Handler + Creates a new Leviathan Log Function - Triple Formatter to use - Text Writer to write to + Expression - + - Creates a new Write-Through Handler + Creates a new Leviathan Log Function - Type of the formatter to create - Text Writer to write to - Whether to close the writer at the end of RDF handling + Expression + Log Base Expression - + - Creates a new Write-Through Handler + Evaluates the expression - Type of the formatter to create - Text Writer to write to + Evaluation Context + Binding ID + - + - Starts writing results + Gets the String representation of the function + - + - Ends the writing of results closing the TextWriter depending on the option set when this instance was instantiated + Gets the Functor of the Expression - - + - Writes a Boolean Result to the output + Gets the type of the expression - Boolean Result - + - Writes a Variable declaration to the output + Transforms the Expression using the given Transformer - Variable Name + Expression Transformer - + - Writes a Result to the output + Represents the Leviathan lfn:ln() function - SPARQL Result - - + - A RDF Handler which writes the Triples being parsed directly to a IStorageProvider in batches provided the manager supports the UpdateGraph() method + Creates a new Leviathan Natural Logarithm Function + Expression - + - Default Batch Size for writes + Evaluates the expression 
+ Evaluation Context + Binding ID + - + - Creates a new Write to Store Handler + Gets the String representation of the function - Manager to write to - Graph URI to write Triples from the default graph to - Batch Size + - + - Creates a new Write to Store Handler + Gets the Type of this expression - Manager to write to - Graph URI to write Triples from the default graph to - + - Creates a new Write to Store Handler + Gets the Functor of the Expression - Manager to write to - Batch Size - + - Creates a new Write to Store Handler + Transforms the Expression using the given Transformer - Manager to write to + Expression Transformer + - + - Starts RDF Handling by ensuring the queue of Triples to write is empty + Represents the Leviathan lfn:pow() function - + - Ends RDF Handling by ensuring the queue of Triples to write has been processed + Creates a new Leviathan Power Function - Indicates whether parsing completed without error + First Argument + Second Argument - + - Handles Triples by queuing them for writing and enacting the writing if the Batch Size has been reached/exceeded + Evaluates the expression - Triple + Evaluation Context + Binding ID - + - Gets that the Handler accepts all Triples + Gets the String representation of the function + - + - Tokeniser for NTriples RDF Syntax + Gets the Functor of the Expression - + - Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Gets the type of the expression - Stream to read Tokens from - NTriples syntax to tokenise - + - Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Transforms the Expression using the given Transformer - Stream to read Tokens from + Expression Transformer + - + - Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Represents the Leviathan lfn:pythagoras() function - Stream to read Tokens from - + - Creates a new NTriples Tokeniser which reads Tokens from the given Input + Creates a new Leviathan Pythagorean Distance 
Function - Input to read Tokens from + First Argument + Second Argument - + - Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Evaluates the expression - Stream to read Tokens from - NTriples syntax to tokenise + Evaluation Context + Binding ID + - + - Creates a new NTriples Tokeniser which reads Tokens from the given Input + Gets the String representation of the function - Input to read Tokens from - NTriples syntax to tokenise + - + - Gets/Sets the NTriples syntax that should be supported + Gets the Functor of the Expression - + - Gets/Sets whether the output should be altered slightly to support NQuads parsing + Gets the type of the expression - - - This is used internally to alter how DataTypes get tokenised, normally these are just returned as a UriToken since a Literal can only occur as the Object in NTriples and so if we see a Uri after a Literal it must be it's datatype and not part of another Triple. - - - In the case of NQuads a UriToken may follow a Literal as the Context of that Triple and not its datatype so it's important to distinguish by using a DataTypeToken instead - - - + - Gets the next available Token from the Input Stream + Transforms the Expression using the given Transformer + Expression Transformer - - - Possible Escape Handling Modes for the Tokeniser - - - + - Escaping for URIs (only \u and \U escapes are valid) + Represents the Leviathan lfn:rnd() function - + - Permissive escaping for URIs (only \" is invalid) + Creates a new Leviathan Random Function - + - Escaping for Quoted Literals (every escape but \< and \' is valid) + Creates a new Leviathan Random Function + Maximum - + - Escaping for single Quoted Literals (every escape but \< and \" is valid) + Creates a new Leviathan Random Function + Minumum + Maximum - + - Escaping for Quoted Literals (every escape but \< is valid), this differs from and in that it allows both \' and \" + Evaluates the expression + Evaluation Context + Binding ID + - + - Escaping 
for QNames (only Unicode espaces are valid) + Gets the String representation of the function + - + - Abstract Base Class for Tokeniser which handles the Position tracking + Gets the Functor of the Expression - + - Constructor for the BaseTokeniser which takes in a TextReader that the Tokeniser will generate Tokens from + Gets the type of the expression - TextReader to generator Tokens from - + - Gets/Sets the Format that this Tokeniser is used for + Transforms the Expression using the given Transformer - The value set here will replace any instances of {0} specified in inputs to the Error() function allowing messages regarding certain syntaxes not being valid in a given format to be provided + Expression Transformer + - + - Gets the Next available Token from the Input + Represents the Leviathan lfn:reciprocal() function - - Parser Exception if a valid Token cannot be retrieved - + - Informs the Helper that you wish to start reading a new Token + Creates a new Leviathan Reciprocal Function + Expression - + - Peeks at the next Character + Evaluates the expression + Evaluation Context + Binding ID - + - Allows you to Backtrack one character (and no more) + Gets the String representation of the function + - + - Gets the value of the Output Buffer + Gets the Functor of the Expression - + - Gets the current length of the Output Buffer + Gets the type of the expression - + - Gets the Current Line in the Input Stream + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the Current Position in the Input Stream + Represents the Leviathan lfn:root() function - + - Gets the Start Line in the Input Stream of the current Token + Creates a new Leviathan Root Function + First Argument + Second Argument - + - Gets the Start Position in the Input Stream of the current Token + Evaluates the expression + Evaluation Context + Binding ID + - + - Gets the End Line in the Input Stream of the current Token + Gets the String representation of the 
function + - + - Gets the End Position in the Input Stream of the current Token + Gets the Type of this expression - + - Gets/Sets the Last Token Type + Gets the Functor of the Expression - + - Gets whether the Tokeniser has backtracked + Transforms the Expression using the given Transformer + Expression Transformer + - + - Consumes a single Character into the Output Buffer and increments the Position Counters + Represents the Leviathan lfn:sq() function - Thrown if the caller tries to read beyond the end of the Stream - + - Consumes a single Character into the Output Buffer and increments the Position Counters + Creates a new Leviathan Square Function - Whether EOF is allowed - True if the EOF is reached - - If is set to false then the normal behaviour is used and an error will be thrown on end of file - + Expression - + - Consumes a New Line (which may be a single \n or \r or the two characters following each other) + Evaluates this expression - Whether the New Line should be added to the Output Buffer + Evaluation Context + Binding ID + - + - Consumes a New Line (which may be a single \n or \r or the two characters following each other) + Gets the String representation of the function - Whether the New Line should be added to the Output Buffer - Whether EOF is permitted instead of a New Line + - + - Skips a single Character and increments the Position Counters + Gets the Type of this expression - Use when you are reading characters into some local buffer and not the global token buffer, used in String escaping - Thrown if the caller tries to read beyond the end of the Stream - + - Helper function which discards White Space which the Tokeniser doesn't care about and increments position counters correctly + Gets the Functor of the Expression - + - Handles the standard escapes supported in all the UTF-8 based RDF serializations + Transforms the Expression using the given Transformer + Expression Transformer + - + - Handles the complex escapes that can occur in a 
local name + Represents the Leviathan lfn:sqrt() function - - Unlike HandleEscapes() this only unescapes unicode escapes, other escapes are simply validated and passed through for later unescaping - - + - Determines whether a given Character can be valid as a Hex Digit + Creates a new Leviathan Square Root Function - Character to test - + Expression - + - Helper Function for generating Standardised Parser Errors + Evaluates the expression - The Error Message + Evaluation Context + Binding ID - + - Helper Function for generating Standardised Parser Errors about unexpected characters + Gets the String representation of the function - Unexpected Character - Message detailing what was expected (may be empty if no explicit expectation) - + - Helper Function for generating Standardised Parser Errors about unexpected end of input + Gets the Functor of the Expression - Message detailing what was expected (may be empty if no explicit expectation) - - + - Helper Function for generating Standardised Parser Errors about unexpected new lines + Gets the type of the expression - Message detailing what was expected (may be empty if no explicit expectation) - - + - Helper Function for generating Standardised Parser Errors about unexpected tokens + Transforms the Expression using the given Transformer - Message detailing what was expected (may be empty if no explicity expectation) - Token that was parsed + Expression Transformer - - - Tokeniser for TriG (Turtle with Named Graphs) RDF Syntax - - - + - Creates a new TriG Tokeniser which reads Tokens from the given Stream + Represents the Leviathan lfn:ten() function - Stream to read Tokens from - + - Creates a new TriG Tokeniser which reads Tokens from the given Stream using the specified syntax + Creates a new Leviathan Ten Function - Stream to read Tokens from - Syntax + Expression - + - Creates a new TriG Tokeniser which reads Tokens from the given Stream + Evaluates the expression - Stream to read Tokens from + Evaluation Context 
+ Binding ID + - + - Creates a new TriG Tokeniser which reads Tokens from the given Stream + Gets the String representation of the function - Stream to read Tokens from - Syntax + - + - Creates a new TriG Tokeniser which reads Tokens from the given Input + Gets the Functor of the Expression - Input to read Tokens from - + - Creates a new TriG Tokeniser which reads Tokens from the given Input + Gets the type of the expression - Input to read Tokens from - Syntax - + - Gets the next available Token from the Input Stream + Transforms the Expression using the given Transformer + Expression Transformer - + - Base Implementation of IToken used by all derived tokens for ease of implementation + Namespace containing expression classes which provide the SPARQL built-in functions - + - Variables for representing the Type and Position of the Token + Namespace containing expression classes which provide the SPARQL built-in functions which have boolean results - + - Variables for representing the Type and Position of the Token + Class representing the SPARQL BOUND() function - + - Variables for representing the Type and Position of the Token + Creates a new Bound() function expression + Variable Expression - + - Variables for representing the Type and Position of the Token + Evaluates the expression + Evaluation Context + Binding ID + - + - Variables for representing the Type and Position of the Token + Gets the String representation of this Expression + - + - Variable containg the value of the Token + Gets the Type of the Expression - + - Creates a Token and fills in its Values + Gets the Functor of the Expression - Integer denoting the Tokens Type - String value that the Token represents (if any) - Line at which the Token starts - Line at which the Token ends - Column at which the Token starts - Column at which the Token ends - All the derived classes use this Constructor to fill in the basic values of a Token - + - Gets an arbitrary integer which indicates the Type of the 
Token + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the String Value which this Token represents (if any) + Represents an EXIST/NOT EXISTS clause used as a Function in an Expression - + - Gets the Line at which this Token Starts + Creates a new EXISTS/NOT EXISTS function + Graph Pattern + Whether this is an EXIST - + - Gets the Line at which this Token Ends + Gets the Value of this function which is a Boolean as a Literal Node + Evaluation Context + Binding ID + - + - Gets the Column at which this Token Starts + Internal method which evaluates the Graph Pattern + Evaluation Context + + We only ever need to evaluate the Graph Pattern once to get the Results + - + - Gets the Column at which this Token Ends + Gets the Variables used in this Expression - + - Gets the Length of the Tokens Value + Gets whether an expression can safely be evaluated in parallel - + - Gets a String representation of the Token Type and Value + Gets the String representation of the Expression - + - Gets a Hash Code for a Token + Gets the Type of the Expression - - + - Basic Token Queue which provides no Buffering except in the sense that it queues all possible Tokens when the InitialiseBuffer method is called + Gets the Functor of the Expression - + - Internal Queue object which this class is a wrapper around + Gets the Arguments of the Expression - + - Creates a new Token Queue + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Token Queue with the given Tokeniser + Class representing the Sparql IsBlank() function - Tokeniser - + - Removes and returns the first Token from the Queue + Creates a new IsBlank() function expression - First Token in the Queue + Expression to apply the function to - + - Adds a Token to the end of the Queue + Computes the Effective Boolean Value of this Expression as evaluated for a given Binding - Token to add + Evaluation Context + Binding ID + - + - Gets the first 
Token from the Queue without removing it + Gets the String representation of this Expression - First Token in the Queue + - + - Empties the Token Queue + Gets the Type of the Expression - + - Gets the number of Tokens in the Queue + Gets the Functor of the Expression - + - Initialises the Token Queue Buffer + Transforms the Expression using the given Transformer + Expression Transformer + - + - Initialises the Token Queue Buffer to the set Buffer Amount + Class representing the Sparql IsIRI() function - Amount of Tokens to Buffer - + - Gets the underlying Queue of Tokens + Creates a new IsIRI() function expression + Expression to apply the function to - + - Internal Helper Method for Tokeniser Tracing + Computes the Effective Boolean Value of this Expression as evaluated for a given Binding - + Evaluation Context + Binding ID + - + - Token Queue which is not backed by a Tokeniser + Gets the String representation of this Expression - - Designed to be explicitly populated with Tokens for when a Parser needs to be invoked on a subset of the overall Tokens - + - + - Creates a new non-Tokenised Queue + Gets the Type of the Expression - + - Removed and returns the first Token from the Queue + Gets the Functor of the Expression - - + - Gets the first Token from the Queue without removing it + Transforms the Expression using the given Transformer - First Token in the Queue + Expression Transformer + - + - Initialises the Buffer by doing nothing since there is no buffering on this Queue + Class representing the Sparql IsURI() function - + - A Buffered Queue for a Tokeniser which synchronously buffers a number of Tokens when the Queue is accessed and nothing is Buffered + Creates a new IsURI() function expression + Expression to apply the function to - + - Variable storing the Buffer Size + Gets the String representation of this Expression + - + - Creates a new Buffered Queue for the given Tokeniser + Gets the Functor of the Expression - Tokeniser to Buffer - + - Creates a 
new Buffered Queue + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the next Token in the Queue and removes it from the Queue + Class representing the Sparql IsLiteral() function - Token at the front of the Queue - + - Gets the next Token in the Queue without removing it from the Queue + Creates a new IsLiteral() function expression - Token at the front of the Queue + Expression to apply the function to - + - Causes the Buffer to be filled using the Default Buffering level of 10 + Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Evaluation Context + Binding ID + - + - Causes the Buffer to be filled and sets the Buffering level for the Queue + Gets the String representation of this Expression - Number of Tokens to Buffer - If a Buffer amount of less than zero is given then Buffer size will stay at default size (10) or existing size if it's previously been set + - + - Internal Helper Method which performs the Buffering + Gets the Type of the Expression - + - An Asynchronous version of BufferedTokenQueue which automatically Buffers as many Tokens as possible in a Background thread + Gets the Functor of the Expression - - Periodic instablility is fixed to the best of my knowledge, it is still recommended to use a BufferedTokenQueue or the basic TokenQueue. This implementation offers little/no performance improvement over the other types of Token Queue. 
- - + - Creates a new Asynchronous Buffered Queue with the given Tokeniser + Transforms the Expression using the given Transformer - Tokeniser to Buffer + Expression Transformer + - + - Creates a new Asynchronous Buffered Queue + Represents the SPARQL ISNUMERIC() Function - + - Gets the next Token in the Queue and removes it from the Queue + Creates a new SPARQL ISNUMERIC() Function - Token at the front of the Queue + Argument Expression - + - Gets the next Token in the Queue without removing it from the Queue + Evaluates the expression - Token at the front of the Queue + + + - + - Internal Helper Method which starts the Background Buffering if not already running + Gets the Type of this Expression - + - Internal Thread Method which does the Background Buffering + Gets the Functor of this Expression - + - A Class for Reading an Input Stream and generating SPARQL Tokens + Gets the String representation of this Expression + - + - Creates a new Instance of the Tokeniser + Transforms the Expression using the given Transformer - The Input Stream to generate Tokens from - Syntax Mode to use when parsing + Expression Transformer + - + - Creates a new Instance of the Tokeniser + Class representing the Sparql LangMatches() function - The Input Stream to generate Tokens from - Syntax Mode to use when parsing - + - Creates a new Instance of the Tokeniser + Creates a new LangMatches() function expression - The Input to generate Tokens from - Syntax Mode to use when parsing + Expression to obtain the Language of + Expression representing the Language Range to match - + - Gets the next parseable Token from the Input or raises an Error + Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Evaluation Context + Binding ID - Occurs when a Token cannot be parsed - + - Interface for defining Token classes to be used in Parsing RDF + Gets the String representation of this Expression + - + - Gives some Integer representing the Token Type as 
understood by a specific Parser implementation + Gets the Type of the Expression - + - Gives the Value of the Token + Gets the Functor of the Expression - + - Gives the Line at which the Token starts + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gives the Line at which the Token ends + Class representing the SPARQL REGEX function - + - Gives the Position within the Start Line that the Token starts + Creates a new Regex() function expression + Text to apply the Regular Expression to + Regular Expression Pattern - + - Gives the Position within the End Line that the Token ends + Creates a new Regex() function expression + Text to apply the Regular Expression to + Regular Expression Pattern + Regular Expression Options - + - Gives the Length of the Token + Configures the Options for the Regular Expression + Node detailing the Options + Whether errors should be thrown or suppressed - + - Token Queue Mode Constants + Evaluates the expression + Evaluation Context + Binding ID + - + - No Buffering used + Gets the String representation of this Expression + - + - Synchronous Buffering used + Gets the enumeration of Variables involved in this Expression - + - Asynchronous Buffering used + Gets the Type of the Expression - + - Interface for Tokenisers + Gets the Functor of the Expression - - A Tokeniser is a class that takes an input stream and produces textual tokens from it for use in token based parsers - - + - Causes the Tokeniser to attempt to retrieve the next Token + Gets the Arguments of the Expression - - Thrown if a valid Token cannot be parsed - Thrown if there is a problem reading the Input Stream - + - Interface for implementing Token Queues which provide Bufferable wrappers to Tokenisers + Gets whether an expression can safely be evaluated in parallel - + - Removes the first Token from the Queue + Transforms the Expression using the given Transformer + Expression Transformer - + - Adds a Token to the end of the Queue + 
Class representing the Sparql SameTerm() function - Token to add - + - Gets the first Token from the Queue without removing it + Creates a new SameTerm() function expression - + First Term + Second Term - + - Tokeniser that this is a Queue for + Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Evaluation Context + Binding ID + - + - Clears the Token Queue + Gets the String representation of this Expression + - + - Gets the number of Tokens in the Queue + Gets the Type of the Expression - + - Initialises the Buffer + Gets the Functor of the Expression - + - Initialises the Buffer and sets the Buffering Level + Transforms the Expression using the given Transformer - Buffering Amount + Expression Transformer + - + - Gets the underlying Queue of Tokens + Namespace containing expression classes which provide the SPARQL built-in functions which construct new terms - + - Gets/Sets whether Tokeniser Tracing should be used + Class representing the SPARQL BNODE() function - + - Gets the Token Type of the last Token dequeued + Creates a new BNode Function - + - Abstract base implementation of a Token Queue + Creates a new BNode Function + Argument Expression - + - Tokeniser used to fill the Token Queue + Gets the value of the expression as evaluated in a given Context for a given Binding + Evaluation Context + Binding ID + - + - Variable indicating whether Tokeniser Tracing is enabled + Gets the Type of the Expression - + - Type of Last Token dequeued + Gets the Functor of the Expression - + - Abstract Definition of Interface Method + Gets the Variables used in the Expression - - + - Abstract Definition of Interface Method + Gets the Arguments of the Expression - + - Abstract Definition of Interface Method + Gets whether the expression can be parallelised - + - Sets the Tokeniser used by the Queue + Gets the String representation of the Expression - Setting the Tokeniser causes the Queue to clear itself + - + - Abstract Definition of 
Interface Method + Transforms the Expression using the given Transformer + Expression Transformer + - + - Abstract Definition of Interface Property + Class representing the SPARQL IRI() function - + - Abstract Definition of Interface Method + Creates a new IRI() function expression + Expression to apply the function to - + - Abstract Definition of Interface Method + Returns the value of the Expression as evaluated for a given Binding as a Literal Node - Buffering Amount + Evaluation Context + Binding ID + - + - Abstract Definition of Interface Property + Gets the String representation of the function + - + - Gets/Sets Tracing for the Token Queue + Gets the Type of the Expression - + - Gets the Token Type of the last Token dequeued + Gets the Functor of the Expression - + - - Namespace for Token classes which are used to support Token Based parsing of RDF syntaxes - + Transforms the Expression using the given Transformer + Expression Transformer + - + - A Class for Reading an Input Stream and generating Notation 3 Tokens from it + Class representing the Sparql StrDt() function - + - Pattern for Valid QNames that use only the Latin Alphabet + Creates a new STRDT() function expression + String Expression + Datatype Expression - + - Patter for Valid Variable Names + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new Instance of the Tokeniser + Gets the String representation of this Expression - The Input Stream to generate Tokens from + - + - Creates a new Instance of the Tokeniser + Gets the Type of the Expression - The Input Stream to generate Tokens from - + - Creates a new Instance of the Tokeniser + Gets the Functor of the Expression - The Input to generate Tokens from - + - Gets the next parseable Token from the Input or raises an Error + Transforms the Expression using the given Transformer + Expression Transformer - Occurs when a Token cannot be parsed - + - Internal 
Helper method which attempts to get a Comment Token + Class representing the Sparql StrDt() function - - + - Determines whether a given Token represents an RDF Term or part thereof + Creates a new STRLANG() function expression - Token Type to test - + String Expression + Language Expression - + - A Class for Reading an Input Stream and generating Turtle Tokens from it + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new Turtle Tokeniser + Gets the String representation of this Expression - The Input Stream to generate Tokens from + - + - Creates a new Turtle Tokeniser + Gets the Type of the Expression - The Input Stream to generate Tokens from - + - Creates a new Turtle Tokeniser + Gets the Functor of the Expression - Input to read from - + - Creates a new Turtle Tokeniser + Transforms the Expression using the given Transformer - The Input Stream to generate Tokens from - Turtle Syntax + Expression Transformer + - + - Creates a new Turtle Tokeniser + Namespace containing expression classes which provide the SPARQL built-in functions pertaining to date times - The Input Stream to generate Tokens from - Turtle Syntax - + - Creates a new Turtle Tokeniser + Represents the SPARQL DAY() Function - Input to read from - Turtle Syntax - + - Gets the next parseable Token from the Input or raises an Error + Creates a new SPARQL DAY() Function - - Occurs when a Token cannot be parsed + Argument Expression - + - Internal Helper method which attempts to get a Directive Token + Gets the Functor of this Expression - - + - Internal Helper method which attempts to get a Prefix Token + Gets the String representation of this Expression - + - Internal Helper method which attempts to get a QName Token + Transforms the Expression using the given Transformer + Expression Transformer - In fact this function may return a number of Tokens depending on the characters it finds. 
It may find a QName, Plain Literal, Blank Node QName (with ID) or Keyword. QName & Keyword Validation is carried out by this function - + - Internal Helper method which attempts to get a Language Specifier Token + Represents the SPARQL HOURS() Function - - + - Internal Helper method which attempts to get a Date Type Token + Creates a new SPARQL HOURS() Function - + Argument Expression - + - Internal Helper method which attempts to get a Comment Token + Gets the Functor of this Expression - - + - Token which represents the Start of the Input + Gets the String representation of this Expression + - + - Creates a new Beginning of File Token + Transforms the Expression using the given Transformer + Expression Transformer + - + - Token which represents the End of the Input + Represents the SPARQL MINUTES() Function - + - Creates a new End of File Token + Creates a new SPARQL MINUTES() Function - Line at which the File Ends - Column as which the File Ends + Argument Expression - + - Token which represents the End of a Line + Gets the Functor of this Expression - + - Creates a new End of Line Token + Gets the String representation of this Expression - Line - Column at which the line ends + - + - Token which represents the @ Character + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new @ Token + Represents the SPARQL MONTH() Function - Line at which the @ occurs - Column at which the @ occurs - + - Token which represents the . Character + Creates a new SPARQL YEAR() Function + Argument Expression - + - Creates a new . Token + Gets the Functor of this Expression - Line at which the . occurs - Column at which the . 
occurs - + - Token which represents the ; Character + Gets the String representation of this Expression + - + - Creates a new ; Token + Transforms the Expression using the given Transformer - Line at which the ; occurs - Column at which the ; occurs + Expression Transformer + - + - Token which represents the , Character + Represents the SPARQL NOW() Function - + - Creates a new , Token + Gets the Functor of this Expression - Line at which the , occurs - Column at which the , occurs - + - Tokens which represents the tab character + Gets the String representation of this Expression + - + - Creates a new Tab Token + Represents the SPARQL SECONDS() Function - Line at which the tab occurs - Column at which the tab occurs - + - Token which represents the # Character + Creates a new SPARQL SECONDS() Function + Argument Expression - + - Creates a new # Token + Gets the Functor of this Expression - Line at which the # occurs - Column at which the # occurs - + - Token which repreents the _ Character + Gets the String representation of this Expression + - + - Creates a new _ Token + Transforms the Expression using the given Transformer - Line at which the _ occurs - Column at which the _ occurs + Expression Transformer + - + - Token which represents the ^^ sequence used for Data Type specification in some RDF Syntaxes + Represents the SPARQL TIMEZONE() Function - + - Creates a new ^^Token + Creates a new SPARQL TIMEZONE() Function - Line at which the ^^ occurs - Column at which the ^^ occurs + Argument Expression - + - Token which represents the ^ Character used for Reverse Path Traversal in somme RDF Syntaxes + Gets the Timezone of the Argument Expression as evaluated for the given Binding in the given Context + Evaluation Context + Binding ID + - + - Creates a new ^ Token + Gets the Functor of this Expression - Line at which the ^ occurs - Column at which the ^ occurs - + - Token which represents the ! 
Character used for Forward Path Traversal in some RDF Syntaxes + Gets the String representation of this Expression + - + - Creates a new ! Token + Transforms the Expression using the given Transformer - Line at which the ! occurs - Column at which the ! occurs + Expression Transformer + - + - Token which represents Comments + Represents the SPARQL TZ() Function - + - Creates a new Comment Token + Creates a new SPARQL TZ() Function - The Comment - Line on which the Comment occurs - Column at which the Comment starts - Column at which the Comment ends + Argument Expression - + - Token which represents the [ Character + Gets the Timezone of the Argument Expression as evaluated for the given Binding in the given Context + Evaluation Context + Binding ID + - + - Creates a new [ Token + Gets the Type of this Expression - Line at which the [ occurs - Column at which the [ occurs - + - Token which represents the ] Character + Gets the Functor of this Expression - + - Creates a new ] Token + Gets the String representation of this Expression - Line at which the ] occurs - Column at which the ] occurs + - + - Token which represents the ( Character + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new ( Token + Represents the SPARQL YEAR() Function - Line at which the ( occurs - Column at which the ( occurs - + - Token which represents the ) Character + Creates a new SPARQL YEAR() Function + Argument Expression - + - Creates a new ) Token + Gets the Functor of this Expression - Line at which the ) occurs - Column at which the ) occurs - + - Token which represents the { Character + Gets the String representation of this Expression + - + - Creates a new { Token + Transforms the Expression using the given Transformer - Line at which the { occurs - Column at which the { occurs + Expression Transformer + - + - Token which represents the } Character + Namespace containing expression classes which provide the SPARQL built-in functions 
pertaining to hash algorithms - + - Creates a new } Token + Abstract base class for Hash Functions - Line at which the } occurs - Column at which the } occurs - + - Token which reprsents the := Assignment Operator + Creates a new Hash function + Expression + Hash Algorithm to use - + - Creates a new := Token + Gets the value of the function in the given Evaluation Context for the given Binding ID - Line on which the := occurs - Position at which the := occurs + Evaluation Context + Binding ID + - + - Token which represents the ? Character + Computes Hashes + Input String + - + - Creates a new ? Token + Gets the String representation of the function - Line at which the ? occurs - Column at which the ? occurs + - + - Token which represents the | Character + Gets the Type of the Expression - + - Creates a new | Token + Represents the SPARQL MD5() Function - Line at which the | occurs - Column at which the | occurs - + - Token which represents a Prefix Directive + Creates a new MD5() Function + Argument Expression - + - Creates a new Prefix Direction Token + Gets the Functor of the Expression - Line at which the Prefix Directive occurs - Column at which the Prefix Directive occurs - + - Token which represents the Prefix specified after a Prefix Directive + Gets the String representation of the Expression + - + - Creates a new Prefix Token + Transforms the Expression using the given Transformer - Prefix - Line at which the Prefix occurs - Column at which the Prefix starts - Column at which the Prefix ends + Expression Transformer + - + - Token which represents a Base Directive + Represents the SPARQL SHA1() Function - + - Creates a new Base Directive Token + Creates a new SHA1() Function - Line at which the Base Directive occurs - Column at which the Base Directive occurs + Argument Expression - + - Token which represents a Keyword Directive + Gets the Functor of the Expression - + - Creates a new Keyword Directive Token + Gets the String representation of the 
Expression - Line at which the Keyword Directive occurs - Column at which the Keyword Directive occurs + - + - Token which represents a For All Quantifier + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new For All Quantifier Token + Represents the SPARQL SHA256() Function - Line at which the For All Quantifier occurs - Column at which the For All Quantifier occurs - + - Token which represents a For Some Quantifier + Creates a new SHA256() Function + Argument Expression - + - Creates a new For Some Quantifier Token + Gets the Functor of the Expression - Line at which the For Some Quantifier occurs - Column at which the For Some Quantifier occurs - + - Token which represents URIs + Gets the String representation of the Expression + - + - Creates a new Uri Token + Transforms the Expression using the given Transformer - Value of the Uri including the < > deliminators - Line the Uri occurs on - Column the Uri starts at - Column the Uri ends at + Expression Transformer + - + - Token which represents QNames + Represents the SPARQL SHA384() Function - + - Creates a new QName Token + Creates a new SHA384() Function - QName - Line the QName occurs on - Column the QName starts at - Column the QName ends at + Argument Expression - + - Token which represents Plain (Unquoted) Literals + Gets the Functor of the Expression - + - Creates a new Plain Literal Token + Gets the String representation of the Expression - Literal Value - Line the Literal occurs on - Column the Literal starts at - Column the Literal ends at + - + - Token which represents Literals + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Literal Token + Represents the SPARQL SHA512() Function - Literal Value including the Quote deliminators - Line the Literal occurs on - Column the Literal starts at - Column the Literal ends at - + - Creates a new Literal Token + Creates a new SHA512() Function - Literal Value 
including the Quote deliminators - Line the Literal starts on - Line the Literal ends on - Column the Literal starts at - Column the Literal ends at - - Most syntaxes use different deliminators for multiline literals and will usually use a LongLiteralToken instead but some formats like CSV only use quotes for multiline literals and use no delimitors for single line literals - + Argument Expression - + - Token which represents Long Literals (allows multi-line values) + Gets the Functor of the Expression - + - Creates a new Long Literal Token + Gets the String representation of the Expression - Literal Value including the Triple Quote deliminators - Line the Long Literal starts on - Line the Long Literal ends on - Column the Literal starts at - Column the Literal ends at + - + - Token which represents the Language Specifier for a Literal + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Language Specifier Token + Namespace containing expression classes which provide the SPARQL built-in numeric functions - Language Specifier - Line the Literal occurs on - Column the Literal starts at - Column the Literal ends at - + - Token which represents the Data Type for a Literal + Represents the SPARQL ABS() Function - + - Creates a new DataType Token + Creates a new SPARQL ABS() Function - DataType Uri including the < > deliminators or a QName - Line the DataType occurs on - Column the DataType starts at - Column the DataType ends at + Argument Expression - + - Token which represents Literals with Language Specifiers + Gets the Functor of this Expression - + - Creates a new Literal with Language Specifier Token + Gets the String representation of this Expression - Literal Token - Language Specifier Token + - + - The Language Specifier for this Literal + Transforms the Expression using the given Transformer + Expression Transformer + - + - Token which represents Literals with Data Types + Represents the SPARQL CEIL() Function 
- + - Creates a new Literal with DataType Token + Creates a new SPARQL CEIL() Function - Literal Token - DataType Token + Argument Expression - + - The Data Type Uri/QName for this Literal + Gets the Functor of this Expression - + - Token which represents Graph Literals + Gets the String representation of this Expression + - + - Creates a new Graph Literal Token + Transforms the Expression using the given Transformer - Value of the Graph Literal - Line the Graph Literal starts on - Line the Graph Literal ends on - Column the Graph Literal starts at - Column the Graph Literal ends at + Expression Transformer + - + - Token which represents anonymous Blank Nodes + Represents the SPARQL FLOOR() Function - + - Creates a new Anonymous Blank Node Token + Creates a new SPARQL FLOOR() Function - Line the Blank Node occurs on - Column the Blank Node occurs at + Argument Expression - + - Token which represents named Blank Nodes + Gets the Functor of this Expression - + - Creates a new Blank Node Token + Gets the String representation of this Expression - ID of the Blank Node - Line the Blank Node occurs on - Column the Blank Node starts at - Column the Blank Node ends at + - + - Token which represents Blank Node Collections + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Blank Node Collection Token + Represents the SPARQL RAND() Function - Contents of the Blank Node Collection - Line the Collection starts on - Line the Collection ends on - Column the Collection starts at - Column the Collection ends at - + - The Tokens contained in the Blank Node Collection + Creates a new SPARQL RAND() Function - + - Token representing the 'a' Keyword + Evaluates the expression + Evaluation Context + Binding ID + - + - Creates a new 'a' Keyword Token + Gets the Variables used in this Expression - Line the Keyword occurs on - Column the Keyword occurs at - + - Token representing the 'is' Keyword + Gets the Type of this Expression - + - 
Creates a new 'is' Keyword Token + Gets the Arguments of this Expression - Line the Keyword occurs on - Column the Keyword occurs at - + - Token representing the 'of' Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new 'of' Keyword Token + Gets the Functor of this Expression - Line the Keyword occurs on - Column the Keyword occurs at - + - Token representing the '=>' implies Syntax + Gets the String representation of this Expression + - + - Creates a new '=>' Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Column the Keyword occurs at + Expression Transformer + - + - Token representing the '>=' implied by Syntax + Represents the SPARQL ROUND() Function - + - Creates a new '<=' Keyword Token + Creates a new SPARQL ROUND() Function - Line the Keyword occurs on - Column the Keyword occurs at + Argument Expression - + - Token representing the '=' equality Syntax + Gets the Functor of this Expression - + - Creates a new '=' Keyword Token + Gets the String representation of this Expression - Line the Keyword occurs on - Column the Keyword occurs at + - + - Token representing the use of a Custom Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Custom Keyword Token + Namespace containing expression classes which provide the SPARQL built-in functions pertaining to sets (IN and NOT IN) - Custom Keyword - Line the Keyword occurs on - Column the Keyword starts at - Column the Keyword ends at - + - Token representing the definition of a Custom Keyword + Abstract base class for SPARQL Functions which operate on Sets - + - Creates a new Custom Keyword Definition Token + Variable Expression Term that the Set function applies to - Custom Keyword Definition - Line the Keyword occurs on - Column the Keyword starts at - Column the Keyword ends at - + - Token representing Variables + Set that is used in the function - + - Creates a 
new Variable Token + Creates a new SPARQL Set function - Variable - Line the Variable occurs on - Column the Variable starts at - Column the Variable ends at + Expression + Set - + - Tokeniser for tokenising CSV inputs + Gets the value of the function as evaluated for a given Binding in the given Context + SPARQL Evaluation Context + Binding ID + - + - Creates a new CSV Tokeniser + Gets the Variable the function applies to - Text Reader - + - Creates a new CSV Tokeniser + Gets the Type of the Expression - Stream Reader - + - Gets the next available token from the input + Gets the Functor of the Expression - - + - Tokeniser for tokenising TSV inputs + Gets the Arguments of the Exception - + - Creates a new TSV Tokeniser + Gets whether an expression can safely be evaluated in parallel - Text Reader - + - Creates a new TSV Tokeniser + Gets the String representation of the Expression - Stream Reader + - + - Gets the next available token from the input + Transforms the Expression using the given Transformer + Expression Transformer - + - Special Token which acts as a Placeholder for SPARQL Property Paths + Class representing the SPARQL IN set function - + - Creates a new Path Token + Creates a new SPARQL IN function - Path + Expression + Set - + - Gets the Path this Token acts as a placeholder for + Evaluates the expression + Evaluation Context + Binding ID + - + - Token which represents the SPARQL SELECT Keyword + Gets the Functor of the Expression - + - Creates a new SELECT Keyword Token + Gets the String representation of the Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL ASK Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new ASK Keyword Token + Class representing the SPARQL NOT IN set function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL DESCRIBE Keyword + Creates a new SPARQL NOT IN 
function + Expression + Set - + - Creates a new DESCRIBE Keyword Token + Evaluates the expression - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL CONSTRUCT Keyword + Gets the Functor of the Expression - + - Creates a new CONSTRUCT Keyword Token + Gets the String representation of the Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the use of the * character to mean All + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new All Token + Namespace containing expression classes which provide the SPARQL built-in functions pertaining to string manipulation - Line the * occurs on - Position the * occurs at - + - Token which represents the SPARQL ABS Keyword + Abstract Base Class for SPARQL String Testing functions which take two arguments - + - Creates a new ABS Keyword Token + Creates a new Base Binary SPARQL String Function - Line the Keyword occurs on - Position the Keyword occurs at + String Expression + Argument Expression - + - Token which represents the SPARQL ALL Keyword + Evaluates the expression + Evaluation Context + Binding ID + - + - Creates a new ALL Keyword Token + Abstract method that child classes must implement to - Line the Keyword occurs on - Position the Keyword occurs at + + + - + - Token which represents the SPARQL AS Keyword + Determines whether the Arguments are valid + String Literal + Argument Literal + - + - Creates a new AS Keyword Token + Gets the Expression Type - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL ASC Keyword + Abstract Base Class for functions that generate UUIDs - + - Creates a new ASC Keyword Token + Evaluates the expression - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL AVG Keyword + Method to 
be implemented by derived classes to implement the actual logic of turning the generated UUID into a RDF term + UUID + - + - Creates a new AVG Keyword Token + Gets the variables used in the expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL BIND Keyword + Gets the Type of the expression - + - Creates a new BIND Keyword Token + Gets the Functor of the expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL BINDINGS Keyword + Gets the arguments of the expression - + - Creates a new BINDINGS Keyword Token + Applies the transformer to the arguments of this expression - Line the Keyword occurs on - Position the Keyword occurs at + Transformer + - + - Token which represents the SPARQL BNODE Keyword + Returns whether the function can be parallelised - + - Creates a new BNODE Keyword Token + Represents the SPARQL CONCAT function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL BOUND Keyword + Creates a new SPARQL Concatenation function + Enumeration of expressions - + - Creates a new BOUND Keyword Token + Gets the Value of the function as evaluated in the given Context for the given Binding ID - Line the Keyword occurs on - Position the Keyword occurs at + Context + Binding ID + - + - Token which represents the SPARQL CALL Keyword + Gets the Arguments the function applies to - + - Creates a new CALL Keyword Token + Gets whether an expression can safely be evaluated in parallel - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL CEIL Keyword + Gets the Variables used in the function - + - Creates a new CEIL Keyword Token + Gets the String representation of the function - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL COALESCE Keyword + Gets the Type of the SPARQL Expression - + - Creates a new COALESCE 
Keyword Token + Gets the Functor of the expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL CONCAT Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new CONCAT Keyword Token + Represents the SPARQL CONTAINS function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL COUNT Keyword + Creates a new SPARQL CONTAINS function + String Expression + Search Expression - + - Creates a new COUNT Keyword Token + Determines whether the String contains the given Argument - Line the Keyword occurs on - Position the Keyword occurs at + String Literal + Argument Literal + - + - Token which represents the SPARQL DATATYPE Keyword + Gets the Functor of the Expression - + - Creates a new DATATYPE Keyword Token + Gets the String representation of the Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL DAY Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new DAY Keyword Token + Class representing the SPARQL Datatype() function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL DESC Keyword + Creates a new Datatype() function expression + Expression to apply the function to - + - Creates a new DESC Keyword Token + Returns the value of the Expression as evaluated for a given Binding as a Literal Node - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL DISTINCT Keyword + Gets the String representation of this Expression + - + - Creates a new DISTINCT Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL ENCODE_FOR_URI Keyword + Gets the Functor of the Expression - + - Creates a 
new ENCODE_FOR_URI Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL EXISTS Keyword + Class representing the SPARQL Datatype() function in SPARQL 1.1 + + This is required because the changes to the function in SPARQL 1.1 are not backwards compatible with SPARQL 1.0 + - + - Creates a new EXISTS Keyword Token + Creates a new DataType function - Line the Keyword occurs on - Position the Keyword occurs at + Expression - + - Token which represents the SPARQL FILTER Keyword + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new FILTER Keyword Token + Represents the SPARQL ENCODE_FOR_URI Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL FLOOR Keyword + Creates a new Encode for URI function + Expression - + - Creates a new FLOOR Keyword Token + Gets the Value of the function as applied to the given String Literal - Line the Keyword occurs on - Position the Keyword occurs at + Simple/String typed Literal + - + - Token which represents the SPARQL FROM Keyword + Gets the String representation of the function + - + - Creates a new FROM Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL FROM NAMED Keyword combination + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new FROM NAMED Keyword Token + Class representing the Sparql Lang() function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL GRAPH Keyword + Creates a new Lang() function expression + Expression to apply the function to - + - Creates a new GRAPH Keyword Token + Returns the value of the Expression as evaluated for a given 
Binding as a Literal Node - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL GROUP BY Keyword + Gets the String representation of this Expression + - + - Creates a new GROUP BY Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL GROUP_CONCAT Keyword + Gets the Functor of the Expression - + - Creates a new GROUP_CONCAT Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL HAVING Keyword + Represents the SPARQL LCASE Function - + - Creates a new HAVING Keyword Token + Creates a new LCASE function - Line the Keyword occurs on - Position the Keyword occurs at + Argument Expression - + - Token which represents the SPARQL HOURS Keyword + Calculates + + - + - Creates a new HOURS Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL IF Keyword + Gets the String representation of the Expression + - + - Creates a new IF Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL IN Keyword + Represents the XPath fn:replace() function - + - Creates a new IN Keyword Token + Creates a new SPARQL Replace function - Line the Keyword occurs on - Position the Keyword occurs at + Text Expression + Search Expression + Replace Expression - + - Token which represents the SPARQL IRI Keyword + Creates a new SPARQL Replace function + Text Expression + Search Expression + Replace Expression + Options Expression - + - Creates a new IRI Keyword Token + Configures the Options for the Regular Expression - Line the Keyword occurs on - 
Position the Keyword occurs at + Node detailing the Options + Whether errors should be thrown or suppressed - + - Token which represents the SPARQL ISBLANK Keyword + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new ISBLANK Keyword Token + Gets the String representation of this Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL ISIRI Keyword + Gets the enumeration of Variables involved in this Expression - + - Creates a new ISIRI Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL ISLITERAL Keyword + Gets the Functor of the Expression - + - Creates a new ISLITERAL Keyword Token + Gets the Arguments of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL ISNUMERIC Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new ISNUMERIC Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL ISURI Keyword + Represents the SPARQL STRAFTER Function - + - Creates a new ISURI Keyword Token + Creates a new STRAFTER Function - Line the Keyword occurs on - Position the Keyword occurs at + String Expression + Starts Expression - + - Token which represents the SPARQL LANG Keyword + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new LANG Keyword Token + Determines whether the Arguments are valid - Line the Keyword occurs on - Position the Keyword occurs at + String Literal + Argument Literal + - + - Token which represents the SPARQL LANGMATCHES Keyword + Gets the Variables used in the function - + - 
Creates a new LANGMATCHES Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL LCASE Keyword + Gets the Functor of the Expression - + - Creates a new LCASE Keyword Token + Gets the Arguments of the Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL LENGTH Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new LENGTH Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL LET Keyword + Gets the String representation of the function + - + - Creates a new LET Keyword Token + Represents the SPARQL STRBEFORE function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL LIMIT Keyword + Creates a new STRBEFORE Function + String Expression + Starts Expression - + - Creates a new LIMIT Keyword Token + Returns the value of the Expression as evaluated for a given Binding as a Literal Node - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL MAX Keyword + Determines whether the Arguments are valid + String Literal + Argument Literal + - + - Creates a new MAX Keyword Token + Gets the Variables used in the function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL MD5 Keyword + Gets the Type of the Expression - + - Creates a new MD5 Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL MEDIAN Keyword + Gets the Arguments of the Function - + - Creates a new MEDIAN Keyword Token + Gets whether an expression can safely be evaluated in parallel - Line the Keyword occurs 
on - Position the Keyword occurs at - + - Token which represents the SPARQL MIN Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new MIN Keyword Token + Gets the String representation of the function - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL MINUTES Keyword + Represents the SPARQL STRENDS Function - + - Creates a new MINUTES Keyword Token + Creates a new STRENDS() function - Line the Keyword occurs on - Position the Keyword occurs at + String Expression + Argument Expression - + - Token which represents the SPARQL MINUS Keyword + Determines whether the given String Literal ends with the given Argument Literal + String Literal + Argument Literal + - + - Creates a new MINUS Keyword Token - - Line the Keyword occurs on - Position the Keyword occurs at + Gets the Functor of the Expression + - + - Token which represents the SPARQL MODE Keyword + Gets the String representation of the Expression + - + - Creates a new MODE Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL MONTH Keyword + Class representing the Sparql Str() function - + - Creates a new MONTH Keyword Token + Creates a new Str() function expression - Line the Keyword occurs on - Position the Keyword occurs at + Expression to apply the function to - + - Token which represents the SPARQL NAMED Keyword + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new NAMED Keyword Token + Gets the String representation of this Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL NOT IN Keyword + Gets the Type of the Expression - + - Creates a new NOT IN Keyword Token + Gets the Functor of the Expression - Line 
the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL NMAX Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new NMAX Keyword Token + Represents the SPARQL STRLEN Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL NMIN Keyword + Creates a new STRLEN() function + Argument Expression - + - Creates a new NMIN Keyword Token + Determines the Length of the given String Literal - Line the Keyword occurs on - Position the Keyword occurs at + String Literal + - + - Token which represents the SPARQL NOT EXISTS Keyword + Gets the Functor of the Expression - + - Creates a new NOT EXISTS Keyword Token + Gets the String representation of the Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL NOW Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new NOW Keyword Token + Represents the SPARQL STRSTARTS Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL OFFSET Keyword + Creates a new STRSTARTS() function + String Expression + Argument Expression - + - Creates a new OFFSET Keyword Token + Determines whether the given String Literal starts with the given Argument Literal - Line the Keyword occurs on - Position the Keyword occurs at + String Literal + Argument Literal + - + - Token which represents the SPARQL OPTIONAL Keyword + Gets the Functor of the Expression - + - Creates a new OPTIONAL Keyword Token + Gets the String representation of the Expression - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL ORDER BY Keyword combination + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new ORDER BY Keyword Token + Represents the SPARQL SUBSTR Function - 
Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL RAND Keyword + Creates a new XPath Substring function + Expression + Start - + - Creates a new RAND Keyword Token + Creates a new XPath Substring function - Line the Keyword occurs on - Position the Keyword occurs at + Expression + Start + Length - + - Token which represents the SPARQL REDUCED Keyword + Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Creates a new REDUCED Keyword Token + Gets the Variables used in the function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL REGEX Keyword + Gets the String representation of the function + - + - Creates a new REGEX Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL REPLACE Keyword + Gets the Functor of the Expression - + - Creates a new REPLACE Keyword Token + Gets the Arguments of the Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL ROUND Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new ROUND Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL SAMETERM Keyword + Represents the SPARQL UCASE Function - + - Creates a new SAMETERM Keyword Token + Creates a new UCASE() function - Line the Keyword occurs on - Position the Keyword occurs at + Argument Expression - + - Token which represents the SPARQL SAMPLE Keyword + Converts the given String Literal to upper case + String Literal + - + - Creates a new SAMPLE Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which 
represents the SPARQL SECONDS Keyword + Gets the String representation of the Expression + - + - Creates a new SECONDS Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL SEPARATOR Keyword + Represents the SPARQL UUID Function - + - Creates a new SEPARATOR Keyword Token + Evaluates the function by generating the URN UUID form based on the given UUID - Line the Keyword occurs on - Position the Keyword occurs at + UUID + - + - Token which represents the SPARQL SERVICE Keyword + Gets the functor for the expression - + + + + - Creates a new SERVICE Keyword Token + Represents the SPARQL STRUUID Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL SHA1 Keyword + Evaluates the function by returning the string form of the given UUID + UUID + - + - Creates a new SHA1 Keyword Token + Gets the functor for the expression - Line the Keyword occurs on - Position the Keyword occurs at - + + + + - Token which represents the SPARQL SHA224 Keyword + Class representing the SPARQL CALL() function - + - Creates a new SHA224 Keyword Token + Creates a new COALESCE function with the given expressions as its arguments - Line the Keyword occurs on - Position the Keyword occurs at + Argument expressions - + - Token which represents the SPARQL SHA256 Keyword + Gets the value of the expression as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Creates a new SHA256 Keyword Token + Gets the Variables used in all the argument expressions of this function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL SHA384 Keyword + Gets the String representation of the function + - + - Creates a new SHA384 Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword 
occurs at - + - Token which represents the SPARQL SHA512 Keyword + Gets the Functor of the Expression - + - Creates a new SHA512 Keyword Token + Gets the Arguments of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL STR Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new STR Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL STRAFTER Keyword + Class representing the SPARQL COALESCE() function - + - Creates a new STRAFTER Keyword Token + Creates a new COALESCE function with the given expressions as its arguments - Line the Keyword occurs on - Position the Keyword occurs at + Argument expressions - + - Token which represents the SPARQL STRBEFORE Keyword + Gets the value of the expression as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Creates a new STRBEFORE Keyword Token + Gets the Variables used in all the argument expressions of this function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL CONTAINS Keyword + Gets the String representation of the function + - + - Creates a new CONTAINS Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL STRDT Keyword + Gets the Functor of the Expression - + - Creates a new STRDT Keyword Token + Gets the Arguments of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL STRENDS Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new STRENDS Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + 
Expression Transformer + - + - Token which represents the SPARQL STRLANG Keyword + Class representing the SPARQL IF function - + - Creates a new STRLANG Keyword Token + Creates a new IF function - Line the Keyword occurs on - Position the Keyword occurs at + Condition + Expression to evaluate if condition evaluates to true + Expression to evalaute if condition evaluates to false/error - + - Token which represents the SPARQL STRLEN Keyword + Gets the value of the expression as evaluated in the given Context for the given Binding ID + SPARQL Evaluation Context + Binding ID + - + - Creates a new STRLEN Keyword Token + Gets the enumeration of variables used in the expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL STRSTARTS Keyword + Gets the String representation of the function + - + - Creates a new STRSTARTS Keyword Token + Gets the Expression Type - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL STRUUID Keyword + Gets the Functor for the Expression - + - Creates a new STRUUID Keyword Token + Gets the Arguments of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL SUBSTR Keyword + Gets whether an expression can safely be evaluated in parallel - + - Creates a new SUBSTR Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL SUM Keyword + Namespace containing expression classes which provide functions from the XPath function library - + - Creates a new SUM Keyword Token + Namespace containing expression classes which provide cast functions from the XPath function library - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL TIMEZONE Keyword + Abstract Expression class used as the base class for 
implementation of XPath Casting Function expressions - + - Creates a new TIMEZONE Keyword Token + Expression to be Cast by the Cast Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL TZ Keyword + Creates a new Base XPath Cast Expression + Expression to be Cast - + - Creates a new TZ Keyword Token + Gets the value of casting the result of the inner expression - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL UCASE Keyword + Gets the String representation of this Expression + - + - Creates a new UCASE Keyword Token + Gets the enumeration of Variables involved in this expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL UNDEF Keyword + Gets the Type of the Expression - + - Creates a new UNDEF Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL UNION Keyword + Gets the Arguments of the Expression - + - Creates a new UNION Keyword Token + Gets whether an expression can safely be evaluated in parallel - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL UNSAID Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new UNSAID Keyword Token + Class representing an XPath Boolean Cast Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL URI Keyword + Creates a new XPath Boolean Cast Function Expression + Expression to be cast - + - Creates a new URI Keyword Token + Casts the value of the inner Expression to a Boolean - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL UUID Keyword + Gets the String representation of the Expression 
+ - + - Creates a new UUID Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL VALUES Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new VALUES Keyword Token + Class representing an XPath Date Time Cast Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL WHERE Keyword + Creates a new XPath Date Time Cast Function Expression + Expression to be cast - + - Creates a new WHERE Keyword Token + Casts the value of the inner Expression to a Date Time - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL YEAR Keyword + Gets the String representation of the Expression + - + - Creates a new YEAR Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents Mathematical Plus + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Mathematical Plus Token + Class representing an XPath Decimal Cast Function - Line the Token occurs on - Position the Token occurs at - + - Token which represents Mathematical Minus + Creates a new XPath Decimal Cast Function Expression + Expression to be cast - + - Creates a new Mathematical Minus Token + Casts the Value of the inner Expression to a Decimal - Line the Token occurs on - Position the Token occurs at + Evaluation Context + Binding ID + - + - Token which represents Mathematical Multiply + Gets the String representation of the Expression + - + - Creates a new Mathematical Multiply Token + Gets the Functor of the Expression - Line the Token occurs on - Position the Token occurs at - + - Token which represents Mathematical Divide + Transforms the Expression using the given Transformer + Expression Transformer + - + - 
Creates a new Mathematical Divide Token + Class representing an XPath Double Cast Function - Line the Token occurs on - Position the Token occurs at - + - Token which represents Logical Not Equals + Creates a new XPath Double Cast Function Expression + Expression to be cast - + - Creates a new Logical Not Equals Token + Casts the value of the inner Expression to a Double - Line the Token occurs on - Position the Token occurs at + Evaluation Context + Binding ID + - + - Token which represents Logical Negation + Gets the String representation of the Expression + - + - Creates a new Logical Negation Token + Gets the Functor of the Expression - Line the Token occurs on - Position the Token occurs at - + - Token which represents Logical And + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Logical And Token + Class representing an XPath Float Cast Function - Line the Token occurs on - Position the Token occurs at - + - Token which represents Logical Or + Creates a new XPath Float Cast Function Expression + Expression to be cast - + - Creates a new Logical Or Token + Casts the value of the inner Expression to a Float - Line the Token occurs on - Position the Token occurs at + Evaluation Context + Vinding ID + - + - Token which represents Relational Less Than + Gets the String representation of the Expression + - + - Creates a new Relation Less Than Token + Gets the Functor of the Expression - Line the Token occurs on - Position the Token occurs at - + - Token which represents Relational Less Than or Equal To + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new Relation Less Than or Equal To Token + Class representing an XPath Integer Cast Function - Line the Token occurs on - Position the Token occurs at - + - Token which represents Relational Greater Than + Creates a new XPath Integer Cast Function Expression + Expression to be cast - + - Creates a new Relation Greater 
Than Token + Casts the value of the inner Expression to an Integer - Line the Token occurs on - Position the Token occurs at + Evaluation Context + Binding ID + - + - Token which represents Greater Than or Equal To + Gets the String representation of the Expression + - + - Creates a new Relation Greater Than or Equal To Token + Gets the Functor of the Expression - Line the Token occurs on - Position the Token occurs at - + - Token which represents the SPARQL Update ADD Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new ADD Keyword Token + Class representing an XPath String Cast Function - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL Update CLEAR Keyword + Creates a new XPath String Cast Function Expression + Expression to be cast - + - Creates a new CLEAR Keyword Token + Casts the results of the inner expression to a Literal Node typed xsd:string - Line the Keyword occurs on - Position the Keyword occurs at + Evaluation Context + Binding ID + - + - Token which represents the SPARQL Update COPY Keyword + Gets the String representation of the Expression + - + - Creates a new COPY Keyword Token + Gets the Functor of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL Update CREATE Keyword + Transforms the Expression using the given Transformer + Expression Transformer + - + - Creates a new CREATE Keyword Token + Namespace containing expression classes which provide date time functions from the XPath function library - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL Update DATA Keyword + Abstract Base Class for functions which are Unary functions applied to Date Time objects in the XPath function library - + - Creates a new DATA Keyword Token + Creates a new Unary XPath Date Time function - Line the Keyword occurs on - Position the Keyword 
occurs at + - + - Token which represents the SPARQL Update DEFAULT Keyword + Gets the numeric value of the function in the given Evaluation Context for the given Binding ID + Evaluation Context + Binding ID + - + - Creates a new DEFAULT Keyword Token + Abstract method which derived classes must implement to generate the actual numeric value for the function - Line the Keyword occurs on - Position the Keyword occurs at + Date Time + - + - Token which represents the SPARQL Update DELETE Keyword + Gets the String representation of the Function + - + - Creates a new DELETE Keyword Token + Gets the Type of the Expression - Line the Keyword occurs on - Position the Keyword occurs at - + - Token which represents the SPARQL Update DROP Keyword + Represents the XPath day-from-dateTime() function - + - Creates a new DROP Keyword Token + Creates a new XPath Day from Date Time function - Line the Keyword occurs on - Position the Keyword occurs at + Expression - + - Token which represents the SPARQL Update INSERT Keyword + Calculates the numeric value of the function from the given Date Time + Date Time + - + - Creates a new INSERT Keyword Token + Gets the String representation of the function - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL Update INTO Keyword + Gets the Functor of the Expression - + - Creates a new INTO Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL Update LOAD Keyword + Represents the XPath hours-from-dateTime() function - + - Creates a new LOAD Keyword Token + Creates a new XPath Hours from Date Time function - Line the Keyword occurs on - Position the Keyword occurs at + Expression - + - Token which represents the SPARQL Update MOVE Keyword + Calculates the numeric value of the function from the given Date Time + Date Time + - + - Creates a new MOVE 
Keyword Token + Gets the String representation of the function - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL Update SILENT Keyword + Gets the Functor of the Expression - + - Creates a new SILENT Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Token which represents the SPARQL Update TO Keyword + Represents the XPath minutes-from-dateTime() function - + - Creates a new TO Keyword Token + Creates a new XPath Minutes from Date Time function - Line the Keyword occurs on - Position the Keyword occurs at + Expression - + - Token which represents the SPARQL Update USING Keyword + Calculates the numeric value of the function from the given Date Time + Date Time + - + - Creates a new USING Keyword Token + Gets the String representation of the function - Line the Keyword occurs on - Position the Keyword occurs at + - + - Token which represents the SPARQL Update WITH Keyword + Gets the Functor of the Expression - + - Creates a new WITH Keyword Token + Transforms the Expression using the given Transformer - Line the Keyword occurs on - Position the Keyword occurs at + Expression Transformer + - + - Static Class which defines the Integer Constants used for Token Types + Represents the XPath month-from-dateTime() function - + - Constants defining Token Types + Creates a new XPath Month from Date Time function + Expression - + - Constants defining Token Types + Calculates the numeric value of the function from the given Date Time + Date Time + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath seconds-from-dateTime() 
function - + - Constants defining Token Types + Creates a new XPath Seconds from Date Time function + Expression - + - Constants defining Token Types + Calculates the numeric value of the function from the given Date Time + Date Time + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath timezone-from-dateTime() function - + - Constants defining Token Types + Expression that the Function applies to - + - Constants defining Token Types + Creates a new XPath Timezone from Date Time function + Expression - + - Constants defining Token Types + Calculates the value of the function in the given Evaluation Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Calculates the effective boolean value of the function in the given Evaluation Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the Variables used in the function - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Gets the Arguments of the Expression - + - Constants defining Token Types + Gets whether an expression can safely be evaluated in parallel - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath year-from-dateTime() function - + - Constants defining Token Types + Creates a new XPath Year from Date Time function + Expression - + - Constants defining Token Types + 
Calculates the numeric value of the function from the given Date Time + Date Time + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Namespace containing expression classes which provide numeric functions from the XPath function library - + - Constants defining Token Types + Represents the XPath fn:abs() function - + - Constants defining Token Types + Creates a new XPath Absolute function + Expression - + - Constants defining Token Types + Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:ceiling() function - + - Constants defining Token Types + Creates a new XPath Ceiling function + Expression - + - Constants defining Token Types + Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:floor() function - + - Constants 
defining Token Types + Creates a new XPath Floor function + Expression - + - Constants defining Token Types + Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:round() function - + - Constants defining Token Types + Creates a new XPath Round function + Expression - + - Constants defining Token Types + Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:round() function - + - Constants defining Token Types + Creates a new XPath RoundHalfToEven function + Expression - + - Constants defining Token Types + Creates a new XPath RoundHalfToEven function + Expression + Precision - + - Constants defining Token Types + Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants 
defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Namespace containing expression classes which provide string functions from the XPath function library - + - Constants defining Token Types + Abstract Base class for XPath Binary String functions - + - Constants defining Token Types + Expression the function applies over - + - Constants defining Token Types + Argument expression - + - Constants defining Token Types + Whether the argument can be null - + - Constants defining Token Types + Type validation function for the argument - + - Constants defining Token Types + Creates a new XPath Binary String function + Expression + Argument + Whether the argument may be null + Type validator for the argument - + - Constants defining Token Types + Gets the Value of the function as evaluated in the given Context for the given Binding ID + Context + Binding ID + - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the Variables used in the function - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Gets the Arguments of the Expression - + - Constants defining Token Types + Gets whether an expression can safely be evaluated in parallel - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Abstract Base Class for XPath Unary String functions - + - Constants defining Token 
Types + Expression the function applies over - + - Constants defining Token Types + Creates a new XPath Unary String function + Expression - + - Constants defining Token Types + Gets the Value of the function as evaluated in the given Context for the given Binding ID + Context + Binding ID + - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the Variables used in the function - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Gets the Arguments of the Expression - + - Constants defining Token Types + Gets whether an expression can safely be evaluated in parallel - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:compare() function - + - Constants defining Token Types + Creates a new XPath Compare function + First Comparand + Second Comparand - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:concat() function - + - Constants defining Token Types + Creates a new XPath Concatenation function + First Expression + Second Expression - + - Constants defining Token Types + Creates a new XPath Concatenation function + Enumeration of expressions 
- + - Constants defining Token Types + Gets the Value of the function as evaluated in the given Context for the given Binding ID + Context + Binding ID + - + - Constants defining Token Types + Gets the Arguments the function applies to - + - Constants defining Token Types + Gets whether an expression can safely be evaluated in parallel - + - Constants defining Token Types + Gets the Variables used in the function - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:contains() function - + - Constants defining Token Types + Creates a new XPath Contains function + Expression + Search Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:encode-for-uri() function - + - Constants defining Token Types + Creates a new XPath Encode for URI function + Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + 
Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:ends-with() function - + - Constants defining Token Types + Creates a new XPath Ends With function + Expression + Suffix Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:escape-html-uri() function - + - Constants defining Token Types + Creates a new XPath Escape HTML for URI function + Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:lower-case() function - + - Constants defining Token Types + Creates a new XPath Lower Case function + Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:normalize-space() function - + - Constants defining 
Token Types + Creates a new XPath Normalize Space function + Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:normalize-unicode() function - + - Constants defining Token Types + Creates a new XPath Normalize Unicode function + Expression - + - Constants defining Token Types + Creates a new XPath Normalize Unicode function + Expression + Normalization Form - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:replace() function - + - Constants defining Token Types + Creates a new XPath Replace function + Text Expression + Search Expression + Replace Expression - + - Constants defining Token Types + Creates a new XPath Replace function + Text Expression + Search Expression + Replace Expression + Options Expression - + - Constants defining Token Types + Configures the Options for the Regular Expression + Node detailing the Options + Whether errors should be thrown or suppressed - + - Constants defining Token Types + Returns the value of the 
Expression as evaluated for a given Binding as a Literal Node + Evaluation Context + Binding ID + - + - Constants defining Token Types + Gets the String representation of this Expression + - + - Constants defining Token Types + Gets the enumeration of Variables involved in this Expression - + - Constants defining Token Types + Gets the Type of the Expression - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Gets the Arguments of the Expression - + - Constants defining Token Types + Gets whether an expression can safely be evaluated in parallel - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:starts-with() function - + - Constants defining Token Types + Creates a new XPath Starts With function + Expression + Prefix Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Constants defining Token Types + Represents the XPath fn:string-length() function - + - Constants defining Token Types + Creates a new XPath String Length function + Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - 
Constants defining Token Types + Represents the XPath fn:substring-after() function - + - Constants defining Token Types + Creates a new XPath Substring After function + Expression + Search Expression - + - Constants defining Token Types + Gets the Value of the function as applied to the given String Literal and Argument + Simple/String typed Literal + Argument + - + - Constants defining Token Types + Gets the String representation of the function + - + - Constants defining Token Types + Gets the Functor of the Expression - + - Constants defining Token Types + Transforms the Expression using the given Transformer + Expression Transformer + - + - Parser for RDF/JSON Syntax + Represents the XPath fn:substring-before() function - Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Read RDF/JSON Syntax from some Stream into a Graph + Creates a new XPath Substring Before function - Graph to read into - Stream to read from + Expression + Search Expression - + - Read RDF/JSON Syntax from some Input into a Graph + Gets the Value of the function as applied to the given String Literal and Argument - Graph to read into - Input to read from + Simple/String typed Literal + Argument + - + - Read RDF/Json Syntax from some File into a Graph + Gets the String representation of the function - Graph to read into - File to read from + - + - Read RDF/JSON Syntax from some Stream using a RDF Handler + Gets the Functor of the Expression - RDF Handler to use - Stream to read from - + - Read RDF/JSON Syntax from some Input using a RDF Handler + Transforms the Expression using the given Transformer - RDF Handler to use - Input to read from + Expression Transformer + - + - Read RDF/JSON Syntax from a file using a RDF Handler + Represents the XPath fn:substring() function - RDF Handler to use - File to read from - + - Internal top level Parse method which parses the Json + Creates a new XPath Substring function - RDF Handler 
to use - Stream to read from + Expression + Start - + - Parser method which parses the top level Json Object which represents the overall Graph + Creates a new XPath Substring function - Parser Context + Expression + Start + Length - + - Parser method which parses Json Objects representing Triples + Returns the value of the Expression as evaluated for a given Binding as a Literal Node - Parser Context + Evaluation Context + Binding ID + - + - Parser method which parses Json Objects representing Predicate Object Lists + Gets the Variables used in the function - Parser Context - Subject of Triples which comes from the parent Json Object - + - Parser method which parses Json Arrays representing Object Lists + Gets the String representation of the function - Parser Context - Subject of Triples which comes from the Grandparent Json Object - Predicate of Triples which comes form the Parent Json Object + - + - Parser method which parses Json Objects reprsenting Object Nodes + Gets the Type of the Expression - Parser Context - Subject of Triples which comes from the Great-Grandparent Json Object - Predicate of Triples which comes form the Grandparent Json Object - + - Helper method for raising Error messages with attached Line Information + Gets the Functor of the Expression - Parser Context - Error Message - - + - Helper method for raising Error messages with attached Position Information + Gets the Arguments of the Expression - Parser Context - Error Message - Start Position - - + - Helper Method for raising the Warning event + Gets whether an expression can safely be evaluated in parallel - Warning Message - + - Event which is raised if there's a non-fatal issue with the RDF/Json Syntax + Transforms the Expression using the given Transformer + Expression Transformer + - + - Gets the String representation of the Parser which is a description of the syntax it parses + Represents the XPath fn:upper-case() function - - + - Helper class with useful constants relating to the 
RDF Specification + Creates a new XPath Upper Case function + Expression - + - URI for rdf:first + Gets the Value of the function as applied to the given String Literal + Simple/String typed Literal + - + - URI for rdf:rest + Gets the String representation of the function + - + - URI for rdf:nil + Gets the Functor of the Expression - + - URI for rdf:type + Transforms the Expression using the given Transformer + Expression Transformer + - + - URI for rdf:XMLLiteral + Represents the XPath boolean() function - + - URI for rdf:subject + Creates a new XPath Boolean Function + Expression to compute the Effective Boolean Value of - + - URI for rdf:predicate + Evaluates the expression + Evaluation Context + Binding ID + - + - URI for rdf:object + Gets the String representation of the function + - + - URI for rdf:Statement + Gets the Functor of the Expression - + - URI for rdf:langString the implicit type of language specified literals + Gets the Type of the Expression - + - Pattern for Valid Language Specifiers + Transforms the Expression using the given Transformer + Expression Transformer + - + - Regular Expression for Valid Language Specifiers + Represents an Unknown Function that is not supported by dotNetRDF + + + This exists as a placeholder class so users may choose to parse Unknown Functions and have them appear in queries even if they cannot be evaluated. This is useful when you wish to parse a query locally to check syntactic validity before passing it to an external query processor which may understand how to evaluate the function. Using this placeholder also allows queries containing Unknown Functions to still be formatted properly. 
+ + - + - Determines whether a given String is a valid Language Specifier + Creates a new Unknown Function that has no Arguments - String to test - + Function URI - + - Parser for SPARQL Results JSON Format + Creates a new Unknown Function that has a Single Argument + Function URI + Argument Expression - + - Loads a Result Set from an Input Stream + Creates a new Unknown Function that has multiple Arguments - Result Set to load into - Input Stream to read from + Function URI + Argument Expressions - + - Loads a Result Set from a File + Gives null as the Value since dotNetRDF does not know how to evaluate Unknown Functions - Result Set to load into - File to load from + Evaluation Context + Binding ID + - + - Loads a Result Set from an Input + Gets the Variables used in the Function - Result Set to load into - Input to read from - + - Loads a Result Set from an Input using a Results Handler + Gets the Expression Type - Results Handler to use - Input to read from - + - Loads a Result Set from an Input Stream using a Results Handler + Gets the Function URI of the Expression - Results Handler to use - Input Stream to read from - + - Loads a Result Set from a file using a Results Handler + Gets the Arguments of the Expression - Results Handler to use - File to read from - + - Parser method which parses the Stream as Json + Gets whether an expression can safely be evaluated in parallel - Input Stream - Results Handler - + - Parser method which parses the top level Json Object which represents the overall Result Set + Gets the String representation of the Expression + - + - Parser method which parses the 'head' property of the top level Json Object which represents the Header of the Result Set + Transforms the Expression using the given Transformer + Expression Transformer + - + - Parser method which parses the 'head' property of the top level Json Object which represents the Header of the Result Set + Namespace containing expression classes representing primary 
constructs in SPARQL expression trees i.e. constants, modifiers and variables - + - Parser method which parses the Header Object of the Result Set + Class for representing Aggregate Expressions which have Numeric Results - + - Parser method which parses the Properties of the Header Object + Creates a new Aggregate Expression Term that uses the given Aggregate + Aggregate - + - Parser method which parses the 'vars' property of the Header Object + Evaluates the aggregate expression + Evaluation Context + Binding ID + - + - Parser method which parses the 'link' property of the Header Object + Gets the Aggregate this Expression represents - + - Parser method which parses the Body of the Result Set which may be either a 'results' or 'boolean' property of the top level Json Object + Gets the String representation of the Aggregate Expression + - + - Parser method which parses the Results Object of the Result Set + Gets the enumeration of variables that are used in the the aggregate expression - + - Parser method which parses the 'bindings' property of the Results Object + Gets the Type of the Expression - + - Parser method which parses a Binding Object which occurs in the array of Bindings + Gets the Functor of the Expression - + - Parser method which parses a Bound Variable Object which occurs within a Binding Object + Gets the Arguments of the Expression - Parser Context - Variable Name - Result Object that is being constructed from the Binding Object - - + - Parser method which parses the 'boolean' property of the Result Set + Gets whether the expression can be parallelised - + - Checks whether a JSON Token is valid as the value for a RDF term + Transforms the Expression using the given Transformer - Context + Expression Transformer - + - Skips to the end of the current object + Class for representing the All Modifier - Context - True if a warning should be issued - + - Helper method for raising Error messages with attached Line Information + Throws a 
NotImplementedException since this class is a placeholder and only used in parsing - Parser Context - Error Message + SPARQL Evaluation Context + Binding ID - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Returns an empty enumerable - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + Gets the Type of the Expression - + - Gets the String representation of the Parser which is a description of the syntax it parses + Gets the Functor of the Expression - - + - Parser for SPARQL Results XML Format + Gets the Arguments of the Expression - + - Loads a Result Set from an Input + Gets the String representation of the Expression - Result Set to load into - Input to read from + - + - Loads a Result Set from an Input Stream + Transforms the Expression using the given Transformer - Result Set to load into - Input Stream to read from + Expression Transformer + - + - Loads a Result Set from a File + Gets whether an expression can safely be evaluated in parallel - Result Set to load into - File to load from - + - Loads a Result Set from an Input using a Results Handler + Class for representing constant terms - Results Handler to use - Input to read from - + - Loads a Result Set from an Input using a Results Handler + Node this Term represents - Results Handler to use - Input Stream to read from - + - Loads a Result Set from a file using a Results Handler + Creates a new Constant - Results Handler to use - File to read from + Valued Node - + - Initialises the XML Reader settings + Creates a new Constant - + Node - + - Parses the XML Result Set format into a set of SPARQLResult objects + Evaluates the expression - Parser Context + Evaluation Context + Binding ID + - + - Internal Helper method which parses the child element of a <binding> element into an INode + Gets the String representation of this Expression - Parser Context - + - Helper Method 
which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Gets an Empty Enumerable since a Node Term does not use variables - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + Gets the Type of the Expression - + - Gets the String representation of the Parser which is a description of the syntax it parses + Gets the Functor of the Expression - - + - Static Helper Class which allows raw strings of RDF/SPARQL Results to be parsed directly + Gets the Arguments of the Expression - - The API structure for dotNetRDF means that our IRdfReader classes which are our Parsers only have to support parsing from a file or a stream. For most applications this is fine but there may be occassions when you wish to parse a small fragment of RDF and you don't want to have to put it into a file before you can parse it. - - + - Parses a raw RDF String using the given IRdfReader + Gets whether an expression can safely be evaluated in parallel - Graph to load into - Raw RDF String - Parser to use to read the data - Use this when you have a raw RDF string and you know the syntax the RDF is in - + - Parses a raw RDF String (attempts to auto-detect the format) + Node this Term represents - Graph to load into - Raw RDF String - -

- Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate and it may choose a parser which is less expressive than the actual syntax e.g. TurtleParser instead of Notation3Parser as it tends to guess downwards. -

-

- For example, if you parsed a Notation 3 string that contained Graph Literals but didn't use any of the Notation 3 specific directives like @keywords, it would be assumed to be Turtle but would then fail to parse -

-

- The auto-detection rules used are as follows: -

-
    -
  1. If it contains <?xml and <rdf:RDF then it's most likely RDF/XML
  2. -
  3. If it contains <html then it's most likely HTML with possibly RDFa embedded
  4. -
  5. - If it contains @prefix or @base then it's Turtle/Notation 3 -
      -
    1. If it contains @keywords, @forall or @forsome then it's Notation 3
    2. -
    3. Otherwise it's Turtle
    4. -
    -
  6. -
  7. If it contains all of a set of terms and symbols that occur in RDF/JSON then it's most likely RDF/JSON. These terms are "value","type",{,},[ and ]
  8. -
  9. Otherwise try it as NTriples; NTriples has no real distinctive syntax, so it is hard to test whether a string is NTriples other than by parsing it
  10. -
-
- + - Parses a raw RDF Dataset String using the given Parser + Transforms the Expression using the given Transformer - Store to load into - Raw RDF Dataset String - Parser to use + Expression Transformer + - + - Parses a raw RDF Dataset String (attempts to auto-detect the format) + Class for representing the Distinct Modifier - Store to load into - Raw RDF Dataset String - -

- Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate. -

-
- + - Parses a raw SPARQL Results String (attempts to auto-detect the format) + Throws a NotImplementedException since this class is a placeholder and only used in parsing - SPARQL Result Set to fill - Raw SPARQL Results String - -

- Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate. -

-
+ SPARQL Evaluation Context + Binding ID +
- + - Parses a raw SPARQL Results String using the given Parser + Returns an empty enumerable - SPARQL Result Set to fill - Raw SPARQL Results String - Parser to use - + - Uses the rules described in the remarks for the Parse() to return the most likely Parser + Gets the Type of the Expression - Raw RDF String - + - Uses the format detection rules to determine the most likely RDF Dataset Parser + Gets the Functor of the Expression - Raw RDF Dataset String - - + - Uses the format detection rules to return the most likely SPARQL Results parser + Gets the Arguments of the Expression - Raw SPARQL Results String - - + - Parser for parsing TriG (Turtle with Named Graphs) RDF Syntax into a Triple Store + Gets whether an expression can safely be evaluated in parallel - The Default Graph (if any) will be given the special Uri trig:default-graph - + - Creates a TriG Parser than uses the default syntax + Gets the String representation of the Expression + - + - Creates a TriG Parser which uses the specified syntax + Transforms the Expression using the given Transformer - Syntax + Expression Transformer + - + - Gets/Sets whether Tokeniser Tracing is used + Class for representing Graph Pattern Terms (as used in EXISTS/NOT EXISTS) - + - Gets/Sets the TriG syntax used + Creates a new Graph Pattern Term + Graph Pattern - + - Gets/Sets the token queue mode used + Gets the value of this Term as evaluated for the given Bindings in the given Context + + + - + - Loads the named Graphs from the TriG input into the given Triple Store + Gets the Graph Pattern this term represents - Triple Store to load into - File to load from - + - Loads the named Graphs from the TriG input into the given Triple Store + Gets the Variables used in the Expression - Triple Store to load into - Input to load from - + - Loads the named Graphs from the TriG input using the given RDF Handler + Gets the Type of the Expression - RDF Handler to use - File to load from - + - Loads the named Graphs from the TriG 
input using the given RDF Handler + Gets the Functor of the Expression - RDF Handler to use - Input to load from - + - Tries to parse a directive + Gets the Arguments of the Expression - - + - Tries to parse directives + Gets whether an expression can safely be evaluated in parallel - - - - This overload is needed because in some cases we may dequeue a token before we know it is a directive - - + - Helper method used to raise the Warning event if there is an event handler registered + Transforms the Expression using the given Transformer - Warning message + Expression Transformer + - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Class representing Variable value expressions - + - Gets the String representation of the Parser which is a description of the syntax it parses + Creates a new Variable Expression - + Variable Name - + - Acceptable Turtle syntaxes + Evaluates the expression + Evaluation Context + Binding ID + - + - Turtle as originally specified by the Turtle Team Submission + Gets the String representation of this Expression + - + - Turtle as standardised by the W3C RDF Working Group + Gets the enumeration containing the single variable that this expression term represents - + - Acceptable TriG syntaxes + Gets the Type of the Expression - + - TriG as originally specified + Gets the Functor of the Expression - - @base is not permitted and @prefix may only occur outside of graphs - - + - TriG as specified by the TriG Member Submission + Gets the Arguments of the Expression - - @base is permitted and both @base and @prefix may occur both inside and outside graphs but the tokens use Turtle Team Submission rules i.e. newer escape sequences and other changes in the official W3C specification of Turtle do not apply. 
- - + - Helper function relating to the Turtle Specifications + Gets whether an expression can safely be evaluated in parallel - Not currently used in the actual TurtleTokeniser or TurtleParser but is used for the new TriGTokeniser - + - Pattern for Valid Integers in Turtle + Transforms the Expression using the given Transformer + Expression Transformer + - + - Pattern for Valid Decimals in Turtle + Expression Factory which generates ARQ Function expressions + + + Designed to help provide feature parity with the ARQ query engine contained in Jena + + - + - Pattern for Valid Doubles in Turtle + ARQ Function Namespace - + - Pattern for determining whether a given String should be serialized as a Long Literal + Constants for ARQ Numeric functions - + - Determines whether a given String is a valid Plain Literal + Constants for ARQ Numeric functions - String to test - Turtle Syntax - - + - Determines whether a given String is a valid Plain Literal for the given Datatype + Constants for ARQ Numeric functions - Value - Datatype - Turtle Syntax - - + - Determines whether a given String is a valid Integer + Constants for ARQ Numeric functions - String to test - - + - Determines whether a given String is a valid Decimal + Constants for ARQ Graph functions - String to test - - + - Determines whether a given String is a valid Double + Constants for ARQ Graph functions - String to test - - + - Gets whether a QName is valid in Turtle (assumes Turtle as originally specified by Dave Beckett) + Constants for ARQ Graph functions - QName - - + - Gets whether the given value is a valid prefix in Turtle + Constants for ARQ String functions - Value - Turtle Syntax - - + - Gets whether the given value is the valid prefix portion of a prefixed name in Turtle + Constants for ARQ String functions - Value - Turtle Syntax - - + - Gets whether the given value is the valid local name portion of a prefixed name in Turtle + Constants for ARQ String functions - Value - Turtle Syntax - - + - Gets 
whether the given value matches the PN_LOCAL rule from the Turtle specification + Constants for ARQ Miscellaneous functions - Value - - + - Checks whether a given String matches the PLX rule from the Turtle W3C Specification + Constants for ARQ Miscellaneous functions - String as character array - Start Index - Resulting End Index - - + - Gets whether a character is a Hex character + Array of Extension Function URIs - Character - - + - Determines whether a given String is a valid QName + Tries to create an ARQ Function expression if the function Uri correseponds to a supported ARQ Function - String to test - Turtle Syntax - + Function Uri + Function Arguments + Scalar Arguments + Generated Expression + Whether an expression was successfully generated - + - Unescapes local name escapes in a QName + Gets the Extension Function URIs supported by this Factory - QName - Unescaped QName - + - Determines whether a given String should be serialized as a Long Literal + Gets the Extension Aggregate URIs supported by this Factory - String to test - - + - Infers the Type of a Plain Literal + Abstract base class for Unary Expressions - Plain Literal to infer the Type of - Turtle Syntax - A Uri representing the XML Scheme Data Type for the Plain Literal - + - Gets whether a character matches the PN_CHARS_BASE production from the Turtle specifications + The sub-expression of this Expression - Character - - + - Gets whether a surrogate pair matches the PN_CHARS_BASE production from the Turtle specifications + Creates a new Base Unary Expression - High surrogate - Low surrogate - + Expression - + - Gets whether a character matches the PN_CHARS production from the Turtle specification + Evaluates the expression - Character + Evaluuation Context + Binding ID - + - Gets whether a surrogate pair matches the PN_CHARS production from the Turtle specification + Gets the String representation of the Expression - High surrogate - Low surrogate - + - Gets whether a character matches the 
PN_CHARS_U production from the Turtle specification + Gets an enumeration of all the Variables used in this expression - Character - - + - Gets whether a surrogate pair matches the PN_CHARS_U production from the Turtle specification + Gets the Type of the Expression - High surrogate - Low surrogate - - + - Gets whether a character matches the nameStartChar production from the Turtle specification + Gets the Functor of the Expression - Character - - + - Gets whether a surrogate pair matches the nameStartChar production from the Turtle specification + Gets the Arguments of the Expression - High surrogate - Low surrogate - - + - Gets whether a character matches the nameChar production from the Turtle specification + Gets whether an expression can safely be evaluated in parallel - Character - - + - Gets whether a surrogate pair matches the nameChar production from the Turtle specification + Transforms the arguments of the expression using the given transformer - High surrogate - Low surrogate + Expression Transformer - + - Abstract Base Class for IEvent implementations + Abstract base class for Binary Expressions - + - Creates a new Event + The sub-expressions of this Expression - Event Type - Position Information - + - Creates a new Event + The sub-expressions of this Expression - Event Type - + - Gets the Type for this Event + Creates a new Base Binary Expression + Left Expression + Right Expression - + - Gets the Position Information (if any) + Evaluates the expression - - Availability of Position Information depends on the how the source document was parsed - + Evaluation Context + Binding ID + - + - Abstract Base Class for IRdfXmlEvent implementations + Gets the String representation of the Expression + - + - Creates an Event and fills in its Values + Gets an enumeration of all the Variables used in this expression - Type of the Event - Source XML that generated the Event - Position of the XML Event - + - Creates an Event and fills in its Values + Gets the Type of 
the Expression - Type of the Event - Source XML that generated the Event - + - Gets the XML that this Event was generated from + Gets the Functor of the Expression - + - Abstract Base Class for IRdfAEvent implementations + Gets the Arguments of the Expression - + - Creates a new RDFa Event + Gets whether an expression can safely be evaluated in parallel - Event Type - Position Info - Attributes - + - Gets the attributes of the event i.e. the attributes of the source element + Transforms the arguments of the expression using the given transformer + Expression Transformer + - + - Gets whether the Event has a given attribute + An Expression Transformer is a class that can traverse a SPARQL Expression tree and apply transformations to it - Attribute Name - - + - Gets the value of a specific attribute + Transforms the expression using this transformer - Attribute Name + Expression - + - Interface for RDF/XML events as defined by the RDF/XML Specification + Abstract implementation of an Expression Transformer which substitutes primary expressions - Used to provide polymorphism for the XML/RDF Parser - + - Gets the XML that produced the given event (if any) + Transforms an expression into a form where primary expressions may be substituted + Expression + - + - Namespace for Event classes which are used to support Event Based parsing of RDF syntaxes + Returns the substitution for a given primary expression + Expression + - + - Static Class which defines the Event Types for RDF/XML Events + Interface for implementing SPARQL custom expression factories which turn URI specified functions into SPARQL Expressions - + - Constants for Event Types + Tries to Create a SPARQL Expression for a function with the given URI and set of arguments + URI of the function + List of Arguments + Dictionary of Scalar Arguments which are supportable by aggregates when Syntax is set to SPARQL 1.1 Extended + Resulting Expression if able to generate + True if an expression is generated, false if not 
- + - Constants for Event Types + Gets the Extension Function URIs that this Factory provides - + - Constants for Event Types + Gets the Extension Aggregate URIs that this Factory provides - + - Constants for Event Types + Numeric Types for Sparql Numeric Expressions + All Numeric expressions in Sparql are typed as Integer/Decimal/Double - + - Constants for Event Types + Not a Number - + - Constants for Event Types + An Integer - + - Constants for Event Types + A Decimal - + - Constants for Event Types + A Single precision Floating Point - + - Constants for Event Types + A Double precision Floating Point - + - Constants for Event Types + SPARQL Expression Types - + - Constants for Event Types + The Expression is a Primary Expression which is a leaf in the expression tree - + - Constants for Event Types + The Expression is a Unary Operator which has a single argument - + - Constants for Event Types + The Expression is a Binary Operator which has two arguments - + - Constants for Event Types + The Expression is a Function which has zero/more arguments - + - Constants for Event Types + The Expression is an Aggregate Function which has one/more arguments - + - Event representing the Root Node of the Document + The Expression is a Set Operator where the first argument forms the LHS and all remaining arguments form a set on the RHS - + - Creates a new Root Event + The Expression is a Unary Operator that applies to a Graph Pattern - Base Uri of the Document - Source XML of the Document - Position Info - + - Creates a new Root Event + Interface for SPARQL Expression Terms that can be used in Expression Trees while evaluating Sparql Queries - Base Uri of the Document - Source XML of the Document - + - Gets/Sets the ElementEvent that represents the actual DocumentElement + Evalutes a SPARQL Expression for the given binding in a given context + Evaluation Context + Binding ID + + + + Newly introduced in Version 0.6.0 to replace the variety of functions that were used 
previously for numeric vs non-numeric versions to allow our code to be simplified and improve performance + + - + - Gets all the Child ElementEvents of the Document Root + Gets an enumeration of all the Variables used in an expression - + - Gets the Base Uri of the Node + Gets the SPARQL Expression Type - + - Gets the Language of the Node + Gets the Function Name or Operator Symbol - function names may be URIs of Keywords or the empty string in the case of primary expressions - + - Event representing a Node from the XML Document + Gets the Arguments of this Expression - + - Creates a new Element Event + Transforms the arguments of the expression using the given transformer - QName of the XML Node - Base Uri of the XML Node - Source XML of the XML Node - Position Info + Expression Transformer + - + - Creates a new Element Event + Gets whether an expression can safely be evaluated in parallel - QName of the XML Node - Base Uri of the XML Node - Source XML of the XML Node - + - Creates new Element Event + Expression Factory which generates Leviathan Function expressions - Local Name of the XML Node - Namespace Prefix of the XML Node - Base Uri of the XML Node - Source XML of the XML Node - Position Info - + - Creates new Element Event + Leviathan Function Namespace - Local Name of the XML Node - Namespace Prefix of the XML Node - Base Uri of the XML Node - Source XML of the XML Node - + - Gets the Local Name of this Element Event + Constants for Leviathan String Functions - + - Gets the Namespace of this Element Event + Constants for Leviathan String Functions - + - Gets the QName of this Element Event + Constants for Leviathan Numeric Functions - + - Gets the Child Element Events + Constants for Leviathan Numeric Functions - These correspond to the Child Nodes of the XML Node - + - Gets/Sets the Base Uri of the XML Node + Constants for Leviathan Numeric Functions - + - Gets the Attribute Events + Constants for Leviathan Numeric Functions - These correspond to the 
Attributes of the XML Node (with some exceptions as defined in the RDF/XML specification) - + - Gets the Namespace Attribute Events + Constants for Leviathan Numeric Functions - - These correspond to all the Namespace Attributes of the XML Node - - + - Gets/Sets the List Counter + Constants for Leviathan Numeric Functions - + - Gets/Sets the Language of this Event + Constants for Leviathan Numeric Functions - + - Gets/Sets the Subject Event of this Event + Constants for Leviathan Numeric Functions - Will be assigned according to the Parsing rules during the Parsing process and later used to generate a Subject Node - + - Gets/Sets the Subject Node of this Event + Constants for Leviathan Numeric Functions - Will be created from the Subject at some point during the Parsing process - + - Gets/Sets the Parse Type for this Event + Constants for Leviathan Numeric Functions - + - Method which sets the Uri for this Element Event + Constants for Leviathan Numeric Functions - Uri Reference to set Uri from - This can only be used on Elements which are rdf:li and thus need expanding into actual list elements according to List Expansion rules. Attempting to set the Uri on any other Element Event will cause an Error message. 
- + - Gets the String representation of the Event + Constants for Leviathan Numeric Functions - - + - An Event for representing the End of Elements + Constants for Leviathan Numeric Functions - + - Creates a new EndElementEvent + Constants for Leviathan Numeric Functions - + - Creates a new EndElementEvent + Constants for Leviathan Numeric Functions - + - An Event for representing Attributes of XML Node + Constants for Leviathan Numeric Functions - + - Creates a new Attribute Event from an XML Attribute + Constants for Leviathan Numeric Functions - QName of the Attribute - Value of the Attribute - Source XML of the Attribute - Position Info - + - Creates a new Attribute Event from an XML Attribute + Constants for Leviathan Numeric Functions + + + + + Constants for Leviathan Numeric Functions - QName of the Attribute - Value of the Attribute - Source XML of the Attribute - + - Creates a new Attribute Event from an XML Attribute + Constants for Leviathan Numeric Functions - Local Name of the Attribute - Namespace Prefix of the Attribute - Value of the Attribute - Source XML of the Attribute - Position Info - + - Creates a new Attribute Event from an XML Attribute + Constants for Leviathan Numeric Functions - Local Name of the Attribute - Namespace Prefix of the Attribute - Value of the Attribute - Source XML of the Attribute - + - Gets the Local Name of the Attribute + Constants for Leviathan Numeric Functions - + - Gets the Namespace Prefix of the Attribute + Constants for Leviathan Numeric Functions - + - Gets the QName of the Attribute + Constants for Leviathan Numeric Functions - + - Gets the Value of the Attribute + Constants for Leviathan Numeric Functions - + - An Event for representing Namespace Attributes of an XML Node + Constants for Leviathan Numeric Functions - + - Creates a new Namespace Attribute Event + Constants for Leviathan Numeric Functions - Namespace Prefix - Namespace Uri - Source XML - Position Info - + - Creates a new Namespace Attribute 
Event + Constants for Leviathan Numeric Functions - Namespace Prefix - Namespace Uri - Source XML - + - Gets the Namespace Prefix + Constants for Leviathan Boolean Aggregates - + - Gets the Namespace Uri + Constants for Leviathan Boolean Aggregates - + - An Event for representing Language Attributes of an XML Node + Constants for Leviathan Boolean Aggregates - + - Creates a new Language Attribute Event + Constants for Leviathan Numeric Aggregates - Language - Source XML - Position Info - + - Creates a new Language Attribute Event + Constants for Leviathan Numeric Aggregates - Language - Source XML - + - Gets the Language + Constants for other Leviathan Aggregate - + - An Event for representing rdf:parseType Attributes of an XML Node + Constants for other Leviathan Aggregate - + - Creates a new Parse Type Attribute Event + Array of Extension Function URIs - Parse Type - Source XML - Position Info - + - Creates a new Parse Type Attribute Event + Array of Extension Aggregate URIs - Parse Type - Source XML - + - Gets the Parse Type + Tries to create an Leviathan Function expression if the function Uri correseponds to a supported Leviathan Function + Function Uri + Function Arguments + Scalar Arguments + Generated Expression + Whether an expression was successfully generated - + - An Event for representing xml:base attributes of XML Nodes + Gets the Extension Function URIs supported by this Factory - + - Creates a new XML Base Attribute + Gets the Extension Aggregate URIs supported by this Factory - Base URI - Source XML - Position Info - + - Creates a new XML Base Attribute + Expression Factory which generates SPARQL Function expressions - Base URI - Source XML + + This supports the requirement of SPARQL 1.1 that all functions can be accessed via URI as well as by keyword. 
This also means that SPARQL 1.1 functions can be used in SPARQL 1.0 mode by using their URIs instead of their keywords and they are then treated simply as extension functions + - + - Gets the Base URI + Namespace Uri for SPARQL Built In Functions Namespace - + - Event for representing plain text content (XML Text Nodes) + Tries to create a SPARQL Function expression if the function Uri correseponds to a supported SPARQL Function + Function Uri + Function Arguments + Scalar Arguments + Generated Expression + Whether an expression was successfully generated - + - Creates a new Text Node + Gets the URIs of available SPARQL Functions - Textual Content of the XML Text Node - Source XML of the Node - Position Info - + - Creates a new Text Node + Gets the URIs of available SPARQL Aggregates - Textual Content of the XML Text Node - Source XML of the Node - + - Gets the Textual Content of the Event + Factory Class for generating Expressions for Sparql Extension Functions + + + Allows for users of the Library to implement and register Custom Expression Factories which can generate Expressions for their own Extension functions which they wish to use in their SPARQL queries. Custom factories may be globally scoped by registering them with the AddCustomFactory() method or locally by passing them to the three argument constructor of the CreateExpression() method. 
+ + - + - Gets the String representation of the Event + List of Custom Expression factories - + + All the standard function libraries (XPath, Leviathan and ARQ) included in dotNetRDF are automatically registered + - + - Event for representing URIRefs + Tries to create an Expression from the given function Uri and list of argument expressions + Function Uri + List of Argument Expressions + + + + Uses only the globally scoped custom expression factories + + - + - Creates a new URIRef Event from a URIRef in an XML Attribute value or similar + Tries to create an Expression from the given function Uri and list of argument expressions - URIRef - Source XML of the URIRef - Position Info + Function Uri + List of Argument Expressions + Enumeration of locally scoped expression factories to use + + + + Globally scoped custom expression factories are tried first and then any locally scoped expression factories are used + + - + - Creates a new URIRef Event from a URIRef in an XML Attribute value or similar + Tries to create an Expression from the given function Uri and list of argument expressions - URIRef - Source XML of the URIRef + Function Uri + List of Argument Expressions + Scalar Arguments + Enumeration of locally scoped expression factories to use + + + + Globally scoped custom expression factories are tried first and then any locally scoped expression factories are used + + - + - Gets the URIRef + Registers a Custom Expression Factory + A Custom Expression Factory - + - Event for representing QNames + Gets the Global Custom Expression Factories that are in use - + - Creates a new QName Event + Expression Factory which generates XPath Function expressions - QName - Source XML of the QName - Position Info - + - Creates a new QName Event + Namespace Uri for XPath Functions Namespace - QName - Source XML of the QName - + - Gets the QName + Constant representing the XPath boolean functions - + - Event for representing the need for a Blank Node + Constant representing the 
XPath boolean functions - + - Creates a new Blank Node ID Event for a named Blank Node + Constants representing the names of XPath String functions - Node ID for the Blank Node - Source XML - Position Info - + - Creates a new Blank Node ID Event for a named Blank Node + Constants representing the names of XPath String functions - Node ID for the Blank Node - Source XML - + - Creates a new Blank Node ID Event for an anonymous Blank Node + Constants representing the names of XPath String functions - Source XML - Position Info - + - Creates a new Blank Node ID Event for an anonymous Blank Node + Constants representing the names of XPath String functions - Source XML - + - Gets the Blank Node ID (if any) + Constants representing the names of XPath String functions - + - An Event for representing Plain Literals + Constants representing the names of XPath String functions - + - Creates a new Plain Literal Event + Constants representing the names of XPath String functions - Value of the Literal - Language Specifier of the Literal - Source XML of the Event - Position Info - + - Creates a new Plain Literal Event + Constants representing the names of XPath String functions - Value of the Literal - Language Specifier of the Literal - Source XML of the Event - + - Gets the Value of the Plain Literal + Constants representing the names of XPath String functions - + - Gets the Langugage Specifier of the Plain Literal + Constants representing the names of XPath String functions - + - An Event for representing Typed Literals + Constants representing the names of XPath String functions - + - Creates a new Typed Literal Event + Constants representing the names of XPath String functions - Value of the Literal - DataType Uri of the Literal - Source XML of the Event - Position Info - + - Creates a new Typed Literal Event + Constants representing the names of XPath String functions - Value of the Literal - DataType Uri of the Literal - Source XML of the Event - + - Gets the Value of the 
Typed Literal + Constants representing the names of XPath String functions - + - Gets the DataType of the Typed Literal + Constants representing the names of XPath String functions - + - Gets the String representation of the Event + Constants representing the names of XPath String functions - - + - An Event for representing that the Event Queue should be cleared of previously queued events + Constants representing the names of XPath String functions - + - Creates a new Clear Queue Event + Constants representing the names of XPath String functions - + - A DOM Based event generator for RDF/XML parser that uses System.Xml DOM to parse events + Constants representing the names of XPath String functions - + - Creates a new DOM Based event generator + Constants representing the XPath Boolean constructor functions - XML Document - + - Creates a new DOM Based event generator + Constants representing the XPath Boolean constructor functions - Input Stream - + - Creates a new DOM Based event generator + Constants representing the XPath Numeric Functions - Input File - + - Gets all events from the XML DOM + Constants representing the XPath Numeric Functions - Parser Context - + - Given an XML Node that is the Root of the RDF/XML section of the Document Tree creates the RootEvent and generates the rest of the Event Tree by recursive calls to the GenerateEvents method + Constants representing the XPath Numeric Functions - Parser Context - XML Node that is the Root of the RDF/XML section of the Document Tree - - + - Given an XML Node creates the relevant RDF/XML Events for it and recurses as necessary + Constants representing the XPath Numeric Functions - Parser Context - The Node to create Event(s) from - The Parent Node of the given Node - - + - Checks whether a given XML Node can be discarded as it does not have any equivalent Event in the RDF/XML Syntax model + Constants representing the XPath Numeric Functions - XML Node to test - True if the Node can be ignored - Comment 
and Text Nodes are ignored. Text Nodes will actually become Text Events but we'll access the Text using the InnerText property of the Element Nodes instead - + - Takes the Event Tree and Flattens it into a Queue as per the rules laid out in the RDF/XML Specification + Constants representing the XPath Date Time functions - Parser Context - Event which is the Root of the Tree (not necessarily a RootEvent) - A numeric value used for Parser Tracing to indicate nesting levels of the Event Tree - + - A JIT event generator for RDF/XML parsing that uses Streaming parsing to parse the events + Constants representing the XPath Date Time functions - - Currently unimplemented stub class - - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Stream - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Stream - Base URI - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Text Reader - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Text Reader - Base URI - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Filename - + - Creates a new Streaming Event Generator + Constants representing the XPath Date Time functions - Filename - Base URI - + - Initialises the XML Reader settings + Constants representing the Normalization Form values supported by the XPath normalize-unicode() function - - + - Gets the next event from the XML stream + Constants representing the Normalization Form values supported by the XPath normalize-unicode() function - - + - Gets whether the event generator has finished generating events + Constants representing the Normalization Form values supported by the XPath normalize-unicode() function - + - Represents a Queue of events for use by event based parsers + Constants representing the Normalization Form values 
supported by the XPath normalize-unicode() function - + - Queue of Events + Constants representing the Normalization Form values supported by the XPath normalize-unicode() function - + - Creates a new Event Queue + Argument Type Validator for validating that a Literal either has no datatype or is a String - + - Creates a new Event Queue with the given Event Generator + Argument Type Validator for validating that a Literal has an Integer datatype - Event Generator - + - Dequeues and returns the next event in the Queue + Argument Type Validator for validating that a Literal has a Numeric datatype - - + - Adds an event to the end of the Queue + Tries to create an XPath Function expression if the function Uri correseponds to a supported XPath Function - Event + Function Uri + Function Arguments + Scalar Arguments + Generated Expression + Whether an expression was successfully generated - + - Peeks and returns the next event in the Queue + Gets the Extension Function URIs supported by this Factory - - + - Clears the Queue + Gets the Extension Aggregate URIs supported by this Factory - + - Gets the number of events currently in the Queue + + Namespace containing classes pertaining to the filtering of the results of SPARQL queries + - + - Represents a Queue of events which are streamed from an instance of a IJitEventGenerator for use by an event based parser + Interface for Classes which implement SPARQL Filter Functions - + - Creates a new Streaming Event Queue + Evaluates a Filter in the given Evaluation Context - Event Generator + Evaluation Context - + - Gets the Count of events in the queue + Gets the enumeration of Variables that are used in the Filter - + - Adds an event to the Queue + Gets the Expression that this Filter uses - Event - + - Gets the next event from the Queue and removes it from the Queue + Abstract Base class for Unary Filters that operate on a single Expression - - + - Gets the next event from the Queue while leaving the Queue unchanged + 
Expression which is the Argument to the Filter - - + - Interface for parser events + Creates a new Base Unary Filter + Argument to the Filter - + - Gives some Integer representing the Event Type as understood by a specific Parser implementation + Evaluates a filter in the given Evaluation Context + Evaluation Context - + - Gets the Position Information (if any) + Gets the String representation of the Filter - - Availability of Position Information depends on the how the source document was parsed - + - + - Interface for Event Generators + Gets the enumeration of Variables used in the Filter - - - An Event Generator is a class which takes an input stream which contains XML and generates a series of events from it - - - This interface is a marker interface which indicates that the class is an event generator, implementations should implement one of the concrete derived interfaces as appropriate to their mode of operation. - - - + - Interface for pre-processing event generators + Gets the inner expression this Filter uses - Event Type - Event Parser Context Type - + - Gets all available events + Filter that represents the Sparql BOUND() function - Context - + - Interface for event generators which generate all RDF/XML events in one go prior to parsing taking place + Creates a new Bound Filter + Variable Expression - + - Interface for Just-in-time event generators + Evaluates a filter in the given Evaluation Context - Event Type + Evaluation Context - + - Gets the next available event + Gets the String representation of the Filter - + - Gets whether the Event Generator has finished reading events i.e. 
there are no further events available + Generic Filter for Filters which take a single sub-expression as an argument - + - Interface for RDF/XML event generators which generate events as required during the parsing process + Creates a new Unary Expression Filter which filters on the basis of a single sub-expression + Expression to filter with - + - Interface for implementing Event Queues which provide Bufferable wrappers to Event Generators + Evaluates a filter in the given Evaluation Context + Evaluation Context - + - Removes the first Event from the Queue + Gets the String representation of the Filter - + - Adds an Event to the end of the Queue + Generic Filter for use where multiple Filters are applied on a single Graph Pattern - Event to add - + - Gets the first Event from the Queue without removing it + Creates a new Chain Filter - + First Filter + Second Filter - + - Gets the Event Generator that this Queue uses + Creates a new Chain Filter + Filters - + - Clears the Event Queue + Creates a new Chain Filter + First Filter + Additional Filters - + - Gets the number of Events in the Queue + Evaluates a filter in the given Evaluation Context + Evaluation Context - + - Gets/Sets whether Generator Tracing should be used + Adds an additional Filter to the Filter Chain + A Filter to add - + - Gets the Event Type of the last Event dequeued + Gets the String representation of the Filters + - + - Abstract base implementation of an Event Queue + Gets the enumeration of Variables used in the chained Filters - + - Generator used to fill the Event Queue + Gets the Inner Expression used by the Chained Filters + + Equivalent to ANDing all the Chained Filters expressions + - + - Variable indicating whether Generator Tracing is enabled + + Namespace containing classes used to apply GROUP BY clauses to SPARQL queries + - + - Type of Last Event dequeued + Interface for Classes that represent SPARQL GROUP BY clauses - + - Dequeues an Event from the Queue + Applies the Grouping to 
a Result Binder + Evaluation Context - - - Adds an Event to the Queue - - Event - - + - Gets the next Event from the Queue without removing it from the queue + Applies the Grouping to a Result Binder subdividing the Groups from the previous Group By clause into further Groups + Evaluation Context + Groups - + - Gets the Event Generator used by the Queue + Gets/Sets the child GROUP BY Clause - + - Clears the Event Queue + Gets the Variables used in the GROUP BY - + - Gets the number of Events in the Queue + Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables - + - Gets/Sets Tracing for the Event Queue + Gets the Expression used to GROUP BY - + - Gets the Event Type of the last Event dequeued + Gets/Sets the Variable the value of the GROUP BY expression should be bound to (may be null if not bound to anything) - + - Interface for RDFa events + Abstract Base Class for classes representing Sparql GROUP BY clauses - + - Gets the attributes of the event i.e. the attributes of the source element + Child Grouping - + - Gets whether the Event has a given attribute + Gets/Sets the Child GROUP BY Clause - Attribute Name - - + - Gets the value of a specific attribute + Applies the Grouping to the Binder - Attribute Name + Evaluation Context - + - Available Query Syntaxes + Applies the Grouping to the Binder subdividing Groups from a previous Grouping + Evaluation Context + Groups to subdivide + - + - Use SPARQL 1.0 + Gets the Variables involved in this Group By - + - Use SPARQL 1.1 + Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables - + - Use the latest SPARQL specification supported by the library (currently SPARQL 1.1) with some extensions + Gets the Expression used to GROUP BY - - - Extensions include the following: - -
    -
  • LET assignments (we recommend using the SPARQL 1.1 standards BIND instead)
  • -
  • Additional aggregates - NMAX, NMIN, MEDIAN and MODE (we recommend using the Leviathan Function Library URIs for these instead to make them usable in SPARQL 1.1 mode)
  • -
  • UNSAID alias for NOT EXISTS (we recommend using the SPARQL 1.1 standard NOT EXISTS instead
  • -
  • EXISTS and NOT EXISTS are permitted as Graph Patterns (only allowed in FILTERs in SPARQL 1.1)
  • -
-
- + - Class for parsing SPARQL Queries into SparqlQuery objects that can be used to query a Graph or Triple Store + Gets/Sets the Variable that the grouped upon value should be assigned to - + - Creates a new instance of the SPARQL Query Parser + Represents a Grouping on a given Variable - + - Creates a new instance of the SPARQL Query Parser which supports the given SPARQL Syntax + Creates a new Group By which groups by a given Variable - SPARQL Syntax + Variable Name - + - Creates a new instance of the SPARQL Query Parser using the given Tokeniser Queue Mode + Creates a new Group By which groups by a given Variable and assigns to another variable - Token Queue Mode + Variable Name + Assign Variable - + - Creates a new instance of the SPARQL Query Parser using the given Tokeniser which supports the given SPARQL Syntax + Applies a Grouping on a given Variable to the Binder - Token Queue Mode - SPARQL Syntax + Evaluation Context + - + - Gets/Sets whether Tokeniser progress is Traced to the Console + Applies a Grouping on a given Variable to the Binder Groups from a previous Grouping + Evaluation Context + Binder Group to subgroup + - + - Gets/Sets the Default Base URI for Queries parsed by this Parser instance + Gets the Variables used in the GROUP BY - + - Gets/Sets the Syntax that should be supported + Gets the Projectable Variables used in the GROUP BY i.e. 
Variables that are grouped upon and Assigned Variables - + - Gets/Sets the locally scoped custom expression factories + Gets the Variable Expression Term used by this GROUP BY - + - Gets/Sets the locally scoped Query Optimiser applied to queries at the end of the parsing process + Gets the String representation of the GROUP BY - - - May be null if no locally scoped optimiser is set in which case the globally scoped optimiser will be used - - + - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Query being parsed is detected + Represents a Grouping on a given Expression - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Query being parsed is detected + Creates a new Group By which groups by a given Expression + Expression - + - Parses a SPARQL Query from a File + Applies a Grouping on a given Expression to the Binder - File containing the Query + Evaluation Context - + - Parses a SPARQL Query from an arbitrary Input Stream + Applies a Grouping on a given Variable to the Binder Groups from a previous Grouping - Input Stream + Evaluation Context + Binder Group to subgroup - + - Parses a SPARQL Query from an arbitrary Input + Gets the Fixed Variables used in the Grouping - Input - - + - Parses a SPARQL Query from a String + Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables - A SPARQL Query - - + - Parses a SPARQL Query from a SPARQL Parameterized String + Gets the Expression used to GROUP BY - A SPARQL Parameterized String - - - The SparqlParameterizedString class allows you to use parameters in a String in a manner similar to SQL Commands in the ADO.Net model. See the documentation for SparqlParameterizedString for details of this. 
- - + - Tries to parse a Graph Pattern from the given Parser Context + Gets the String representation of the GROUP BY - Parser Context - Whether the opening Left Curly Bracket is required - + - Interface for Parsers that support Tokeniser Tracing + + Namespace which provides classes which represent the implementation of various operators in SPARQL. This allows for some of the basic operators like + and - to be extended to allow functionality beyond the SPARQL specification such as date time arithmetic. + - + - Gets/Sets whether Tokeniser Tracing is used + + Namespace which provides implementations of which allow for embedding date time arithmetic into SPARQL queries + - + - Interface for Parsers that support Parser Tracing + Abstract base operator for date time operations - + - Gets/Sets whether Parser Tracing is used + Gets whether the arguments are applicable for this operator + Arguments + - + - Interface for parsers that use token based parsing + Abstract base operator for time span operations - + - Gets/Sets the token queue mode used + Gets whether the operator is applicable for the arguments + Arguments + - + + Represents the date time addition operator + + - Namespace for Parsing classes and variety of supporting Classes. - - - Classes here are primarily implementations of IRdfReader with some implementations of IStoreReader and a few other specialised classes. - - - Has child namespaces Events and Tokens for supporting Event and Token based Parsing. 
+ Allows for queries to add durations to date times + + + + + Gets the operator type - + - - Namespace for Validator classes that can be used to validate various forms of syntax - + Applies the operator + Arguments + - + - Interface for classes which can validate Syntax + Represents the date time subtraction operation + + Allows queries to subtract a duration from a date time + - + - Validates the given Data + Gets the operator type - Data - - + - Interface for Validation Results + Applies the operator + Arguments + - + - Gets whether the Syntax was valid + Represents the time span addition operation + + Allows queries to add time spans together + - + - Gets an informational message about the validity/invalidity of the Syntax + Gets the operator type - + - Gets an enumeration of any warning messages + Applies the operator + Arguments + - + - Gets any validation error + Represents the time span subtraction operator + + Allows queries to subtract time spans from each other + - + - Gets any result object that was parsed from the syntax + Gets the operator type - + - Syntax Validator for RDF Dataset Formats + Applies the operator + Arguments + - + - Creates a new RDF Dataset Syntax Validator + + Namespace which provides implementations of which provide the default numeric implementations of operators as required by the SPARQL specification + - Dataset Parser - + - Determines whether the data provided is valid syntax + Represents the numeric addition operator - Data - - + - Syntax Validator for validating RDF Graph syntaxes + Gets the operator type - + - Parser to use + Applies the operator + Arguments + - + - Creates a new RDF Syntax Validator using the given Parser + Abstract base class for numeric operators - Parser - + - Validates the given data to see if it is valid RDF Syntax + Operator is applicable if at least one input and all inputs are numeric - Data + Inputs - + - Syntax Validator for RDF Graph syntaxes which is strict (any warnings are treated as errors) + 
Represents the numeric division operator - + - Creates a new Strict RDF Syntax Validator + Gets the operator type - Parser - + - Validates the data to see if it is valid RDF syntax which does not produce any warnings + Applies the operator - Data + Arguments - + - Syntax Validator for SPARQL Queries + Represents the numeric multiplication operator - + - Creates a new SPARQL Query Validator + Gets the operator type + + + + + Applies the operator + Arguments + - + - Creates a new SPARQL Query Validator using the given Syntax + Represents the numeric subtraction operator - Query Syntax - + - Creates a new SPARQL Query Validator using the given Query Parser + Gets the operator type - Query Parser - + - Validates whether the given Data is a valid SPARQL Query + Applies the operator - Data + Arguments - + - Syntax Validator which validates SPARQL Results formats + Abstract base class for SPARQL Operators which also makes their configuration serializable - + - Creates a new SPARQL Results Format validator that uses the given parser + Gets the operator this implementation represents - SPARQL Results Parser - + - Validates the syntax to see if it is valid SPARQL Results + Gets whether the operator can be applied to the given inputs - Data to validate - + Inputs + True if applicable to the given inputs - + - A Syntax Validator for validating SPARQL Update Commands + Applies the operator + Inputs + - + - Validates whether the given data is a SPARQL Update Command + Serializes the configuration of the operator - Data - + Serialization Context - + - Represents Syntax Validation Results + Interface which represents an operator in SPARQL e.g. 
+ - + - Creates new Syntax Validation Results + Gets the Operator this is an implementation of - Whether the Syntax was valid - Validation Message - + - Creates new Syntax Validation Results + Gets whether the operator can be applied to the given inputs - Whether the Syntax was valid - Validation Message - Results Object + Inputs + True if applicable to the given inputs - + - Creates new Syntax Validation Results + Applies the operator to the given inputs - Whether the Syntax was valid - Validation Message - Results Object - Enumeration of Warnings + Inputs + + Thrown if an error occurs in applying the operator - + - Creates new Syntax Validation Results + Registry of SPARQL Operators - Whether the Syntax was valid - Validation Message - Results Object - Enumeration of Warnings - Error that occurred - + - Creates new Syntax Validation Results + Initializes the Operators registry - Whether the Syntax was valid - Validation Message - Error that occurred - + - Creates new Syntax Validation Results + Registers a new operator - Validation Message - Error that occurred + Operator - + - Whether the Syntax was valid + Removes the registration of an operator by instance reference + Operator Reference - + - Gets the Validation Message + Removes the registration of an operator by instance type of the operator + Operator - + - Gets the Warnings that were produced + Resets Operator registry to default state - + - Gets the Error that occurred + Returns whether the given operator is registered + Operator + + + Checking is done both by reference and instance type so you can check if an operator is registered even if you don't have the actual reference to the instance that registered + - + - Gets the Result Object that was produced + Gets all registered Operators + - + - Possible NTriples syntax modes + Gets all registered operators for the given Operator Type + Operator Type + - + - The original NTriples syntax as specified in the original RDF specification test cases 
specification + Tries to return the operator which applies for the given inputs + Operator Type + Operator + Inputs + - + - Standardized NTriples as specified in the RDF 1.1 NTriples specification + Possible SPARQL operand types - + - Parser for NTriples syntax + Addition - Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Creates a new instance of the parser + Subtraction - + - Creates a new instance of the parser + Multiplication - NTriples syntax to parse - + - Creates a new instance of the parser using the given token queue mode + Division - Token Queue Mode - + - Creates a new instance of the parser using the given syntax and token queue mode + + Namespace containing classes that are used in the Optimisation of SPARQL Queries. Includes the interfaces IQueryOptimiser and IAlgebraOptimiser which can be used to implement custom query optimisation. + - - Token Queue Mode - NTriples syntax to parse - + - Controls whether Tokeniser progress will be traced by writing output to the Console + Abstract Base Class for Algebra Transformers where the Transformer may care about the depth of the Algebra in the Algebra Tree - + - Controls whether Parser progress will be traced by writing output to the Console + Attempts to optimise an Algebra to another more optimal form + Algebra + - + - Gets/Sets the token queue mode used + Transforms the Algebra to another form tracking the depth in the Algebra tree + Algebra + Depth + - + - Gets/Sets the desired NTriples syntax + Determines whether the Optimiser can be applied to a given Query + Query + - + - Parses NTriples Syntax from the given Input Stream into Triples in the given Graph + Determines whether the Optimiser can be applied to a given Update Command Set - Graph to create Triples in - Arbitrary Input Stream to read input from + Command Set + - + - Parses NTriples Syntax from the given Input into Triples in the given Graph + A basic abstract implementation 
of a Query Optimiser - Graph to create Triples in - Arbitrary Input to read input from + + + Derived implementations may use override the virtual properties to control what forms of optimisation are used. Derived implementations must override the GetRankingComparer() method, optimisers which do not wish to change the order of Triple Patterns should return the NoReorderCompaper in their implementation as a basic sort of Triple Patterns is done even if ShouldReorder is overridden to return false + + - + - Parses NTriples Syntax from the given File into Triples in the given Graph + Causes the Graph Pattern to be optimised if it isn't already - Graph to create Triples in - Name of the file containing Turtle Syntax - Simply opens an StreamReader and uses the overloaded version of this function + Graph Pattern + Variables that have occurred prior to this Pattern - + - Parses NTriples Syntax from the given Input Stream using a RDF Handler + Gets a comparer on Triple Patterns that is used to rank Triple Patterns - RDF Handler to use - Input Stream to read input from + + + By overriding this in derived classes you can change how the Optimiser weights different patterns and thus the resultant ordering of Triple Patterns + - + - Parses NTriples Syntax from the given Input using a RDF Handler + Controls whether the Optimiser will attempt to reorder Triple Patterns - RDF Handler to use - Input to read input from + + It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. + - + - Parses NTriples Syntax from the given file using a RDF Handler + Controls whether the Optimiser will split Filters - RDF Handler to use - File to read from + + + If a Filter contains an and expression it may be split into its constituent parts and applied separately. 
This option only applies if filter placement also applies. + + + Defaults to false since it is unclear if this actually benefits performance + + - + - Tries to parse a URI + Controls whether the Optimiser will place Filters - Context - URI - URI Node if parsed successfully + + It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. + - + - Helper method for raising informative standardised Parser Errors + Controls whether the Optimiser will place Assignments - The Error Message - The Token that is the cause of the Error - + + It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. + - + - Internal Helper method which raises the Warning event if an event handler is registered to it + Tries to reorder patterns when the initial ordering is considered poor - Warning Message + Graph Pattern + Variables that are desired + Point at which to start looking for better matches + Point at which to move the better match to - + - Event which is raised when there is a non-fatal issue with the NTriples being parsed + Tries to place filters at the earliest point possible i.e. the first point after which all required variables have occurred + Graph Pattern + Filter to place + - + - Gets the String representation of the Parser which is a description of the syntax it parses + Tries to place assignments at the earliest point possible i.e. 
the first point after which all required variables have occurred + Graph Pattern + Assignment (LET/BIND) - + - Helper Class which defines some Test Functions for testing the Unicode Category of Characters + An Algebra Optimiser that optimises Algebra to use LazyBgp's wherever possible - + - Start of high surrogate range + Optimises an Algebra to a form that uses LazyBgp where possible + Algebra + Depth + + + + By transforming a query to use LazyBgp we can achieve much more efficient processing of some forms of queries + + - + - End of high surrogate range + Determines whether the query can be optimised for lazy evaluation + Query + - + - Start of low surrogate range + Returns that the optimiser does not apply to SPARQL Updates + Updates + - + - End of low surrogate range + An Algebra Optimiser that optimises Algebra to use AskBgp's wherever possible - + - Checks whether a given Character is considered a Letter + Optimises an Algebra to a form that uses AskBgp where possible - Character to Test + Algebra + Depth + + + By transforming a query to use AskBgp we can achieve much more efficient processing of some forms of queries + + - + - Checks whether a given Character is considered a Letter or Digit + Determines whether the query can be optimised for ASK evaluation - Character to Test + Query - + - Checks whether a given Character is considered a Letter Modifier + Returns that the optimiser does not apply to SPARQL Updates - Character to Test + Updates - + - Checks whether a given Character is considered a Digit + An Algebra Optimiser which implements the Filtered Product optimisation - Character to Test + + + A filtered product is implied by any query where there is a product over a join or within a BGP around which there is a Filter which contains variables from both sides of the product. So rather than computing the entire product and then applying the filter we want to push filter application into the product computation. 
+ + + + + + Optimises the Algebra to use implict joins where applicable + + Algebra - + - Indicates whether the specified object is a high surrogate. + Returns that this optimiser is applicable to all queries - - - true if the numeric value of the parameter ranges from U+D800 through U+DBFF; otherwise, false. - - The Unicode character to evaluate. 1 + Query + - + - Indicates whether the specified object is a low surrogate. + Returns that this optimiser is applicable to all updates - - - true if the numeric value of the parameter ranges from U+DC00 through U+DFFF; otherwise, false. - - The character to evaluate. 1 + Updates + - + - Converts the value of a UTF-16 encoded surrogate pair into a Unicode code point. + An Algebra Optimiser is a class that can transform a SPARQL algebra from one form to another typically for optimisation purposes - - - The 21-bit Unicode code point represented by the and parameters. - - A high surrogate code point (that is, a code point ranging from U+D800 through U+DBFF). - A low surrogate code point (that is, a code point ranging from U+DC00 through U+DFFF). - is not in the range U+D800 through U+DBFF, or is not in the range U+DC00 through U+DFFF. - 1 - + - Converts a Hex Escape into the relevant Unicode Character + Optimises the given Algebra - Hex code + Algebra to optimise + + Important: An Algebra Optimiser must guarantee to return an equivalent algebra to the given algebra. In the event of any error the optimiser should still return a valid algebra (or at least the original algebra) + - + - Converts a Hex Escape into the relevant UTF-16 codepoints + Determines whether an Optimiser is applicable based on the Query whose Algebra is being optimised - + SPARQL Query - + - Static Helper Class for dereferencing URIs and attempting to parse the results of a HTTP GET request to the URI into RDF + Determines whether an Optimiser is applicable based on the Update Command Set being optimised - -

Caching

- - As of the 0.2.2 release the loader has support for caching retrieved data locally built into it (for Graphs only), caching is done using ETags where the remote server provides them or just by a user-defineable 'freshness' criteria (i.e. number of hours retrieved resources should be cached for). By default this caching happens in the system temporary directory which means it is non-persistent i.e. if you run your application using dotNetRDF it may cache stuff during the session but once the application is closed the operating system may freely delete the cached data. If you wish to have a persistent cache then you can use the CacheDirectory property to set your own cache directory. Even when you set your own cache directory dotNetRDF will delete obsolete data from it over time though this will only happen when a new request invalidates previously cached data. - - - If you wish to completely control the Cache you can implement your own IUriLoaderCache implementation and use it by setting the Cache property - -
+ Update Command Set +
- + - Gets/Sets the Directory used for caching Graphs loaded from URIs + An Algebra Optimiser which implements the Identity Filter optimisation - + - Gets/Sets the amount of time Graphs are cached for + Optimises the Algebra to use Identity Filters where applicable - - This duration only applies to URIs which don't return an ETag as part of the HTTP response when they are deferenced - + Algebra + - + - Gets/Sets the Cache that is in use + Determines whether an expression is an Identity Expression - - Setting the Cache to null does not disable it, to disable caching use the Options.UriLoaderCaching property. - + Expression + Variable + Term + Whether it is an equals expression (true) or a same term expression (false) + - + - Determines whether the RDF behind the given URI is cached + Returns that this optimiser is applicable to all queries - URI + Query - - - Note: This does not guarantee that the cached content will be used if you load from the URI using the UriLoader. Whether the cached copy is used will depend on whether - - - + - Gets/Sets an optional User Agent string that will be appended to HTTP Requests + Returns that this optimiser is applicable to all updates + Updates + - + - Attempts to load a RDF Graph from the given URI into the given Graph + An Algebra Optimiser which implements the Implicit Join optimisation - Graph to assert Triples in - URI to attempt to get RDF from - Attempts to select the relevant Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can just open a HTTP Stream yourself and pass it to an instance of the correct Parser. - - - In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. - - - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. 
Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + An implict join is implied by a query like the following: - - - - - Attempts to load a RDF Graph from the given URI into the given Graph - - Graph to assert Triples in - URI to attempt to get RDF from - Parser to use - +
+            SELECT *
+            WHERE
+            {
+              ?x a ?type .
+              ?y a ?type .
+              FILTER (?x = ?y) .
+            }
+            
- Uses the supplied parser to attempt parsing regardless of the actual Content Type returned + Such queries can be very expensive to calculate, the implict join optimisation attempts to substitute one variable for the other and use a BIND to ensure both variables are visible outside of the graph pattern affected i.e. the resulting query looks like the following: +
+            SELECT *
+            WHERE
+            {
+              ?x a ?type .
+              ?x a ?type .
+              BIND (?x AS ?y)
+            }
+            
- In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. + Under normal circumstances this optimisation is only used when the implict join is denoted by a SAMETERM expression or the optimiser is sure the variables don't represent literals (they never occur in the Object position) since when value equality is involved substituing one variable for another changes the semantics of the query and may lead to unexpected results. Since this optimisation may offer big performance benefits for some queries (at the cost of potentially incorrect results) this form of the optimisation is allowed when you set to true. - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + This optimiser is also capable of generating special algebra to deal with the case where there is an implicit join but the substitution based optimisation does not apply because variables cannot be substituted into the inner algebra, in this case a is generated instead.
- + - Attempts to load a RDF Graph from the given URI using a RDF Handler + Optimises the Algebra to use implict joins where applicable - RDF Handler to use - URI to attempt to get RDF from - - - Attempts to select the relevant Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can just open a HTTP Stream yourself and pass it to an instance of the correct Parser. - - - In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. - - + Algebra + - + - Attempts to load a RDF Graph from the given URI using a RDF Handler + Determines whether an expression is an Implicit Join Expression - RDF Handler to use - URI to attempt to get RDF from - Parser to use - - - Uses the supplied parser to attempt parsing regardless of the actual Content Type returned - - - In the event that the URI is a File URI the FileLoader will be used instead - - - If the URI is a Data URI then the DataUriLoader will be used instead. - - + Expression + LHS Variable + RHS Variable + Whether the expression is an equals (true) or a same term (false) + - + - Attempts to load a RDF dataset from the given URI into the given Triple Store + Returns that this optimiser is applicable to all queries - Triple Store to load into - URI to attempt to get a RDF dataset from - Parser to use to parse the RDF dataset - - - If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can explicitly pass in the parser to use. 
- - + Query + - + - Attempts to load a RDF dataset from the given URI into the given Triple Store + Returns that this optimiser is applicable to all updates - Triple Store to load into - URI to attempt to get a RDF dataset from - - - Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - + Updates + - + - Attempts to load a RDF dataset from the given URI using a RDF Handler + A Query Optimiser is a class which optimises Graph Patterns in a Query by reordering Graph Patterns - RDF Handler to use - URI to attempt to get a RDF dataset from - Parser to use to parse the RDF dataset - - - If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can explicitly pass in the parser to use. - - - + - Attempts to load a RDF dataset from the given URI using a RDF Handler + Attempts to optimise the given Graph Pattern - RDF Handler to use - URI to attempt to get a RDF dataset from - - - Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - + Graph Pattern + Variables that are present prior to the pattern - + - Raises warning messages + An optimizer that handles a special case for ORDER BY + DISTINCT combinations which can significantly improve performance by eliminating duplicates prior to sorting when the default SPARQL behaviour is to do a potentially costly sort over many duplicates and then eliminate distincts. - Warning Message + + Only applies to queries which meet the following criteria: +
    +
  • Has an ORDER BY and a DISTINCT on the same level of the query
  • +
  • Selects a fixed list of variables, i.e. not a SELECT DISTINCT *
  • +
  • All variables used in the ORDER BY expressions also occur in the project list
  • +
+
- + - Raises store warning messages + Optimizes the given algebra - Warning Message + Algebra + Optimized algebra - + - Event which is raised when a parser that is invoked by the UriLoader notices a non-fatal issue with the RDF syntax + Returns true if the query is a SELECT DISTINCT or SELECT REDUCED and has an ORDER BY + Query + - + - Event which is raised when a store parser that is invoked by the UriLoader notices a non-fatal issue with the RDF dataset syntax + Returns that this is not applicable to updates + Update commands + - + - Attempts to load a RDF Graph from a URI asynchronously + An Algebra Optimiser which looks for unions and joins that can be evaluated in parallel to improve query evaluation speed in some cases - Graph to assert triple in - URI to load from - Parser to use - Callback to invoke when the operation completes - State to pass to the callback - Uses the supplied parser to attempt parsing regardless of the actual Content Type returned - - - In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. + Using this feature allows you to use experimental parallel SPARQL evaluation optimisations which may improve query evaluation speed for some queries. A query must either use UNION or have joins which are disjoint in order for any parallel evaluation to take place. - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + Users should be aware that using this optimiser may actually increase evaluation speed in some cases e.g. 
where either side of a disjoint join will return empty especially when it is the left hand side that will do so. - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + Also note that while use of this optimiser should not cause queries to return incorrect results as it does not change the semantics of the evaluation as it only parallelises independent operators we cannot guarantee that all parallelised queries will return identical results to their non-parallelised counterparts. If you find a query that you believe is giving incorrect results when used with this optimiser please test without the optimiser enabled to check that the apparent incorrect result is not an artifact of this optimisation. - + - Attempts to load a RDF Graph from a URI asynchronously + Optimises the algebra to use parallelised variants of Join and Union where possible - Graph to assert triple in - URI to load from - Callback to invoke when the operation completes - State to pass to the callback - - - Will attempt to autodetect the format of the RDF based on the Content-Type header of the HTTP response - - - In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. - - - Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. 
- - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. - - + Algebra + - + - Attempts to load a RDF Graph from a URI asynchronously using an RDF Handler + Returns that the optimser is applicable to all queries - RDF Handler to use - URI to load from - Parser to use - Callback to invoke when the operation completes - State to pass to the callback - - - Uses the supplied parser to attempt parsing regardless of the actual Content Type returned - - - In the event that the URI is a File URI the FileLoader will be used instead - - - If the URI is a Data URI then the DataUriLoader will be used instead. - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. - - + Query + - + - Attempts to load a RDF Graph from a URI asynchronously using an RDF Handler + Returns that the optimiser is not applicable to updates - RDF Handler to use - URI to load from - Callback to invoke when the operation completes - State to pass to the callback - - - Attempts to autodetect the RDF format based on the Content-Type header of the HTTP response - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
- - + Updates + - + - Attempts to load a RDF dataset asynchronously from the given URI into the given Triple Store + An algebra optimiser that looks for property functions specified by simple triple patterns in BGPs and replaces them with actual property function patterns - Triple Store to load into - URI to attempt to get a RDF dataset from - Parser to use to parse the RDF dataset - Callback to invoke when the operation completes - State to pass to the callback - - - If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can explicitly pass in the parser to use. - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. - - - + - Attempts to load a RDF dataset asynchronously from the given URI into the given Triple Store + Optimises the algebra to include property functions - Triple Store to load into - URI to attempt to get a RDF dataset from - Callback to invoke when the operation completes - State to pass to the callback - - - Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
- - + Algebra + - + - Attempts to load a RDF dataset asynchronously from the given URI using a RDF Handler + Returns that the optimiser is applicable - RDF Handler to use - URI to attempt to get a RDF dataset from - Parser to use to parse the RDF dataset - Callback to invoke when the operation completes - State to pass to the callback - - - If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If you know ahead of time the Content Type you can explicitly pass in the parser to use. - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. - - + Query + - + - Attempts to load a RDF dataset asynchronously from the given URI using a RDF Handler + Returns that the optimiser is applicable - RDF Handler to use - URI to attempt to get a RDF dataset from - Callback to invoke when the operation completes - State to pass to the callback - - - Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. - - - If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
- - + Update Commands + - + - Possible RDF/XML Parse Types + Default SPARQL Query Optimiser - + - No specific Parse Type is specified (Default Parsing Rules will be used) + Gets the Default Comparer for Triple Patterns to rank them + - + - Literal Parse Type + SPARQL Query Optimiser which does no reordering - + - Resource Parse Type + Gets that Triple Patterns should not be reordered - + - Collection Parse Type + Gets a comparer which does not change the order of Triple Patterns + - + - Other Parse Type + A Comparer which ranks all Triple Patterns as equal - This is never used since any other Parse Type encountered is assumed to be Literal as per the RDF/XML Specification - + - Possible RDF/XML Parser Modes + Compares two Triple Patterns are always returns that they are ranking equal + First Triple Pattern + Second Triple Pattern + - + - Uses DOM Based parsing (not supported under Silverlight) + Static Helper class which provides global registry of Algebra Optimisers and the global Query Optimiser - + - Uses Streaming Based parsing (default) + Namespace URI for the Optimiser Statistics vocabulary - + - Parser for RDF/XML syntax + Gets/Sets the global Query Optimiser that is used by default + + + Note: May be overridden by the Optimiser setting on a SparqlQueryParser + + + Unlike previous releases a Query may be reoptimised using a different optimiser if desired by calling the Optimise() method again and providing a different Optimiser. This may not always fully reoptimise the query since the first optimisation will have caused any Filters and Assignments to be placed in the Triple Pattern + + + Warning: Setting this to null has no effect, to disable automatic optimisation use the global property Options.QueryOptimisation. Even with this option disabled a Query can still be optimised manually by calling its Optimise() method. 
+ + - + - Controls whether Parser progress will be traced by writing output to the Console + Gets the global Algebra Optimisers that are in use + + + Unlike Query Optimisation multiple Algebra Optimisations may apply. Algebra optimisers may also be specified and apply locally by the use of the relevant properties on the SparqlQueryParser and SparqlQuery classes. Those specified on a parser will automatically be passed through to all queries parsed by the parser. Locally specified optimisers apply prior to globally specified ones. + + - + - Creates a new RDF/XML Parser + Adds a new Algebra Optimiser + Optimiser - + - Creates a new RDF/XML Parser which uses the given parsing mode + Removes an Algebra Optimiser - RDF/XML Parse Mode + - + - Reads RDF/XML syntax from some Stream into the given Graph + Resets Optimisers to default settings - Graph to create Triples in - Input Stream - + - Reads RDF/XML syntax from some Input into the given Graph + The Strict Algebra Optimiser is an optimiser that takes our BGPs which typically contain placed FILTERs and BINDs and transforms them into their strict algebra form using Filter() and Extend() - Graph to create Triples in - Input to read from - + - Reads RDF/XML syntax from some File into the given Graph + Optimises BGPs in the Algebra to use Filter() and Extend() rather than the embedded FILTER and BIND - Graph to create Triples in - Filename of File containg XML/RDF - Simply opens a Stream for the File then calls the other version of Load to do the actual parsing + Algebra to optimise + - + - Reads RDF/XML syntax from some Stream using a RDF Handler + Returns that the optimiser is applicable to all queries - RDF Handler to use - Input Stream + Query + - + - Reads RDF/XML syntax from some Input using a RDF Handler + Returns that the optimiser is applicable to all updates - RDF Handler to use - Input to read from + Updates + - + - Reads RDF/XML syntax from a file using a RDF Handler + An optimiser for walking algebra and 
expression trees and replacing a Variable with another Variable or a Constant - RDF Handler to use - File to read from - + - Reads RDF/XML from the given XML Document + Create a transform that replaces one variable with another - Graph to load into - XML Document + Find Variable + Replace Variable - + - Helper Method for raising the Warning event + Create a transform that replaces a variable with a constant - Warning Message + Find Variable + Replace Constant - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Gets/Sets whethe the Transformer is allowed to replace objects + + + The transformer will intelligently select this depending on whether it is replacing with a constant (defaults to true) or a variable (defaults to false), when replacing a variable the behaviour changes automatically. If you set it explicitly the transformer will respect your setting regardless. + + - + - Function which does the actual Parsing by invoking the various steps of the Parser + Attempts to do variable substitution within the given algebra - Parser Context + Algebra + - + - Starts the Parsing of the flattened Event Tree by calling the appropriate Grammar Production based on the type of the First Event in the Queue + Returns false because this optimiser is never globally applicable + Query + - + - Implementation of the RDF/XML Grammar Production 'doc' + Returns false because this optimiser is never globally applicable - Parser Context - Root Event to start applying Productions from + Update Commands + - + - Implementation of the RDF/XML Grammar Production 'RDF' + Tries to substitute variables within primary expressions - Parser Context - RDF Element to apply Production to + Expression + - + - Implementation of the RDF/XML Grammar Production 'nodeElementList' + Abstract implementation of an algebra optimiser and expression transformer which optimises the algebra to replace any Node terms with Virtual Node terms for 
more efficient querying of virtualised RDF data - Parser Context - Queue of Events to apply the Production to + Node ID Type + Graph ID Type - + + + Virtual RDF Provider + + + + + Creates a new Virtual Algebra Optimiser + + Virtual RDF Provider + + + + Optimises the algebra so that all Node terms are virtualised + + Algebra + + + - Implementation of the RDF/XML Grammar Production 'nodeElement' + Transforms an expression so Node terms are virtualised - Parser Context - Queue of Events that make up the Node Element and its Children to apply the Production to + Expression + - + - Implementation of the RDF/XML Grammar Production 'propertyEltList' + Substitutes a primary expression which is a Node term for a virtual Node term - Parser Context - Queue of Events to apply the Production to - Parent Event (ie. Node) of the Property Elements + Expression + - + - Implementation of the RDF/XML Grammar Production 'propertyElt' + Creates a virtual Node based on a given Value - Parser Context - Queue of Events that make up the Property Element and its Children - Parent Event (ie. Node) of the Property Element + Node ID + Value + - + - Implementation of the RDF/XML Grammar Production 'resourcePropertyElt' + Returns that the optimiser is applicable to all queries - Parser Context - Queue of Events that make up the Resource Property Element and its Children - Parent Event (ie. Node) of the Property Element + Query + - + - Implementation of the RDF/XML Grammar Production 'literalPropertyElt' + Returns that the optimiser is applicable to all updates - Parser Context - Queue of Events that make up the Literal Property Element and its Children - Parent Event (ie. 
Node) of the Property Element + Updates + - + - Implementation of the RDF/XML Grammar Production 'parseTypeLiteralPropertyElt' + A concrete implementation of a Virtual Algebra Optimiser where the virtual IDs are simply integers - Parser Context - Queue of Events that make up the Literal Parse Type Property Element and its Children - Parent Event (ie. Node) of the Property Element - + - Implementation of the RDF/XML Grammar Production 'parseTypeResourcePropertyElt' + Creates a new Simple Virtual Algebra Optimiser - Parser Context - Queue of Events that make up the Resource Parse Type Property Element and its Children - Parent Event (ie. Node) of the Property Element + Virtual RDF provider - + - Implementation of the RDF/XML Grammar Production 'parseTypeCollectionPropertyElt' + Creates a new Virtual Node using the virtual RDF provider this optimiser was instantiated with - Parser Context - Queue of Events that make up the Collection Parse Type Property Element and its Children - Parent Event (ie. Node) of the Property Element + Virtual Node ID + Node Value + - + - Implementation of the RDF/XML Grammar Production 'emptyPropertyElt' + The Weighted Optimiser is a Query Optimiser that orders Triple Patterns based on weighting computed calculated against - Parser Context - Element Event for the Empty Property Element - Parent Event (ie. 
Node) of the Property Element - + - Applies the Namespace Attributes of an Element Event to the Namespace Map + Default Weight for Subject Terms - Parser Context - Element Event - + - Resolves a Uri Reference into a Uri Node against a given Base Uri + Default Weight for Predicate Terms - Parser Context - Uri Reference to Resolve - Base Uri to Resolve against - - + - Reifies a Triple + Default Weight for Object Terms - Parser Context - Uri Reference for the Reified Triple - Subject of the Triple - Predicate of the Triple - Object of the Triple - + - Helper function which inserts an Element back on the front of a Queue + Default Weight for Variables - Queue to insert onto the Front of - Event to put on the front of the Queue - + - Applies List Expansion to the given Event + Creates a new Weighted Optimiser - Element to apply List Expansion to - Uri Reference for the List Item - List Expansion only works on Element Events - + - Validates that an ID is correctly formed and has only been used once in the context of a given Subject + Creates a new Weighted Optimiser which reads weights from the given RDF Graph - Parser Context - ID to Validate - Subject that the ID pertains to + Graph - + - Tracing function used when Parse Tracing is enabled + Creates a new Weighted Optimiser which reads weights from the given RDF Graph - Production + Graph + Default Subject Weight + Default Predicate Weight + Default Object Weight - + - Tracing function used when Parse Tracing is enabled + Gets the comparer used to order the Triple Patterns based on their computed weightings - Production - + - + - Gets the String representation of the Parser which is a description of the syntax it parses + Represents Weightings for the WeightedOptimiser - - + - Static Helper Class which contains a set of Functions which model Name and Character validations as laid - out in the W3C XML and XML Namespaces specification + + Namespace containing classes used to order the results of SPARQL queries + - - 
These are needed in the XML/RDF Parser - - Also contains the Date Time format string used to format .Net's DateTime type into a String whose format conforms to the XML Schema Datatypes specification - - - - http://www.w3.org/TR/REC-xml/ - http://www.w3.org/TR/REC-xml-names/ - http://www.w3.org/TR/xmlschema-2/ - + - Namespace for XML + Interface for classes that represent SPARQL ORDER BY clauses + A SPARQL Order By clause provides a list of orderings, when parsed into the dotNetRDF model this is represented as a single ISparqlOrderBy for the first term in the clause chained to ISparqlOrderBy's for each subsequent term via the Child property. - + - Namespace for XML Namespaces + Gets/Sets the Child Ordering that applies if the two Objects are considered equal - + - Namespace for XML Schema + Sets the Evaluation Context for the Order By - + - Date Time Formatting string which meets the specified format for xsd:dateTime + Sets whether the Ordering is Descending - - Use with the DateTime.ToString() method to format a DateTime into appropriate string format - - + - Date Time Formatting string which meets the specified format for xsd:dateTime, this formatting string is imprecise in the sense that it does not preserve the fractional seconds. + Gets whether the Ordering is simple (i.e. 
applies on variables only) - - Use with the DateTime.ToString() method to format a DateTime into appropriate string format - - + - Date Time Formatting string which meets the specified format for xsd:date + Gets all the Variables used in the Ordering - - Use with the DateTime.ToString() method to format a DateTime into appropriate string format - - + - Date Time Formatting string which meets the the specified format for xsd:time + Gets the Expression used to do the Ordering - - Use with the DateTime.ToString() method to format a DateTime into appropriate string format - - + - Date Time Formatting string which meets the the specified format for xsd:time, this formatting string is imprecise in the sense that it does not preserve the fractional seconds. + Generates a Comparer than can be used to do Ordering based on the given Triple Pattern - - Use with the DateTime.ToString() method to format a DateTime into appropriate string format - + Triple Pattern + - + - Data Type Uri Constants for XML Schema Data Types + Base Class for implementing Sparql ORDER BYs - + - Data Type Uri Constants for XML Schema Data Types + Holds the Child Order By (if any) - + - Data Type Uri Constants for XML Schema Data Types + Stores the Evaluation Context - + - Data Type Uri Constants for XML Schema Data Types + Modifier used to make ordering Descending + Implementations derived from this class should multiply their comparison results by the modifier to automatically provide Ascending/Descending order - + - Data Type Uri Constants for XML Schema Data Types + Gets/Sets the Child Order By - + - Data Type Uri Constants for XML Schema Data Types + Sets the Evaluation Context for the Ordering - + - Data Type Uri Constants for XML Schema Data Types + Sets the Ordering to Descending - + - Data Type Uri Constants for XML Schema Data Types + Gets whether the Ordering is Simple - + - Data Type Uri Constants for XML Schema Data Types + Gets all the Variables used in the Ordering - + - Data Type Uri 
Constants for XML Schema Data Types + Gets the Expression used in the Ordering - + - Data Type Uri Constants for XML Schema Data Types + Abstract Compare method which derived classes should implement their ordering in + A Set + A Set + - + - Data Type Uri Constants for XML Schema Data Types + Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Triple Pattern + - + - Data Type Uri Constants for XML Schema Data Types + Gets the String representation of the Order By + - + - Data Type Uri Constants for XML Schema Data Types + An ORDER BY which orders on the values bound to a particular variable - + - Data Type Uri Constants for XML Schema Data Types + Creates a new Ordering based on the Value of a given Variable + Variable to order upon - + - Data Type Uri Constants for XML Schema Data Types + Compares Sets on the basis of their values for the Variable the class was instaniated with + A Set + A Set + - + - Data Type Uri Constants for XML Schema Data Types + Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Triple Pattern + - + - Data Type Uri Constants for XML Schema Data Types + Gets whether the Ordering is Simple - + - Data Type Uri Constants for XML Schema Data Types + Gets all the Variables used in the Ordering - + - Data Type Uri Constants for XML Schema Data Types + Gets the Variable Expression Term used in the Ordering - + - Data Type Uri Constants for XML Schema Data Types + Gets the String representation of the Order By + - + - Data Type Uri Constants for XML Schema Data Types + An ORDER BY which orders based on the values of a Sparql Expression - + - Data Type Uri Constants for XML Schema Data Types + Creates a new Order By using the given Expression + Expression to order by - + - Data Type Uri Constants for XML Schema Data Types + Orders the sets based on the values resulting from evaluating the expression for both solutions + A Set + A Set + - + - Data Type Uri Constants for XML 
Schema Data Types + Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Triple Pattern + - + - Data Type Uri Constants for XML Schema Data Types + Gets whether the Ordering is Simple - + - Array of Constants for Data Types that are supported by the Literal Node CompareTo method + Gets all the Variables used in the Ordering - + - Returns whether a String is a Name as defined by the W3C XML Specification + Gets the Expression used for Ordering - String to test - - + - Returns whether a String is a NCName as defined by the W3C XML Namespaces Specification + Gets the String representation of the Order By - String to test - http://www.w3.org/TR/REC-xml-names/#NT-NCName - + - Returns whether a Character is a NameChar as defined by the W3C XML Specification + + Contains the classes which model property paths in SPARQL, they can be used to both represent and evaluate a property path as part of a SPARQL query. + - Character to Test - - http://www.w3.org/TR/REC-xml/#NT-NameChar - + - Returns whether a Character is a NameChar as defined by the W3C XML Specification + Represents Alternative Paths - Character to test - - http://www.w3.org/TR/REC-xml/#NT-NameChar - + - Returns whether the given Type refers to one of the types supported by the LiteralNode CompareTo method + Creates a new Alternative Path - Data Type Uri - + LHS Path + RHS Path - + - Returns whether the given Type refers to one of the types supported by the LiteralNode CompareTo method + Gets the String representation of the Path - Data Type Uri - + - Gets the Data Type Uri of the given Node if it has a supported type + Converts a Path into its Algebra Form - Node + Path Transformation Context - - - Only ILiteralNode's can have a Data Type - - - The function only returns the Data Type Uri (as a String) if the Data Type of the Literal is one of the supported Data Types - - - + - Gets the Compatible Supported Data Type assuming the two Nodes are Literals with support types and 
that those types are compatible + Abstract Base Class for Binary Path operators - A Node - A Node - Whether the compatible type should be the wider type - - - - Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics - - - + - Gets the Compatible Supported Data Type assuming the two Nodes are Literals with support types and that those types are compatible + Parts of the Path - A Node - A Node - - - - Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics - - - + - Gets the Compatible Supported Data Type for the two Data Types + Parts of the Path - A Data Type - A Data Type - - - - Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics - - - + - Gets the Compatible Supported Data Type for the two Data Types + Creates a new Binary Path - A Data Type - A Data Type - Whether the compatible type should be the wider type - - - - Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics - - + LHS Path + RHS Path - + - Parser for reading SPARQL Results that have been serialized in the SPARQL Results CSV format + Gets the LHS Path component - + - Loads a Result Set from an Input Stream + Gets the RHS Path component - Result Set to load into - Input Stream to read from - + - Loads a Result Set from a File + Converts a Path into its Algebra Form - Result Set to load into - File to load from + Path Transformation Context + - + - Loads a Result Set from an Input + Gets the String representation of the Path - Result Set to load into - Input to read from + - + - Loads a Result Set from an Input Stream using a Results Handler + Abstract Base Class for Unary Path operators - Results Handler to use - Input 
Stream to read from - + - Loads a Result Set from a File using a Results Handler + Path - Results Handler to use - Filename to load from - + - Loads a Result Set from an Input using a Results Handler + Creates a new Unary Path - Results Handler to use - Input to read from + Path - + - Event which is raised when the parser encounters a non-fatal issue with the syntax being parsed + Gets the Inner Path - + - Gets the String representation of the Parser + Converts a Path into its Algebra Form + Path Transformation Context - + - Parser for reading SPARQL Results that have been serialized in the SPARQL Results TSV format + Gets the String representation of the Path + - + - Loads a Result Set from an Input Stream + Represents a Cardinality restriction on a Path - Result Set to load into - Input Stream to read from - + - Loads a Result Set from a File + Creates a new Cardinality Restriction - Result Set to load into - File to load from + Path - + - Loads a Result Set from an Input + Gets the Minimum Cardinality of the Path - Result Set to load into - Input to read from - + - Loads a Result Set from an Input Stream using a Results Handler + Gets the Maximum Cardinality of the Path - Results Handler to use - Input Stream to read from - + - Loads a Result Set from a File using a Results Handler + Represents a Fixed Cardinality restriction on a Path - Results Handler to use - Filename to load from - + - Loads a Result Set from an Input using a Results Handler + Creates a new Fixed Cardinality restriction - Results Handler to use - Input to read from + Path + N - + - Event which is raised when the parser encounters a non-fatal issue with the syntax being parsed + Gets the Maximum Cardinality of the Path - + - Gets the String representation of the Parser + Gets the Minimum Cardinality of the Path - - + - An extended for use in parsing + Converts a Path into its Algebra Form + Path Transformation Context + - + - Gets whether the end of the stream has been reached + Gets the 
String representation of the Path + - + - Creates a new Blocking Text Reader + Represents a Zero or More cardinality restriction on a Path - Text Reader to wrap - Buffer Size - - If the given TextReader is already a Blocking Text Reader this is a no-op - - + - Creates a new Blocking Text Reader + Creates a new Zero or More cardinality restriction - Text Reader to wrap - - If the given TextReader is already a Blocking Text Reader this is a no-op - + Path - + - Creates a new Blocking Text Reader + Gets the Maximum Cardinality of the Path - Input Stream - Buffer Size - + - Creates a new Blocking Text Reader + Gets the Minimum Cardinality of the Path - Input Stream - + - Creates a new Blocking Text Reader + Gets the String representation of the Path - Input reader - + - Creates a new Blocking Text Reader + Converts a Path into its Algebra Form - Input reader - Buffer Size + Path Transformation Context - + - Creates a new non-blocking Text Reader + Represents a Zero or One cardinality restriction on a Path - Input reader - - + - Creates a new non-blocking Text Reader + Creates a new Zero or One cardinality restriction - Input reader - Buffer Size - + Path - + - Abstract class representing a text reader that provides buffering on top of another text reader + Gets the Maximum Cardinality of the Path - + - Default Buffer Size + Gets the Minimum Cardinality of the Path - + - Buffer array + Gets the String representation of the Path + - + - Current buffer position + Converts a Path into its Algebra Form + Path Transformation Context + - + - Current buffer size (may be less than length of buffer array) + Represents a One or More cardinality restriction on a Path - + - Whether underlying reader has been exhausted + Creates a new One or More cardinality restriction + Path - + - Underlying reader + Gets the Maximum Cardinality of the Path - + - Creates a buffered reader + Gets the Minimum Cardinality of the Path - - - + - Requests that the buffer be filled + Gets the String 
representation of the Path + - + - Reads a sequence of characters from the buffer in a blocking way + Converts a Path into its Algebra Form - Buffer - Index at which to start writing to the Buffer - Number of characters to read - Number of characters read + Path Transformation Context + - + - Reads a sequence of characters from the buffer + Represents a N or More cardinality restriction on a Path - Buffer - Index at which to start writing to the Buffer - Number of characters to read - Number of characters read - + - Reads a single character from the underlying Text Reader + Creates a new N or More cardinality restriction - Character read or -1 if at end of input + Path + Minimum Cardinality - + - Peeks at the next character from the underlying Text Reader + Gets the Maximum Cardinality of the Path - Character peeked or -1 if at end of input - + - Gets whether the end of the input has been reached + Gets the Minimum Cardinality of the Path - + - Closes the reader and the underlying reader + Gets the String representation of the Path + - + - Disposes of the reader and the underlying reader + Converts a Path into its Algebra Form - Whether this was called from the Dispose() method + Path Transformation Context + - + - The BlockingTextReader is an implementation of a designed to wrap other readers which may or may not have high latency and thus ensures that premature end of input bug is not experienced. + Represents a Zero to N cardinality restriction on a Path - - - This is designed to avoid premature detection of end of input when the input has high latency and the consumer tries to read from the input faster than it can return data. This derives from and ensures the buffer is filled by calling the ReadBlock() method of the underlying TextReader thus avoiding the scenario where input appears to end prematurely. 
- - - + - Creates a new Blocking Text Reader + Creates a new Zero to N cardinality restriction - Text Reader to wrap - Buffer Size + Path + Maximum Cardinality - + - Creates a new Blocking Text Reader + Gets the Maximum Cardinality of the Path - Text Reader to wrap - + - Creates a new Blocking Text Reader + Gets the Minimum Cardinality of the Path - Input Stream - Buffer Size - + - Creates a new Blocking Text Reader + Gets the String representation of the Path - Input Stream + - + - Fills the Buffer + Converts a Path into its Algebra Form + Path Transformation Context + - + - The NonBlockingTextReader is an implementation of a designed to wrap other readers where latency is known not to be a problem and we don't expect to ever have an empty read occur before the actual end of the stream + Represents a N to M cardinality restriction on a Path - - Currently we only use this for file and network streams, you can force this to never be used with the global static option - - + - Fills the buffer in a non-blocking manner + Creates a new N to M cardinality restriction + Path + Minimum Cardinality + Maximum Cardinality - + - A Class for parsing RDF data from Data URIs + Gets the Maximum Cardinality of the Path - - - Data URIs use the data: scheme and are defined by the IETF in RFC 2397 and provide a means to embed data directly in a URI either in Base64 or ASCII encoded format. This class can extract the data from such URIs and attempt to parse it as RDF using the StringParser - - - The parsing process for data: URIs involves first extracting and decoding the data embedded in the URI - this may either be in Base64 or ASCII encoding - and then using the StringParser to actually parse the data string. 
If the data: URI defines a MIME type then a parser is selected (if one exists for the given MIME type) and that is used to parse the data, in the event that no MIME type is given or the one given does not have a corresponding parser then the StringParser will use its basic heuristics to attempt to auto-detect the format and select an appropriate parser. - - - If you attempt to use this loader for non data: URIs then the standard UriLoader is used instead. - - - + - Loads RDF data into a Graph from a data: URI + Gets the Minimum Cardinality of the Path - Graph to load into - URI to load from - - Invokes the normal UriLoader instead if a the URI provided is not a data: URI - - Thrown if the metadata portion of the URI which indicates the MIME Type, Character Set and whether Base64 encoding is used is malformed - + - Loads RDF data using an RDF Handler from a data: URI + Gets the String representation of the Path - RDF Handler - URI to load from - - Invokes the normal UriLoader instead if a the URI provided is not a data: URI - - Thrown if the metadata portion of the URI which indicates the MIME Type, Character Set and whether Base64 encoding is used is malformed + - + - Static Helper Class for loading Graphs and Triple Stores from Embedded Resources + Converts a Path into its Algebra Form + Path Transformation Context + - + - Loads a Graph from an Embedded Resource + Represents an Inverse Path - Graph to load into - Assembly Qualified Name of the Resource to load - Parser to use (leave null for auto-selection) - + - Loads a Graph from an Embedded Resource + Creates a new Inverse Path - RDF Handler to use - Assembly Qualified Name of the Resource to load - Parser to use (leave null for auto-selection) + Path - + - Loads a Graph from an Embedded Resource + Gets the String representation of the Path - RDF Handler to use - Assembly Qualified Name of the Resource to load + - + - Loads a Graph from an Embedded Resource + Converts a Path into its Algebra Form - Graph to 
load into - Assembly Qualified Name of the Resource to load - - Parser will be auto-selected - + Path Transformation Context + - + - Internal Helper method which does the actual loading of the Graph from the Resource + Represents a Path expression in SPARQL - RDF Handler to use - Assembly to get the resource stream from - Full name of the Resource (without the Assembly Name) - Parser to use (if null then will be auto-selected) - + - Loads a RDF Dataset from an Embedded Resource + Converts a Path into its Algebra Form - Store to load into - Assembly Qualified Name of the Resource to load - Parser to use (leave null for auto-selection) + Path Transformation Context + - + - Loads a RDF Dataset from an Embedded Resource + Gets the String representation of a Path - Store to load into - Assembly Qualified Name of the Resource to load - - Parser will be auto-selected - + - + - Loads a RDF Dataset from an Embedded Resource + Represents a Negated Property Set - RDF Handler to use - Assembly Qualified Name of the Resource to load - Parser to use (leave null for auto-selection) - + - Loads a RDF Dataset from an Embedded Resource + Creates a new Negated Property Set - RDF Handler to use - Assembly Qualified Name of the Resource to load + Negated Properties + Inverse Negated Properties - + - Internal Helper method which does the actual loading of the Triple Store from the Resource + Gets the Negated Properties - RDF Handler to use - Assembly to get the resource stream from - Full name of the Resource (without the Assembly Name) - Parser to use (if null will be auto-selected) - + - Interface for RDFa Vocabularies + Gets the Inverse Negated Properties - + - Gets whether a Vocabulary contains a Term + Converts a Path into its Algebra Form - Term + Path Transformation Context - + - Resolves a Term in the Vocabulary + Gets the String representation of the Path - Term - + - Adds a Term to the Vocabulary + Class representing a potential path used during the evaluation of complex 
property paths - Term - URI - + - Adds a Namespace to the Vocabulary + Creates a new Potential Path - Prefix - Namespace URI + Start Point of the Path + Current Point on the Path - + - Merges another Vocabulary into this one + Creates a new Potential Path which is a copy of an existing Path - Vocabulary + Potentuak Path - + - Gets/Sets the Vocabulary URI + Gets the Start of the Path - + - Gets the Term Mappings + Gets/Sets the Current Point of the Path - in the case of a complete Path this is the end of the Path - + - Gets the Namespace Mappings + Gets/Sets whether the Path is complete - + - Vocabulary for XHTML+RDFa (and HTML+RDFa) + Gets/Sets whether the Path is a dead-end + + + This may be useful information as it can help stop us uneccessarily regenerating partial paths which are dead ends + + - + - Gets whether the Vocabulary contains a Term + Gets/Sets whether the Path is Partial - Term - + + While this may seem something of a misnomer what this represents is that the path is only part of the overall path so in the case of a sequence path we'll make all incomplete paths from the first part of the sequence as partial so they can't be themselves completed but they can be used to form complete paths + - + - Resolves a Term in the Vocabulary + Gets/Sets the Length of the Path - Term - - + - Adds a Term to the Vocabulary + Gets the Hash Code for the potential path - Term - URI - Thrown since this vocabulary is fixed and cannot be changed + - + - Adds a Namespace to the Vocabulary + Checks whether the other object is an equivalent potential path - Prefix - Namespace URI - Thrown since this vocabulary is fixed and cannot be changed + Object to test + - + - Merges another Vocabulary into this one + Gets the String representation of the path - Vocabulary - Thrown since this vocabulary is fixed and cannot be changed + - + - Gets the Term Mappings + Evaluation Context for evaluating complex property paths in SPARQL - + - Gets the Namespace Mappings + Creates a new Path 
Evaluation Context + SPARQL Evaluation Context + Start point of the Path + End point of the Path - + - Gets/Sets the Vocabulary URI + Creates a new Path Evaluation Context copied from the given Context - Set throws this since this vocabulary is fixed and cannot be changed + Path Evaluation Context - + - Represents a dynamic vocabulary for RDFa + Gets the SPARQL Evaluation Context - + - Creates a new set of Term Mappings + Gets/Sets whether this is the first part of the Path to be evaluated - + - Creates a new set of Term Mappings with the given Vocabulary URI + Gets/Sets whether this is the last part of the Path to be evaluated - Vocabulary URI - + - Creates a new set of Term Mappings from the given Vocabulary + Gets/Sets whether the Path is currently reversed - Vocabulary - + - Merges another Vocabulary into this one + Gets the hash set of incomplete paths generated so far - Vocabulary - + - Gets whether the Vocabulary contains a Term + Gets the hash set of complete paths generated so far - Term - - + - Resolves a Term in the Vocabulary + Gets the pattern which is the start of the path - Term - - + - Adds a Namespace to the Vocabulary + Gets the pattern which is the end of the path - Prefix - Namespace URI - + - Adds a Term to the Vocabulary + Gets whether pattern evaluation can be aborted early - Term - URI + + Useful when both the start and end of the path are fixed (non-variables) which means that we can stop evaluating once we find the path (if it exists) + - + - Gets the Term Mappings + Gets/Sets whether new paths can be introduced when not evaluating the first part of the path + + + This is required when we have a path like ?x foaf:knows* /foaf:knows ?y and ?x is not bound prior to the path being executed. 
Since we permit zero-length paths we should return the names of everyone even if they don't know anyone + + + The cases where ?x is already bound are handled elsewhere as we can just introduce zero-length paths for every existing binding for ?x + + - + - Gets the Namespace Mappings + Adds a new path to the list of current incomplete paths + Path - + - Gets/Sets the Vocabulary URI + Adds a new path to the list of complete paths + Path - + - Static Helper class which can be used to validate IRIs according to RFC 3987 + Transform Context class that is used in the Path to Algebra Transformation process - - Some valid IRIs may be rejected by these validating functions as the IRI specification allows character codes which are outside the range of the .Net char type - - + - Gets whether a string matches the IRI production + Creates a new Path Transform Context - String - + Subject that is the start of the Path + Object that is the end of the Path - + - Gets whether a string matches the ihier-part production + Creates a new Path Transform Context from an existing context - String - + Context - + - Gets whether a string matches the IRI-reference production + Returns the BGP that the Path Transform produces - String - + - Gets whether a string matches the absolute-IRI production + Gets the next available temporary variable - String - + - Gets whether a string matches the irelative-ref production + Adds a Triple Pattern to the Path Transform - String - + Triple Pattern - + - Gets whether a string matches the irelative-part production + Gets the Next ID to be used - String - - + - Gets whether a string matches the iauthority production + Gets/Sets the Subject of the Triple Pattern at this point in the Path Transformation - String - - + - Gets whether a string matches the userinfo production + Gets/Sets the Object of the Triple Pattern at this point in the Path Transformation - String - - + - Gets whether a string matches the ihost production + Gets/Sets the Object at the end 
of the Pattern - String - - + - Gets whether a string matches the ireg-name production + Resets the current Object to be the end Object of the Path - String - - + - Gets whether a string matches the ipath production + Gets/Sets whether this is the Top Level Pattern - String - - + - Gets whether a string matches the ipath-abempty production + Creates a Triple Pattern - String + Subject + Property Path + Object - + - Gets whether a string matches the ipath-absolute production + Represents a Predicate which is part of a Path - String - - + - Gets whether a string matches the ipath-noscheme production + Creates a new Property - String - + Predicate - + - Gets whether a string matches the ipath-rootless production + Gets the Predicate this part of the Path represents - String - - + - Gets whether a string matches the ipath-empty production + Evaluates the Path using the given Path Evaluation Context - String - + Path Evaluation Context - + - Gets whether a string matches the isegment production + Gets the String representation of the Path - String - + - Gets whether a string matches the isegment-nz production + Converts a Path into its Algebra Form - String + Path Transformation Context - + - Gets whether a string matches the isegment-nz-nc production + Represents a standard forwards path - String - - + - Gets whether a string matches the ipchar production + Creates a new Sequence Path - String - + LHS Path + RHS Path - + - Gets whether a string matches the iquery production + Gets the String representation of the Path - String - + - Gets whether a string matches the ifragment production + Converts a Path into its Algebra Form - String + Path Transformation Context - + - Gets whether a character matches the iunreserved production + Namespace for Pattern Classes that are used in the Graph and Triple matching process for executing SPARQL queries on IInMemoryQueryableStore objects - Character - - + - Gets whether a character matches the ucschar production + Base class for 
representing all types of Triple Patterns in SPARQL queries - Character - - - Not all strings that will match the official ucschar production will be matched by this function as the ucschar production permits character codes beyond the range of the .Net char type - - + - Gets whether a string matches the scheme production + Stores the list of variables that are used in the Pattern - String - - + - Gets whether a string matches the port production + Evaluates the Triple Pattern in the given Evaluation Context - String - + Evaluation Context - + - Gets whether a string matches the IP-literal production + Returns whether the Triple Pattern is an accept all - String - - + - Gets whether a string matches the IPvFuture production + Gets the Triple Pattern Type - String - - + - Gets whether a string matches the IPv6address production + Gets the List of Variables used in the Pattern - String - + + These are sorted in alphabetical order + - + - Gets whether a string matches the h16 production + Gets the enumeration of floating variables in the pattern i.e. variables that are not guaranteed to have a bound value - String - - + - Gets whether a string matches the ls32 production + Gets the enumeration of fixed variables in the pattern i.e. variables that are guaranteed to have a bound value - String - - + - Gets whether a string matches the IPv4address production + Compares a Triple Pattern to another Triple Pattern - String + Other Triple Pattern + + + The aim of this function is to sort Triple Patterns into what is hopefully an optimal order such that during execution the query space is restricted as early as possible. + + + The basic rules of this are as follows: +
    +
  1. Patterns with fewer variables should be executed first
  2. +
  3. Patterns using the same variables should be executed in sequence
  4. +
  5. Patterns using indexes which are considered more useful should be executed first
  6. +
+
+
- + - Gets whether a string matches the dec-octet production + Gets whether a Triple Pattern is Thread Safe when evaluated - String - + + Almost all Triple Patterns are Thread Safe unless they are subquery patterns which themselves are not thread safe + - + - Gets whether a string matches the pct-encoded production + Gets whether the Pattern has no blank variables - String - - + - Gets whether a character matches the unreserved production + Gets the String representation of the Pattern - Character - + - Gets whether a character matches the reserved production + Represents a set of Bindings for a SPARQL Query or part thereof i.e. represents the VALUES clause - Character - - + - Gets whether a character matches the gen-delims production + Creates a new Empty Bindings Pattern - Character - - + - Gets whether a character matches the sub-delims production + Creates a new Bindings Pattern - Character - + Variables - + - Gets whether a character matches the HEXDIG terminal + Gets the enumeration of Variables - Character - - + - Interface for Caches that can be used to cache the result of loading Graphs from URIs + Get the enumeration of fixed variables i.e. those guaranteed to be bound - - - Warning: Only available in Builds for which caching is supported e.g. not supported under Silverlight - - - Implementors should take care to implement their caches such that any errors in the cache do not bubble up outside of the cache. If the cache encounters any error when caching data or retrieving data from the cache it should indicate that the cached data is not available - - - + - Gets/Sets the Cache Directory that is in use + Gets the enumeration of floating variables i.e. 
those not guaranteed to be bound - - - Non-filesystem based caches are free to return String.Empty or null but MUST NOT throw any form or error - - - + - Gets/Sets how long results should be cached + Gets the enumeration of Tuples - - This only applies to downloaded URIs where an ETag is not available, where ETags are available ETag based caching SHOULD be used - - + - Clears the Cache + Adds a Tuple to the Bindings pattern + - + - Gets the ETag for the given URI + Converts a Bindings Clause to a Multiset - URI - Thrown if there is no ETag for the given URI - - - Calling code MUST always use the HasETag() method prior to using this method so it should be safe to throw the KeyNotFoundException if there is no ETag for the given URI - - - + - Gets the path to the locally cached copy of the Graph from the given URI + Gets the String representation of the Pattern - URI - + - Gets whether there is an ETag for the given URI + Represents a Tuple in a BINDINGS clause - URI - - + - Is there a locally cached copy of the Graph from the given URI which is not expired + Creates a new Binding Tuple - URI - Whether the local copy is required to meet the Cache Freshness (set by the Cache Duration) - + Variables + Values - + - Remove the ETag record for the given URI + Gets the enumeration of Variable-Value pairs - URI - + - Removes a locally cached copy of a URIs results from the Cache + Gets the Value for a Variable - URI + Variable + - + - Associates an ETag (if any) with the Request and Response URIs plus returns an IRdfHandler that can be used to write to the cache + Gets whether this is an empty tuple - URI from which the RDF Graph was requested - The actual URI which responded to the request - ETag of the response (if any) - Either an instance of an IRdfHandler that will do the caching or null if no caching is possible - + - Parser for Notation 3 syntax + Gets whether the Tuple is complete i.e. 
has no undefined entries - - - Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - The Uri for log:implies + Gets whether the given variable is bound for this tuple i.e. is not UNDEF + Variable + True if the variable exists in the tuple and is bound, false otherwise - + - The Uri for owl:sameAs + Gets the String representation of the Tuple + - + - Creates a new Notation 3 Parser + Class for representing BIND assignments in SPARQL Queries - + - Creates a new Notation 3 Parser which uses the given Token Queue Mode + Creates a new BIND Pattern - Queue Mode for Tokenising + Variable to assign to + Expression which generates a value which will be assigned to the variable - + - Gets/Sets whether Parsing Trace is written to the Console + Evaluates a BIND assignment in the given Evaluation Context + Evaluation Context - + - Gets/Sets whether Tokeniser Trace is written to the Console + Gets the Pattern Type - + - Gets/Sets the token queue mode used + Returns that this is not an accept all since it is a BIND assignment - + - Loads a Graph by reading Notation 3 syntax from the given input + Gets the Expression that is used to generate values to be assigned - Graph to load into - Stream to read from - + - Loads a Graph by reading Notation 3 syntax from the given input + Gets the Name of the Variable to which values will be assigned - Graph to load into - Input to read from - + - Loads a Graph by reading Notation 3 syntax from the given file + Returns an empty enumeration as any evaluation error will result in an unbound value so we can't guarantee any variables are bound - Graph to load into - File to read from - + - Loads RDF using a RDF handler by reading Notation 3 syntax from the given input + Returns the variable being assigned to as any evaluation error will result in an unbound value so we can't guarantee it is bound - RDF Handler to use - Stream to read from - + - Loads RDF using a RDF handler by 
reading Notation 3 syntax from the given input + Gets whether the Pattern uses the Default Dataset - RDF Handler to use - Input to read from - + - Loads RDF using a RDF handler by reading Notation 3 syntax from the given file + Returns true as a BIND can never contain a Blank Variable - RDF Handler to use - File to read from - + - Internal method which does the parsing of the input + Gets the string representation of the LET assignment - Parser Context + - + - Tries to parse declarations + Compares this Bind to another Bind - Parse Context + Bind to compare to + Just calls the base compare method since that implements all the logic we need - + - Tries to parse forAll quantifiers + Compares this Bind to another Bind - Parser Context + Bind to compare to + Just calls the base compare method since that implements all the logic we need - + - Tries to parse forSome quantifiers + Pattern which matches temporary variables - Parser Context - + - Tries to parse Triples + Creates a new Pattern representing a Blank Node - Parser Context + Blank Node ID - + - Tries to parse Predicate Object lists + Creates a new Pattern representing a Blank Node - Parse Context - Subject of the Triples - Whether this is a Blank Node Predicate Object list + Blank Node ID + Whether to force rigorous evaluation - + - Tries to parse Object lists + Gets the Blank Node ID - Parse Context - Subject of the Triples - Predicate of the Triples - Whether this is a Blank Node Object list - Indicates whether the asserted triples should have it's subject and object swapped - + - Tries to parse Collections + Checks whether the given Node is a valid value for the Temporary Variable - Parser Context - Blank Node which is the head of the collection + Evaluation Context + Node to test + - + - Tries to parse a Graph Literal + Constructs a Node based on the given Set - + Construct Context + - + - Tries to parse Literal Tokens into Literal Nodes + Gets the String representation of this Pattern - Parser Context - 
Literal Token - + - Helper method which raises the Warning event if there is an event handler registered + Gets the Temporary Variable Name of this Pattern - - + - Event which is raised when the parser detects issues with the input which are non-fatal + Class for representing Filter Patterns in SPARQL Queries + + A Filter Pattern is any FILTER clause that can be executed during the process of executing Triple Patterns rather than after all the Triple Patterns and Child Graph Patterns have been executed + - + - Gets the String representation of the Parser which is a description of the syntax it parses + Creates a new Filter Pattern with the given Filter - + Filter - + - Static Helper Class for loading RDF Files into Graphs/Triple Stores + Evaluates a Filter in the given Evaluation Context + Evaluation Context - + - Loads the contents of the given File into a Graph providing the RDF format can be determined + Gets the Pattern Type - Graph to load into - File to load from - - - The FileLoader first attempts to select a RDF Parser by examining the file extension to select the most likely MIME type for the file. This assumes that the file extension corresponds to one of the recognized file extensions for a RDF format the library supports. If this suceeds then a parser is chosen and will be used to attempt to parse the input. - - - Should this fail then the contents of the file will be read into a String, the StringParser is then used to attempt to parse it. The StringParser uses some simple rules to guess which format the input is likely to be and chooses a parser based on it's guess. - - - Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. 
In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - If a File URI is assigned it will always be an absolute URI for the file - - - + - Loads the contents of the given File into a Graph using the given RDF Parser + Returns that the Pattern is not an accept all (since it's a Filter) - Graph to load into - File to load from - Parser to use - - - Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - If a File URI is assigned it will always be an absolute URI for the file - - - + - Loads the contents of the given File using a RDF Handler providing the RDF format can be determined + Gets the Filter that this Pattern uses - RDF Handler to use - File to load from - - - The FileLoader first attempts to select a RDF Parser by examining the file extension to select the most likely MIME type for the file. This assumes that the file extension corresponds to one of the recognized file extensions for a RDF format the library supports. If this suceeds then a parser is chosen and will be used to attempt to parse the input. - - - Should this fail then the contents of the file will be read into a String, the StringParser is then used to attempt to parse it. The StringParser uses some simple rules to guess which format the input is likely to be and chooses a parser based on it's guess. 
- - - + - Loads the contents of the given File using a RDF Handler using the given RDF Parser + Returns the empty enumerable as don't know which variables will be bound - RDF Handler to use - File to load from - Parser to use - - - Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - - - If a File URI is assigned it will always be an absolute URI for the file - - - + - Loads the contents of the given File into a Triple Store providing the RDF dataset format can be determined + Returns the empty enumerable as don't know which variables will be bound - Triple Store to load into - File to load from - Parser to use to parse the given file - - - If the parameter is set to null then the FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. - - - + - Loads the contents of the given File into a Triple Store providing the RDF dataset format can be determined + Gets whether the Pattern uses the Default Dataset - Triple Store to load into - File to load from - - - The FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. 
If this suceeds then a parser is chosen and used to parse the input file. - - - + - Loads the contents of the given File using a RDF Handler providing the RDF dataset format can be determined + Returns true as a FILTER cannot contain blank variables - RDF Handler to use - File to load from - Parser to use to parse the given file - - If the parameter is set to null then the FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. - + Technically blank nodes may appear in a FILTER as part of an EXISTS/NOT EXISTS clause but in that case they would not be visible outside of the FILTER and so are not relevant - + - Loads the contents of the given File using a RDF Handler providing the RDF dataset format can be determined + Compares a filter pattern to another - RDF Handler to use - File to load from - - - The FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. 
- - + Pattern + - + - Raises warning messages + Compares a filter pattern to another - Warning Message + Pattern + - + - Raises Store Warning messages + Returns the string representation of the Pattern - Warning Message + - + - Event which is raised when the parser invoked by the FileLoader detects a non-fatal issue with the RDF syntax + Pattern which matches the Blank Node with the given Internal ID regardless of the Graph the nodes come from - + - Event which is raised when the Store parser invoked by the FileLoader detects a non-fatal issue with the RDF syntax + Creates a new Fixed Blank Node Pattern + ID - + - Static Helper class containing useful methods for Parsers + Gets the Blank Node ID - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Checks whether the pattern accepts the given Node - Parser Context - Token to resolve + SPARQL Evaluation Context + Node to test - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Returns a Blank Node with a fixed ID scoped to whichever graph is provided - Parser Context - Token to resolve - Whether when the default prefix is used but not defined it can fallback to the Base URI - + Construct Context - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Gets the String representation of the Pattern Item - Parser Context - Token to resolve - Whether when the default prefix is used but not defined it can fallback to the Base URI - QName unescaping function - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Class for representing Graph Patterns in Sparql Queries - Parser Context - Token to resolve - - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Creates a new Graph Pattern - RDF Handler - 
Token to resolve - - - It is not recommended to use this overload since an IRdfHandler cannot resolve QNames - - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Creates a new Graph Pattern copied from an existing Graph Pattern - Results Handler - Token to resolve - - - It is not recommended to use this overload since an IRdfHandler cannot resolve QNames - + Graph Pattern - + - Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails + Adds a Triple Pattern to the Graph Pattern respecting any BGP breaks - Graph - Token to resolve - + Triple Pattern - + - Helper method for raising informative standardised Parser Errors + Adds an Assignment to the Graph Pattern respecting any BGP breaks - The Error Message - The Token that is the cause of the Error - + Assignment Pattern - + - Helper function which generates standardised Error Messages + Adds a Filter to the Graph Pattern - Error Message - Event causing the Error - + Filter - + - Helper function which generates standardised Error Messages + Resets the set of Unplaced Filters to be a new set of - Error Message - The Production where the Error occurred - Event causing the Error - + Filters - + - Throws a RdfParsingTerminatedException which is used to tell the parser that it should stop parsing. 
+ Adds a child Graph Pattern to the Graph Pattern respecting any BGP breaks - + Graph Pattern - + - Represents Position Information from Parsers + Adds inline data to a Graph Pattern respecting any BGP breaks + - + - Creates a new set of Position Information + Tells the Graph Pattern that any subsequent Graph/Triple Patterns added go in a new BGP - Line - Column - + - Creates a new set of Position Information + Swaps the position of the two given Triple Patterns - Line - Start Column - End Column + First Position + Second Position + + Intended for use by Query Optimisers + - + - Creates a new set of Position Information + Inserts a Filter at a given position - Start Line - End Line - Start Column - End Column + Filter + Position to insert at + + Intended for use by Query Optimisers + - + - Creates a new set of Position Information form some XML Line Information + Inserts an Assignment at a given position - XML Line Information + Assignment + Position to insert at + + Intended for use by Query Optimisers + - + - Gets the Start Line + Gets/Sets whether the Graph Pattern is Optional - + - Gets the End Line + Gets/Sets whether the Graph Pattern is Filtered - + - Gets the Start Column + Gets/Sets whether the Graph Pattern is a Union of its Child Graph Patterns - + - Gets the End Column + Gets/Sets whether the Graph Pattern operates on a specific Graph - + - Possible RDFa Syntaxes + Gets whether this is an empty Graph Pattern - + - RDFa 1.0 + Gets/Sets whether the Graph Pattern is an EXISTS clause - + - RDFa 1.1 + Gets/Sets whether the Graph Pattern is a NOT EXISTS clause - + - Auto-detect - assumes RDFa 1.1 + Gets/Sets whether the Graph Pattern is a MINUS clause - + - Auto-detect - assumes RDFa 1.0 + Gets/Sets whether the Graph Pattern is a SERVICE clause - + - Class for reading RDF embedded as RDFa from within HTML web pages + Gets whether Optimisation has been applied to this query - - The RDFa parser uses a HTML parser (Html Agility Pack) that is highly tolerant of 
real-world HTML and so is able to extract RDFa from pages that are not strictly valid HTML/XHTML - + This only indicates that an Optimiser has been applied to the Pattern. You can always reoptimise by calling the Optimise() method with an optimiser of your choice on the query to which this Pattern belongs - + - XHTML Vocab Namespace + Gets whether Evaluation Errors in this Graph Pattern are suppressed (currently only valid with SERVICE) - + - URI for the XHTML+RDFa DTD + Gets whether this Graph Pattern contains an Inline Data block (VALUES clause) - + - Namespace URI for XHTML + Determines whether the Graph Pattern has any kind of Modifier (GRAPH, MINUS, OPTIONAL etc) applied - + - Namespace URI for RDFa + Gets/Sets the FILTER that applies to this Graph Pattern - + - RDFa Version Constants + Gets/Sets the Graph Specifier that applies to this Graph Pattern + + This property is also used internally for SERVICE specifiers to save adding an additional property unnecessarily + - + - RDFa Version Constants + Checks whether this Pattern has any Child Graph Patterns - + - RDFa Version Constants + Gets the Last Child Graph Pattern of this Pattern and removes it from this Pattern - + - RDFa Version Constants + Gets the Child Graph Patterns of this Pattern - + - Creates a new RDFa Parser which will auto-detect which RDFa version to use (assumes 1.1 if none explicitly specified) + Gets the Triple Patterns in this Pattern - + - Creates a new RDFa Parser which will use the specified RDFa syntax + Gets whether this Pattern can be simplified - RDFa Syntax Version - + - Parses RDFa by extracting it from the HTML from the given input + Gets whether this Graph Pattern is a Sub-query which can be simplified - Graph to load into - Stream to read from - + - Parses RDFa by extracting it from the HTML from the given input + Gets whether the Graph Pattern uses the Default Dataset - Graph to load into - Input to read from + + Graph Patterns generally use the Default Dataset unless they are 
a GRAPH pattern or they contain a Triple Pattern, child Graph Pattern or a FILTER/BIND which does not use the default dataset + - + - Parses RDFa by extracting it from the HTML from the given file + Gets the enumeration of Filters that apply to this Graph Pattern which will have yet to be placed within the Graph Pattern - Graph to load into - File to read from - + - Parses RDFa by extracting it from the HTML from the given input + Gets the enumeration of LET assignments that are in this Graph Pattern which will be placed appropriately later - RDF Handler to use - Stream to read from - + - Parses RDFa by extracting it from the HTML from the given input + Gets the Variables used in the Pattern - RDF Handler to use - Input to read from - + - Parses RDFa by extracting it from the HTML from the given input + Gets the inline data (VALUES block if any) - RDF Handler to use - File to read from - + - Resolves a CURIE to a Node + Optimises the Graph Pattern using the current global optimiser - Parser Context - Evaluation Context - CURIE - - + - Resolves an Attribute which may be a CURIE/URI to a Node + Optimises the Graph Pattern using the given optimiser - Parser Context - Evaluation Context - URI/CURIE - + Query Optimiser + + + Important: If a Pattern has already been optimized then calling this again is a no-op. + + + For finer grained control of what gets optimized you can use to disable automatic optimisation and then manually call this method as necessary + + - + - Resolves an Attribute which may be a Term/CURIE/URI to a Node where one/more of the values may be special values permissible in a complex attribute + Optimises the Graph Pattern using the given optimiser and with the given variables + + Query Optimiser + Variables + + + Important: If a Pattern has already been optimized then calling this again is a no-op. 
+ + + For finer grained control of what gets optimized you can use to disable automatic optimisation and then manually call this method as necessary + + + The vars parameter contains Variables mentioned in the parent Graph Pattern (if any) that can be used to guide optimisation of child graph patterns + + + + + + Gets the String representation of the Graph Pattern - Parser Context - Evaluation Context - URI/CURIE/Term - + - Parses an complex attribute into a number of Nodes + Gets the Algebra representation of the Graph Pattern - Parser Context - Evaluation Context - Attribute Value - - A complex attribute is any attribute which accepts multiple URIs, CURIEs or Terms - - + + + Interface for Triple Patterns + + + + + Evaluates the Triple Pattern in the given Evaluation Context + + Query Evaluation Context + + - Parses an attribute into a number of Nodes from the CURIEs contained in the Attribute + Gets the Pattern Type - Parser Context - Evaluation Context - Attribute Value - - + - Internal Helper for raising the Warning Event + Gets whether the Pattern accepts all - Warning Message + + Indicates that a Pattern is of the form ?s ?p ?o + - + - Event which is raised when there is a non-fatal error with the input being read + Gets the List of Variables used in the Pattern - + - Static Helper class for providing Constants and Helper functions for use by RDF/XML parsers + Gets the enumeration of floating variables in the pattern i.e. variables that are not guaranteed to have a bound value - + - Checks whether a Uri Reference is an absolute Uri + Gets the enumeration of fixed variables in the pattern i.e. 
variables that are guaranteed to have a bound value - Uri Reference to Test - - Implemented by seeing if the Uri Reference starts with a Uri scheme specifier - + - Array containing the Core Syntax Terms + Gets whether a Triple Pattern uses the Default Dataset when evaluated + + Almost all Triple Patterns use the Default Dataset unless they are sub-query patterns which themselves don't use the Default Dataset or they contain an expression (in the case of BIND/LET/FILTERs) which does not use the Default Dataset + - + - Array containing the other Syntax Terms + Gets whether a Triple Pattern does not contain any Blank Variables - + - Array containing the Old Syntax Terms + Interface for Triple Patterns that can be used in a CONSTRUCT pattern - + - Array containing Syntax Terms where the rdf: Prefix is mandated + Constructs a Triple from a Set based on this Triple Pattern + Construct Context + - + - Checks whether a given QName is a Core Syntax Term + Gets the Subject of the Pattern - QName to Test - True if the QName is a Core Syntax Term - + - Checks whether a given QName is a Syntax Term + Gets the Predicate of the Pattern - QName to Test - True if the QName is a Syntax Term - + - Checks whether a given QName is a Old Syntax Term + Gets the Object of the Pattern - QName to Test - True if the QName is a Old Syntax Term - + - Checks whether a given QName is valid as a Node Element Uri + Gets whether the Pattern contains no Variables of any kind - QName to Test - True if the QName is valid - + - Checks whether a given QName is valid as a Property Element Uri + Gets whether the Pattern contains no Explicit Variables (i.e. 
Blank Node Variables are ignored) - QName to Test - True if the QName is valid - + - Checks whether a given QName is valid as a Property Attribute Uri + Inteface for Triple Patterns that do simple pattern matching - QName to Test - True if the QName is valid - + - Checks whether a given Local Name is potentially ambigious + Gets the Index type that should be used in Pattern execution - Local Name to Test - True if the Local Name is ambigious - This embodies Local Names which must have an rdf prefix - + - Checks whether a given URIRef is encoded in Unicode Normal Form C + Gets the Subject of the Pattern - URIRef to Test - True if the URIRef is encoded correctly - + - Checks whether a given Base Uri can be used for relative Uri resolution + Gets the Predicate of the Pattern - Base Uri to Test - True if the Base Uri can be used for relative Uri resolution - + - Determines whether a QName is valid for use in RDF/XML + Gets the Object of the Pattern - QName - - + - Checks whether an attribute is an rdf:ID attribute + Gets the Triples that match this pattern - Attribute to Test - True if is an rdf:ID attribute - Does some validation on ID value but other validation occurs at other points in the Parsing + Evaluation Context + - + - Checks whether an attribute is an rdf:nodeID attribute + Gets whether a given triple is accepted by this pattern - Attribute to Test - True if is an rdf:nodeID attribute - Does some validation on ID value but other validation occurs at other points in the Parsing + Context + Triple + - + - Checks whether an attribute is an rdf:about attribute + Creates a set from a Triple - Attribute to Test - True if is an rdf:about attribute + Triple + - + - Checks whether an attribute is an property attribute + Interface for Triple Patterns that apply filters - Attribute to Test - True if is an property attribute - + - Checks whether an attribute is an rdf:resource attribute + Gets the filter to apply - Attribute to Test - True if is an rdf:resource 
attribute - + - Checks whether an attribute is an rdf:datatype attribute + Interface for Triple Patterns that represent Assignment operators - Attribute to Test - True if is an rdf:datatype attribute - + - Validates that an ID is a valid NCName + Gets the Assignment Expression that is used - ID Value to Test - True if the ID is valid - + - Validates that a URIReference is valid + Name of the Variable which is assigned to - URIReference to Test - True - - Currently partially implemented, some invalid Uri References may be considered valid - - + - Parser for reading SPARQL Results which have been encoded in the RDF schema for Result Sets and serialized as RDF + Interface for Triple Patterns that do sub-queries - + - Creates a new SPARQL RDF Parser which will use auto-detection for determining the syntax of input streams/files + Gets the sub-query - + - Creates a new SPARQL RDF Parser which will use the given RDF Parser + Interface for Triple Patterns that do property paths - RDF Parser - + - Loads a SPARQL Result Set from RDF contained in the given Input + Gets the Subject of the Pattern - SPARQL Result Set to populate - Input to read from - - Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - - + - Loads a SPARQL Result Set from RDF contained in the given Stream + Gets the property path - SPARQL Result Set to populate - Stream to read from - - Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - - + - Loads a SPARQL Result Set from RDF contained in the given File + Gets the Object of the Pattern - SPARQL Result Set to populate - File to read from - - Uses the FileLoader to load the RDF from the file which will attempt to determine the format of the RDF based on the file extension unless the parser was instantiated with a specific IRdfReader to use - - + - 
Loads a SPARQL Results from RDF contained in the given Input using a Results Handler + Interface for Triple Patterns that do property functions - Results Handler to use - Input to read from - - Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - - + - Loads a SPARQL Results from RDF contained in the given Stream using a Results Handler + Gets the Subject arguments of the function - Results Handler to use - Stream to read from - - Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - - + - Loads a SPARQL Results from RDF contained in the given file using a Results Handler + Gets the Object arguments of the function - Results Handler to use - File to read from - - Uses the FileLoader to load the RDF from the file which will attempt to determine the format of the RDF based on the file extension unless the parser was instantiated with a specific IRdfReader to use - - + - Internal method which actually parses the Result Set by traversing the RDF Graph appropriately + Gets the property function - Parser Context - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Gets the original triple patterns that made up this pattern - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + Class for representing LET Patterns in SPARQL Queries - + - Gets the String representation of the Parser which is a description of the syntax it parses + Creates a new LET Pattern - + Variable to assign to + Expression which generates a value which will be assigned to the variable - + - Class for parsing SPARQL Update commands into SparqlUpdateCommandSet objects that can be used to modify a Triple Store + Evaluates a LET assignment in the given Evaluation Context + 
Evaluation Context - + - Gets/Sets whether Tokeniser Tracing is used + Gets the Pattern Type - + - Gets/Sets the locally scoped custom expression factories + Returns that this is not an accept all since it is a LET assignment - + - Gets/Sets the Default Base URI used for Updated Commands parsed by this parser instance + Gets the Expression that is used to generate values to be assigned - + - Gets/Sets the locally scoped Query Optimiser applied to graph patterns in update commands at the end of the parsing process + Gets the Name of the Variable to which values will be assigned - - - May be null if no locally scoped optimiser is set in which case the globally scoped optimiser will be used - - - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Update Commands being parsed is detected + Returns an empty enumeration as any evaluation error will result in an unbound value so we can't guarantee any variables are bound - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Update Commands being parsed is detected + Returns the variable being assigned to as any evaluation error will result in an unbound value so we can't guarantee it is bound - + - Parses a SPARQL Update Command Set from the input stream + Gets whether the Pattern uses the Default Dataset - Input Stream - - + - Parses a SPARQL Update Command Set from the input + Returns true as a LET can never contain Blank Nodes - Input - - + - Parses a SPARQL Update Command Set from the given file + Gets the string representation of the LET assignment - File - + - Parses a SPARQL Update Command Set from the given String + Compares this Let to another Let - SPARQL Update Commands - + Let to compare to + Just calls the base compare method since that implements all the logic we need - + - Parses a SPARQL Update Command Set from the given String + Compares this Let to another Let - SPARQL Update Commands - + Let to compare to + Just calls the base compare method 
since that implements all the logic we need - + - Parser for Turtle syntax + Pattern which matches specific Nodes - Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Creates a new Turtle Parser + Creates a new Node Match Pattern + Exact Node to match - + - Creates a new Turtle Parser + Creates a new Node Match Pattern - Turtle Syntax + Exact Node to match + Whether to force rigorous evaluation regardless of the global setting - + - Creates a new Turtle Parser which uses the given Token Queue Mode + Checks whether the given Node matches the Node this pattern was instantiated with - Queue Mode for Turtle + Evaluation Context + Node to test + - + - Creates a new Turtle Parser which uses the given Token Queue Mode + Constructs a Node based on the given Set - Queue Mode for Turtle - Turtle Syntax + Construct Context - + - Gets/Sets whether Parsing Trace is written to the Console + Gets a String representation of the Node + - + - Gets/Sets whether Tokeniser Trace is written to the Console + Gets the Node that this Pattern matches - + - Gets/Sets the token queue mode used + Class for representing property function patterns in SPARQL Query - + - Loads a Graph by reading Turtle syntax from the given input + Creates a new Property Function pattern - Graph to load into - Stream to read from + Function information + Property Function - + - Loads a Graph by reading Turtle syntax from the given input + Creates a new Property Function pattern - Graph to load into - Input to read from + Original Triple Patterns + Subject Arguments + Object Arguments + Property Function - + - Loads a Graph by reading Turtle syntax from the given file + Gets the Pattern Type - Graph to load into - File to read from - + - Loads RDF by reading Turtle syntax from the given input using a RDF Handler + Gets the Subject arguments - RDF Handle to use - Stream to read from - + - Loads RDF by reading Turtle syntax from the given input using 
a RDF Handler + Gets the Object arguments - RDF Handle to use - Input to read from - + - Loads RDF by reading Turtle syntax from the given file using a RDF Handler + Gets the original triple patterns - RDF Handle to use - File to read from - + - Internal method which does the parsing of the input + Gets the property function - Parser Context - + - Tries to parse Base/Prefix declarations + Returns the empty enumerable as cannot guarantee any variables are bound - Parse Context - Whether declarations are Turtle style (if false SPARQL style is assumed) - + - Tries to parse Triples + Returns all variables mentioned in the property function as we can't guarantee they are bound - Parser Context - + - Tries to parse Predicate Object lists + Evaluates the property function - Parse Context - Subject of the Triples - Whether this is a Blank Node Predicate Object list + Evaluation Context - + - Tries to parse Object lists + Returns false because property functions are not accept-alls - Parse Context - Subject of the Triples - Predicate of the Triples - Whether this is a Blank Node Object list - + - Tries to parse Collections + Returns true if none of the - Parser Context - Blank Node which is the head of the collection - + - Tries to parse Literal Tokens into Literal Nodes + Compares a property function pattern to another - Parser Context - Literal Token + Pattern - + - Helper method which raises the Warning event if there is an event handler registered + Compares a property function pattern to another - + Pattern + - + - Event which is raised when the parser detects issues with the input which are non-fatal + Gets the string representation of the pattern + - + - Gets the String representation of the Parser which is a description of the syntax it parses + Class for representing property patterns in SPARQL Queries - - + - Possible NQuads Syntax modes + Creates a new Property Path Pattern + Subject + Property Path + Object - + - The original NQuads specification + Gets the 
pattern type - + - Standardized NQuads as specified in the RDF 1.1 NQuads specification + Gets the Subject of the Property Path - + - Parser for parsing NQuads (NTriples with an additional Context i.e. Named Graphs) + Gets the Property Path - - - The Default Graph (if any) will be given the special Uri nquads:default-graph - - - NQuads permits Blank Nodes and Literals to be used as Context, since the library only supports Graphs named with URIs these are translated into URIs of the following form: - -
-            nquads:bnode:12345678
-            
-
-            nquads:literal:87654321
-            
- - In these URIs the numbers are the libraries hash codes for the node used as the Context. - -
- + - Creates a new NQuads parser + Gets the Object of the Property Path - + - Creates a new NQuads parser + Gets the enumeration of fixed variables in the pattern i.e. variables that are guaranteed to have a bound value - NQuads syntax mode - + - Creates a new NQuads parser + Gets the enumeration of floating variables in the pattern i.e. variables that are not guaranteed to have a bound value - Token Queue Mode - + - Creates a new NQuads parser + Evaluates a property path pattern - Token Queue Mode - NQuads syntax mode + Evaluation Context - + - Gets/Sets whether Tokeniser Tracing is used + Gets whether the Pattern accepts all Triple Patterns - + - Gets/Sets the token queue mode used + Returns false a property path may always contain implicit blank variables - + - Gets/Sets the NQuads syntax mode + Compares a property path pattern to another + Pattern + - + - Loads a RDF Dataset from the NQuads input into the given Triple Store + Compares a property path pattern to another - Triple Store to load into - File to load from + Pattern + - + - Loads a RDF Dataset from the NQuads input into the given Triple Store + Gets the String representation of the Pattern - Triple Store to load into - Input to load from + - + - Loads a RDF Dataset from the NQuads input using a RDF Handler + Class for representing Node Patterns in Sparql Queries - RDF Handler to use - File to load from - + - Loads a RDF Dataset from the NQuads input using a RDF Handler + Binding Context for Pattern Item - RDF Handler to use - Input to load from - + - Converts syntax enumeration values from NQuads to NTriples + Checks whether the Pattern Item accepts the given Node in the given Context - NQuads Syntax + Evaluation Context + Node to test - + - Tries to parse a URI + Constructs a Node based on this Pattern for the given Set - RDF Handler - URI - URI Node if parsed successfully + Construct Context + - + - Helper method used to raise the Warning event if there is an event handler registered + Sets the 
Binding Context for the Pattern Item - Warning message - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Gets/Sets whether rigorous evaluation is used, note that this setting may be overridden by the global option - + - Gets the String representation of the Parser which is a description of the syntax it parses + Gets the String representation of the Pattern - + - Parser for SPARQL Boolean results as Plain Text + Gets the Variable Name if this is a Variable Pattern or null otherwise - + - Loads a Result Set from an Input Stream + Gets/Sets whether the Variable is repeated in the Pattern - Result Set to load into - Input Stream to read from - + - Loads a Result Set from an Input Stream + Class for representing Sub-queries which occur as part of a SPARQL query - Result Set to load into - File to read from - + - Loads a Result Set from an Input + Creates a new Sub-query pattern which represents the given sub-query - Result Set to load into - Input to read from + Sub-query - + - Loads a Result Set from an Input using a Results Handler + Gets the Sub-Query - Results Handler to use - Input to read from - + - Loads a Result Set from an Input Stream using a Results Handler + Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value - Results Handler to use - Input Stream to read from - + - Loads a Result Set from a file using a Results Handler + Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value - Results Handler to use - File to read from - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Gets the pattern type - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + Evaluates a Sub-query in the given Evaluation Context + Evaluation Context - + - Gets the String representation of the Parser which is a description of the syntax it parses + Returns that the Pattern is not an accept all since it is a Sub-query - - + - Parser for parsing TriX (a named Graph XML format for RDF) + Gets whether the Sub-query is Thread Safe - - - The Default Graph (if any) will be given the special Uri trix:default-graph - - - TriX permits Graphs to be named with Blank Node IDs, since the library only supports Graphs named with URIs these are converted to URIs of the form trix:local:ID - - - + - Current W3C Namespace Uri for TriX + Returns true as while a sub-query may contain blank node variables they will not be in scope here - + - Loads the RDF Dataset from the TriX input into the given Triple Store + Compares a sub-query pattern to another - Triple Store to load into - File to load from + Pattern + - + - Loads the RDF Dataset from the TriX input into the given Triple Store + Compares a sub-query pattern to another - Triple Store to load into - Input to load from + Pattern + - + - Loads the RDF Dataset from the TriX input using a RDF Handler + Gets the string representation of the sub-query - RDF Handler to use - File to load from + - + - Loads the RDF Dataset from the TriX input using a RDF Handler + Class for representing Triple Patterns in SPARQL Queries - RDF Handler to use - Input to load from - + - Helper method for raising informative standardised Parser Errors + Creates a new Triple Pattern - The Error Message - The Node that is the cause of the Error + Subject Pattern + Predicate Pattern + 
Object Pattern + + + + Gets whether a given Triple is accepted by this Pattern in the given Context + + Evaluation Context + Triple to test - + - Helper method used to raise the Warning event if there is an event handler registered + Gets the pattern type - Warning message - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Gets the Index Type we will use for this Pattern - + - Gets the String representation of the Parser which is a description of the syntax it parses + Subject Pattern - - + - Provides caching services to the UriLoader class + Predicate Pattern - + - Creates a new Cache which uses the system temporary directory as the cache location + Object Pattern - + - Creates a new Cache which uses the given directory as the cache location + Returns all variables mentioned as a match guarantees all variables are bound - Directory - + - Gets/Sets how long results should be cached + Returns an empty enumeration as a match guarantees all variables are bound + + + + + Returns whether the Triple Pattern is an accept all - This only applies to downloaded URIs where an ETag is not available, where ETags are available proper ETag based caching is used + True if all three Pattern Items are VariablePattern and all the Variables names are distinct - + - Gets/Sets the Cache Directory that is used + Evaluates a Triple Pattern in the given Evaluation Context + Evaluation Context - + - Initialises the Cache as required + Gets the Enumeration of Triples that should be assessed for matching the pattern + Evaluation Context + - + - Clears the Cache + Takes an enumerable and extracts Triples which match this pattern as results + SPARQL Evaluation Context + Enumerable of Triples - + - Gets whether there is an ETag for the given URI + Generates a Result Set for a Triple that matches the Pattern - URI + Triple - + - Gets the ETag for the given URI + Constructs a Triple from a Set based on this Triple Pattern - URI 
+ Construct Context - Thrown if there is no ETag for the given URI - + - Remove the ETag record for the given URI + Gets whether the Pattern contains no Variables of any kind - URI - + - Removes a locally cached copy of a URIs results from the Cache + Gets whether the Pattern contains no Explicit Variables (i.e. Blank Node Variables are ignored) - URI - + - Is there a locally cached copy of the Graph from the given URI which is not expired + Gets whether the Pattern contains no Explicit Variables (i.e. Blank Node Variables are ignored) - URI - Whether the local copy is required to meet the Cache Freshness (set by the Cache Duration) - - + - Gets the path to the locally cached copy of the Graph from the given URI + Compares a triple pattern to another - URI + Pattern - - This method does not do any cache expiry calculations on the file. This is due to the fact that we'll store local copies of Graphs for which we have ETags and when using ETags we rely on the servers knowledge of whether the resource described by the URI has changed rather than some arbitrary caching duration that we/the user has set to use. 
- - + - Internal class which parses SPARQL Paths into path expressions + Compares a triple pattern to another + Pattern + - + - Class for representing errors that occur while querying RDF + Gets the String representation of this Pattern + - + - Creates a new RDF Query Exception + Possible Types of Triple Pattern - Error Message - + - Creates a new RDF Query Exception + Simple pattern matching - Error Message - Exception that caused this Exception - + - Class for representing Timeout errors that occur while querying RDF + FILTER application - + - Creates a new RDF Query Timeout Exception + BIND assignment - Error Message - + - Class for representing Exceptions occurring in RDF reasoners + LET assignment - + - Creates a new RDF Reasoning Exception + Sub-query - Error Message - + - Creates a new RDF Reasoning Exception + Property Path - Error Message - Exception that caused this exception - + - Class for representing Termination errors + Property Function - + - Creates a new RDF Query Termination Exception + Comparer for Triple Pattern Types - + - Class for representing Path Found terminations + Compares two triple pattern types + Pattern Type + Pattern Type + - + - Creates a new Path Found exception + Pattern which matches Variables - + - Algebra operator which combines a Filter and a Product into a single operation for improved performance and reduced memory usage + Creates a new Variable Pattern + Variable name - + - Creates a new Filtered Product + Creates a new Variable Pattern - LHS Algebra - RHS Algebra - Expression to filter with + Variable name + Whether to force rigorous evaluation - + - Gets the LHS Algebra + Checks whether the given Node is a valid value for the Variable in the current Binding Context + Evaluation Context + Node to test + - + - Gets the RHS Algebra + Constructs a Node based on the given Set + Construct Context + The Node which is bound to this Variable in this Solution - + - Transforms the inner algebra with the given optimiser + Gets the 
String representation of this pattern - Algebra Optimiser - + - Transforms the LHS algebra only with the given optimiser + Gets the Name of the Variable this Pattern matches - Algebra Optimiser - - + - Transforms the RHS algebra only with the given optimiser + Namespace which provide classes relating to the property function extension point of SPARQL - Algebra Optimiser - - + - Evaluates the filtered product + Interface for Property Function factories - Evaluation Context + + + + Gets whether the factory considers the given URI as representing a property function + + URI - + - Gets the Variables used in the Algebra + Tries to create a property function pattern with the given information + Function information + Property Function pattern + - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Interface for SPARQL property functions - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Gets the Function URI - + - Converts the algebra back into a query + Evaluates the function in the given context + Context - + - Converts the algebra back into a Graph Pattern + Gets the variables used in the function - - + - Gets the string represenation of the algebra + Factory for creating property functions - - + - Implementation of a multiset which is suitable for multiple threads to write to in parallel, useful for parallelizing certain operations + Gets the number of globally registered factories - + - Creates a new Partionted Multiset + Adds a globally registered factory - Number of partitions - Partition Size + Factory - + - Gets the next Base ID to be used + Removes a globally registered factory + + Factory + + + + Gets whether a factory is registered + Factory Type - + - Does a Union of this Multiset and another Multiset + Gets whether a factory is registered - Other Multiset + Factory - + - Determines whether a given Value is present for 
a given Variable in any Set in this Multiset + Gets whether a URI is considered a property function by the global factories - Variable - Value + Function URI - + - Returns whether a given Variable is present in any Set in this Multiset + Gets whether a URI is considered a property function by any global/local factory - Variable + Function URI + Locally scoped factories - + - Determines whether this Multiset is disjoint with another Multiset + Tries to create a property function - Other Multiset + Property Function information + Property Function - + - Adds a Set to the multiset + Tries to create a property function + + Property Function information + Locally Scoped factories + Property Function + + + + + Helper Class containing functions useful in working with property functions - Set - - Assumes the caller has set the ID of the set appropriately and will use this to determine which partition to add to - - + - Adds a Variable to the multiset + Used to extract the patterns that make up property functions - Variable + Triple Patterns + - + - Sets the variable ordering for the multiset + Used to extract the patterns that make up property functions - Variable Ordering + Triple Patterns + Locally scoped factories + - + - Removes a Set from the multiset + Used to help extract the patterns that make up a property function pattern - Set ID + Key + Subject + Patterns + Function Information + Argument List to add discovered arguments to - + - Gets whether the multiset is empty + Represents information about a property function - + - Gets the number of sets in the multiset + Creates new function information + Function URI - + - Gets the variables in the multiset + Gets the function URI - + - Gets the sets in the multiset + Gets the triple patterns that compose the property function - + - Gets the Set IDs in the mutliset + Gets the subject arguments to the function - + - Gets a Set from the multiset + Gets the object arguments to the function - - - + - Removes temporary 
variables from all sets the multiset + Represents an Group of Bindings which is used when executing Queries with GROUP BY clauses - + - Removes a specific variable from all sets in the multiset + Creates a new Binding Group - Variable - + - Algebra that represents the application of a Property Function + Creates a new Binding Group which is a sub-group of the given Parent Group + Parent Group - + - Creates a new Property function algebra + Creates a new Binding Group from the specified IDs - Inner algebra - Property Function + IDs - + - Gets the Inner Algebra + Adds a Binding ID to the Group + ID - + - Transforms this algebra with the given optimiser + Gets the Enumerator for the Binding IDs in the Group - Optimiser - + - Evaluates the algebra in the given context + Gets the Enumerator for the Binding IDs in the Group - Evaluation Context - + - Gets the variables used in the algebra + Gets the Binding IDs in the Group - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Adds a Variable Assignment to the Group + Variable + Value - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Gets the Variable Assignments for the Group - + - Throws an error because property functions cannot be converted back to queries + Gets a String summarising the group - + - Throws an error because property functions cannot be converted back to graph patterns + Exposes method for assigning a name to an expression variable - - + - Gets the string representation of the algebra + Set the expression's variable name - + the parent query or graph pattern builder - + - Comparer for checking whether sets are distinct, check may either be using the entire set or by using only a subset of variables + Adds additional to DESCRIBE - + - Creates a new comparer that compares across all variables + Adds additional to DESCRIBE - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - Creates a new comparer that compare only on the specific variables + Provides methods to supply the "then" expression for the IF function call - Variables - + - Determines whether the given sets are equal + Sets the second parameter of the IF function call - First Set - Second Set - True if sets are equal, false otherwise - + - Gets the hash code for a set + Provides methods to supply the "else" expression for the IF function call - Set - Hash Code - + - Represents a fixed set of solutions + Sets the third parameter of the IF function call - + - Creates a new fixed set of solutions + Represents a SPARQL aggregate - Table - + - Returns the fixed set of solutions + Initializes a new instance of the class. - Evaluation Context - - + - Gets the variables used in the algebra + Represents a blank node RDF term expression - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Wraps the as a blank node expression - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Represents a logical expression - + - Throws an error as this cannot be converted back into a query + Wraps the as a boolean expression - - + - Throws an error as this cannot be converted back into a graph pattern + Represents a IRI expression - - + - Gets the string representation of the algebra + Wraps the as an IRI expression - - + - - Contains the classes which model the mapping of SPARQL queries into the SPARQL Algebra. This namespace is a key component of the new Leviathan SPARQL engine introduced in the 0.2.x builds of dotNetRDF - + Represents a literal expression - + - Static Helper class containing extensions used in the Algebra evaluation process + Wraps the as a literal expression - + - Calculates the product of two mutlisets asynchronously with a timeout to restrict long running computations + Returns the constant value of this expression formatted as a simple literal expression - Multiset - Other Multiset - Timeout, if <=0 no timeout is used and product will be computed sychronously - + A new LiteralExpression whose value is the constant value of this epression - + - Delegate for generating product of two multisets asynchronously + Creates a typed literal term - Multiset - Other Multiset - Mutliset to generate the product in - Stop Token - + - Method for generating product of two multisets asynchronously + Creates an untyped literal term (simple literal) - Multiset - Other Multiset - Mutliset to generate the product in - Stop Token - + - Token passed to asynchronous code to allow stop signalling + Represents a numeric expression of known type - + - Gets/Sets whether the code should stop + Wraps the as a typed numeric expression - - Once set to true cannot be reset - - + - Represents a BGP which is a set of Triple Patterns + Represents a 
numeric expression of undefined type - - - An Ask BGP differs from a BGP in that rather than evaluating each Triple Pattern in turn it evaluates across all Triple Patterns. This is used for ASK queries where we are only concerned with whether a BGP matches and not in the specific solutions - - - An Ask BGP can only contain concrete Triple Patterns and/or FILTERs and not any of the other specialised Triple Pattern classes - - - + - Creates a Streamed BGP containing a single Triple Pattern + Wraps the as a numeric expression - Triple Pattern - + - Creates a Streamed BGP containing a set of Triple Patterns + Represents a SPARQL expression (variable, function, operator, term or aggregate) - Triple Patterns - + - Determines whether a Triple Pattern can be evaluated using a Lazy ASK approach + The undelrying expression - Triple Pattern - - + - Gets the number of Triple Patterns in the BGP + Represents a RDF term expression (IRI, literal or blank node) - + - Gets the Triple Patterns in the BGP + Wraps the as an RDF term expression - + - Gets the Variables used in the Algebra + Represents a SPARQL expression which is not an aggregate - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Initializes a new instance of the class. + The expression. - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Creates a call to the IN function + the list of SPARQL expressions - + - Gets whether the BGP is the emtpy BGP + Creates a greater than operator usage - + - Evaluates the BGP against the Evaluation Context + Creates a less than operator usage - Evaluation Context - - + - Gets the String representation of the Algebra + Creates a greater than or equal operator usage - - + - Converts the Algebra back to a SPARQL Query + Creates a less than or equal operator usage - - + - Converts the BGP back to a Graph Pattern + Represents a typed literal - - + - Represents a Union + Wraps the as a typed literal expression - - - An Ask Union differs from a standard Union in that if it finds a solution on the LHS it has no need to evaluate the RHS - - - + - Creates a new Ask Union + Represents an expression, which evaluates to a variable - LHS Pattern - RHS Pattern - + - Evaluates the Ask Union + Gets the represented by this variable expression - Evaluation Context - - + - Gets the Variables used in the Algebra + Creates a builder of a normal graph patterns - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Creates a builder of a graph pattern + MINUS, GRAPH, SERVICE etc. - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Provides methods for creating aggregates expressions - + - Gets the LHS of the Join + Gets a builder which builds a DISTICT aggregate - + - Gets the RHS of the Join + Creates a SAMPLE aggregate - + - Gets the String representation of the Algebra + Creates a SAMPLE aggregate - - + - Converts the Algebra back to a SPARQL Query + Creates a SAMPLE aggregate - - + - Converts the Union back to Graph Patterns + Provides methods for creating DESCRIBE queries - - + - Transforms both sides of the Join using the given Optimiser + Adds additional to DESCRIBE - Optimser - - + - Transforms the LHS of the Join using the given Optimiser + Adds additional to DESCRIBE - Optimser - - + + + Interface for creating DESCRIBE queries + + + + + Adds triple patterns to the SPARQL query or graph pattern + + + - Transforms the RHS of the Join using the given Optimiser + Adds triple patterns to the SPARQL query or graph pattern - Optimser - - + - Abstract Base Class for representing Multisets + Provides methods for creating aggregates expressions but only those allowing DISTINCT - + - List of IDs that is used to return the Sets in order if the Multiset has been sorted + Creates a SUM aggregate - + - The number of results that would be returned without any limit clause to a query or -1 if not supported. 
Defaults to the same value as the Count member + Creates a SUM aggregate - + - Joins this Multiset to another Multiset + Creates a SUM aggregate - Other Multiset - - + - Does a Left Join of this Multiset to another Multiset where the Join is predicated on the given Expression + Creates a SUM aggregate - Other Multiset - Expression - - + - Does an Exists Join of this Multiset to another Multiset where the Join is predicated on the existence/non-existence of a joinable solution on the RHS + Creates a AVG aggregate - Other Multiset - Whether a solution must exist in the Other Multiset for the join to be made - - + - Does a Minus Join of this Multiset to another Multiset where any joinable results are subtracted from this Multiset to give the resulting Multiset + Creates a AVG aggregate - Other Multiset - - + - Does a Product of this Multiset and another Multiset + Creates a AVG aggregate - Other Multiset - - + - Does a Union of this Multiset and another Multiset + Creates a AVG aggregate - Other Multiset - - + - Determines whether the Multiset contains the given Value for the given Variable + Creates a MIN aggregate - Variable - Value - - + - Determines whether the Multiset contains the given Variable + Creates a MIN aggregate - Variable - - + - Determines whether the Mutliset is disjoint with the given Multiset + Creates a MIN aggregate - Multiset - - + - Adds a Set to the Mutliset + Creates a MIN aggregate - Set to add - + - Adds a Variable to the Multiset + Creates a MAX aggregate - Variable - + - Sets the variable ordering for the multiset + Creates a MAX aggregate - Variable Ordering - + - Removes a Set (by ID) from the Multiset + Creates a MAX aggregate - ID - + - Sorts a Set based on the given Comparer + Creates a MAX aggregate - Comparer on Sets - + - Returns whether the Multiset is Empty + Creates a GROUP_CONCAT aggregate - + - Gets the Count of Sets in the Multiset + Creates a GROUP_CONCAT aggregate - + - Trims the Multiset of Temporary Variables + Creates a 
GROUP_CONCAT aggregate - + - Trims the Multiset by removing all Values for the given Variable + Creates a COUNT(*) aggregate - Variable - + - Gets the Variables in the Multiset + Creates a COUNT aggregate - + - Gets the Sets in the Multiset + Creates a COUNT aggregate - + - Gets the IDs of Sets in the Multiset + Creates a COUNT aggregate - + - Retrieves the Set with the given ID + Creates a COUNT aggregate - ID - - + - Gets the string representation of the multiset (intended for debugging only) + Provides methods for building SPARQL expressions, including aggregates - - + - Represents a BGP which is a set of Triple Patterns + Provides methods for building graph patterns - + - Creates a new empty BGP + Adds another child graph pattern builder. - + - Creates a BGP containing a single Triple Pattern + Adds another child graph pattern builder. - Triple Pattern - + - Creates a BGP containing a set of Triple Patterns + Creates a UNION of multiple graph patterns. If is null or empty, + acts as a call to the method. - Triple Patterns - + - Gets the number of Triple Patterns in the BGP + Creates a UNION of multiple graph patterns. If is null or empty, + acts as a call to the method. - + - Gets the Triple Patterns in the BGP + Adds triple patterns to the SPARQL query or graph pattern - + - Evaluates the BGP against the Evaluation Context + Adds triple patterns to the SPARQL query or graph pattern - Evaluation Context - - + - Gets the Variables used in the Algebra + Adds an OPTIONAL graph pattern to the SPARQL query or graph pattern - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Adds a FILTER to the SPARQL query or graph pattern - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Adds a FILTER expression to the SPARQL query or graph pattern - + - Gets whether the BGP is the emtpy BGP + Adds a MINUS graph pattern to the SPARQL query or graph pattern - + - Returns the String representation of the BGP + Adds a GRAPH graph pattern to the graph pattern - - + - Converts the Algebra back to a SPARQL Query + Adds a GRAPH graph pattern to the graph pattern - - + - Converts the BGP to a Graph Pattern + Adds a SERVICE graph pattern to the graph pattern - - + - Represents a BINDINGS modifier on a SPARQL Query + Adds a BIND variable assignment to the graph pattern - + - Creates a new BINDINGS modifier + Adds a "normal" child graph pattern - Bindings - + - Evaluates the BINDINGS modifier + Adds a "normal" child graph pattern - Evaluation Context - - + - Gets the Variables used in the Algebra + Addsa "normal" child graph pattern - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Provides methods for building SPARQL expressions, but not aggregates - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Creates a call to the REGEX function - + - Gets the Bindings + Creates a call to the REGEX function - + - Gets the String representation of the Algebra + Creates a call to the REGEX function - - + - Converts the Algebra back to a SPARQL Query + Creates a call to the REGEX function - - + - Convers the Algebra back to a Graph Pattern + Creates a call to the REGEX function - - + - Represents an Extend operation which is the formal algebraic form of the BIND operation + Creates a call to the REGEX function - + - Creates a new Extend operator + Creates a call to the REGEX function - Pattern - Expression - Variable to bind to - + - Gets the Variable Name to be bound + Creates a call to the REGEX function - + - Gets the Assignment Expression + Creates a call to the REGEX function - + - Gets the Inner Algebra + Creates a call to the REGEX function - + - Transforms the Inner Algebra using the given Optimiser + Creates a call to the STRLEN function with a variable parameter - Optimiser - + a SPARQL variable - + - Evaluates the Algebra in the given context + Creates a call to the STRLEN function with a string literal parameter - Evaluation Context - + a string literal parameter - + - Gets the variables used in the algebra + Creates a call to the SUBSTR function with a string literal and variable parameters + a string literal parameter + 1-based start index - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Creates a call to the SUBSTR function with a string literal and interger expression parameters + a string literal parameter + a SPARQL variable - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Creates a call to the SUBSTR function with a string literal and interger parameters + a string literal parameter + 1-based start index - + - Converts the Algebra to a Query + Creates a call to the SUBSTR function with a variable and interger expression parameters - + a SPARQL variable + 1-based start index - + - Converts the Algebra to a Graph Pattern + Creates a call to the SUBSTR function with a variable and interger parameters - + a SPARQL variable + 1-based start index - + - Gets the String representation of the Extend + Creates a call to the SUBSTR function with two variable parameters - + a SPARQL variable + a SPARQL variable - + - Interface for Sets which represents a possible solution during SPARQL evaluation + Creates a call to the SUBSTR function with a string literal and variable parameters + a string literal parameter + 1-based start index + substring length - + - Adds a Value for a Variable to the Set + Creates a call to the SUBSTR function with a string literal and interger expression parameters - Variable - Value + a string literal parameter + a SPARQL variable + substring length - + - Checks whether the Set contains a given Variable + Creates a call to the SUBSTR function with a string literal and interger parameters - Variable - + a string literal parameter + 1-based start index + substring length - + - Gets whether the Set is compatible with a given set based on the given variables + Creates a call to the SUBSTR function with a variable and interger expression parameters - Set - Variables - + a SPARQL variable + 1-based start index + substring length - + - Gets whether the Set is minus compatible with a given set based on the given variables + Creates a call to the SUBSTR function with a variable and interger parameters - Set - Variables - + a SPARQL variable + 1-based start index + substring length - + - Gets/Sets the ID of the Set + Creates a call to the SUBSTR function with two variable 
parameters + a SPARQL variable + a SPARQL variable + substring length - + - Removes a Value for a Variable from the Set + Creates a call to the SUBSTR function with a string literal and two integer expressions parameters - Variable + a string literal parameter + 1-based start index + substring length - + - Retrieves the Value in this set for the given Variable + Creates a call to the SUBSTR function with a string literal, variable and interger expression parameters - Variable - Either a Node or a null + a string literal parameter + a SPARQL variable + substring length - + - Gets the Values in the Set + Creates a call to the SUBSTR function with a string literal, interger and integer expression parameters + a string literal parameter + 1-based start index + substring length - + - Gets the Variables in the Set + Creates a call to the SUBSTR function with a variable, interger expression and integer expression parameters + a SPARQL variable + 1-based start index + substring length - + - Joins the set to another set + Creates a call to the SUBSTR function with a variable, interger and a numeric expression parameters - Other Set - + a SPARQL variable + 1-based start index + substring length - + - Copies the Set + Creates a call to the SUBSTR function with two variable parameters - + a SPARQL variable + a SPARQL variable + substring length - + - Abstract Base Class for implementations of the ISet interface + Creates a call to the SUBSTR function with a string literal, interger expression and a numeric expression parameters + a string literal parameter + 1-based start index + substring length - + - Adds a Value for a Variable to the Set + Creates a call to the SUBSTR function with a string literal, interger expression and a variable parameters - Variable - Value + a string literal parameter + a SPARQL variable + substring length - + - Checks whether the Set contains a given Variable + Creates a call to the SUBSTR function with a string literal, interger and a variable 
parameters - Variable - + a string literal parameter + 1-based start index + substring length - + - Gets whether the Set is compatible with a given set based on the given variables + Creates a call to the SUBSTR function with a variable, interger expression and a variable parameters - Set - Variables - + a SPARQL variable + 1-based start index + substring length - + - Gets whether the Set is minus compatible with a given set based on the given variables + Creates a call to the SUBSTR function with a variable, interger and a variable parameters - Set - Variables - + a SPARQL variable + 1-based start index + substring length - + - Gets/Sets the ID of the Set + Creates a call to the SUBSTR function with three variable parameters + a SPARQL variable + a SPARQL variable + substring length - + - Removes a Value for a Variable from the Set + Creates a call to the LANGMATCHES function - Variable - + - Retrieves the Value in this set for the given Variable + Creates a call to the LANGMATCHES function - Variable - Either a Node or a null - + - Gets the Values in the Set + Creates a call to the LANGMATCHES function - + - Gets the Variables in the Set + Creates a call to the LANGMATCHES function - + - Joins the set to another set + Creates a call to the LANGMATCHES function - Other Set - - + - Copies the Set + Creates a call to the LANGMATCHES function - - + - Gets whether the Set is equal to another set + Creates a call to the isIRI function with an expression parameter - Set to compare with - + any SPARQL expression - + - Gets whether the Set is equal to another object + Creates a call to the isIRI function with a variable parameter - Object to compare with - + name of variable to check - + - Gets the Hash Code of the Set + Creates a call to the isBlank function with an expression parameter - + any SPARQL expression - + - Gets the String representation of the Set + Creates a call to the isBlank function with a variable parameter - + name of variable to check - + - Represents 
a BGP which is a set of Triple Patterns + Creates a call to the isLiteral function with an expression parameter - - - A Lazy BGP differs from a BGP in that rather than evaluating each Triple Pattern in turn it evaluates across all Triple Patterns. This is used for queries where we are only want to retrieve a limited number of solutions - - - A Lazy BGP can only contain concrete Triple Patterns and/or FILTERs and not any of other the specialised Triple Pattern classes - - + any SPARQL expression - + - Creates a Streamed BGP containing a single Triple Pattern + Creates a call to the isLiteral function with a variable parameter - Triple Pattern + name of variable to check - + - Creates a Streamed BGP containing a set of Triple Patterns + Creates a call to the isNumeric function with an expression parameter - Triple Patterns + any SPARQL expression - + - Creates a Streamed BGP containing a single Triple Pattern + Creates a call to the isNumeric function with a variable parameter - Triple Pattern - The number of Results the BGP should attempt to return + name of variable to check - + - Creates a Streamed BGP containing a set of Triple Patterns + Creates a call to the STR function with a variable parameter - Triple Patterns - The number of Results the BGP should attempt to return + a SPARQL variable - + - Evaluates the BGP against the Evaluation Context + Creates a call to the STR function with a literal expression parameter - Evaluation Context - + a SPARQL literal expression - + - Gets the String representation of the Algebra + Creates a call to the STR function with an variable parameter - + an RDF IRI term - + - Represents a Union + Creates a call to the LANG function with a variable parameter - - - A Lazy Union differs from a standard Union in that if it finds sufficient solutions on the LHS it has no need to evaluate the RHS - - + a SPARQL variable - + - Creates a new Lazy Union + Creates a call to the LANG function with a literal expression parameter - LHS Pattern 
- RHS Pattern + a SPARQL literal expression - + - Creates a new Lazy Union + Creates a call to the DATATYPE function with a literal expression parameter - LHS Pattern - RHS Pattern - The number of results that the Union should attempt to return + a SPARQL literal expression + depending on will use a different flavour of datatype function - + - Evaluates the Lazy Union + Creates a parameterless call to the BNODE function - Evaluation Context - - + - Gets the Variables used in the Algebra + Creates a call to the BNODE function with a simple literal parameter + a SPARQL simple literal - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Creates a call to the BNODE function with a string literal parameter + a SPARQL string literal - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Creates a call to the STRDT function with a simple literal and a IRI expression parameters + a SPARQL simple literal + datatype IRI - + - Gets the LHS of the Join + Creates a call to the STRDT function with a simple literal and a parameters + a SPARQL simple literal + datatype IRI - + - Gets the RHS of the Join + Creates a call to the STRDT function with a simple literal and a variable parameters + a SPARQL simple literal + datatype IRI - + - Gets the String representation of the Algebra + Creates a call to the STRDT function with a simple literal and a IRI expression parameters - + a literal + datatype IRI - + - Converts the Algebra back to a SPARQL Query + Creates a call to the STRDT function with a simple literal and a IRI expression parameters - + a literal + datatype IRI - + - Converts the Union back to Graph Patterns + Creates a call to the STRDT function with a simple literal and a parameters - + a literal + datatype IRI - + - Transforms both sides of the Join using the given Optimiser + Creates a call to the STRDT function with a variable and a 
parameters - Optimser - + a literal + datatype IRI - + - Transforms the LHS of the Join using the given Optimiser + Creates a call to the STRDT function with a variable and a parameters - Optimser - + a literal + datatype IRI - + - Transforms the RHS of the Join using the given Optimiser + Creates a call to the STRDT function with a variable and a IRI expression parameters - Optimser - + a literal + datatype IRI - + - Represents a Distinct modifier on a SPARQL Query + Creates a call to the UUID function - + - Creates a new Distinct Modifier + Creates a call to the StrUUID function - Pattern - + - Creates a new Distinct Modifier + Creates a call to the BOUND function with a variable parameter - Inner Algebra - Whether to ignore temporary variables + a SPARQL variable - + - Evaluates the Distinct Modifier + Creates a call to the BOUND function with a variable parameter - Evaluation Context - + a SPARQL variable name - + - Gets the Variables used in the Algebra + Creates a call to the IF function with an expression for the first parameter + conditional clause expression - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Creates a call to the IF function with a variable for the first parameter + conditional clause variable expression - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Creates a call of the COALESCE function with a variable number of expression parameters + SPARQL expressions - + - Gets the Inner Algebra + Creates a call of the EXISTS function + a function, which will create the graph pattern parameter - + - Gets the String representation of the Algebra + Creates a call of the SAMETERM function with two expression parameters - + a SPARQL expression + a SPARQL expression - + - Converts the Algebra back to a SPARQL Query + Creates a call of the SAMETERM function with variable and expression parameters - + a variable name + a SPARQL expression - + - Throws an exception since a Distinct() cannot be converted back to a Graph Pattern + Creates a call of the SAMETERM function with expression and variable parameters - - Thrown since a Distinct() cannot be converted to a Graph Pattern + a SPARQL expression + a variable name - + - Transforms the Inner Algebra using the given Optimiser + Creates a call of the SAMETERM function with two variable parameters - Optimiser - + a variable name + a variable name - + - Represents a Reduced modifier on a SPARQL Query + SPARQL syntax verions to use when creating expressions - + - Creates a new Reduced Modifier + Creates a SPARQL variable - Pattern - + - Evaluates the Reduced Modifier + Creates a string constant - Evaluation Context - - + - Gets the Variables used in the Algebra + Creates a numeric constant - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Creates a numeric constant - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Creates a numeric constant - + - Gets the Inner Algebra + Creates a numeric constant - + - Gets the String representation of the Algebra + Creates a boolean constant - - + - Converts the Algebra back to a SPARQL Query + Creates a numeric constant - - + - Throws an exception since a Reduced() cannot be converted back to a Graph Pattern + Creates a numeric constant - - Thrown since a Reduced() cannot be converted to a Graph Pattern - + - Transforms the Inner Algebra using the given Optimiser + Creates a numeric constant - Optimiser - - + - Represents a Filter + Creates a datetime constant - + - Creates a new Filter + Creates an IRI constant - Algebra the Filter applies over - Filter to apply - + - Applies the Filter over the results of evaluating the inner pattern + Builds a SPARQL constructor function call - Evaluation Context - - + - Gets the Variables used in the Algebra + Interface for building SPARQL queries - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Gets the query type of the generated SPARQL query. - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Gets the builder associated with the root graph pattern. - + - Gets the Filter to be used + Gets the prefix manager, which allows adding prefixes to the query or graph pattern - + - Gets the Inner Algebra + Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) - + - Gets the String representation of the FILTER + Applies a LIMIT - + Limit value. 
Pass negative to disable LIMIT - + - Converts the Algebra back to a SPARQL Query + Applies an OFFSET - - + - Converts the Algebra back to a Graph Pattern + Adds ascending ordering by a variable to the query - - + - Transforms the Inner Algebra using the given Optimiser + Adds ascending ordering by a variable to the query - Optimiser - - + - Represents a GRAPH clause + Adds descending ordering by a variable to the query - + - Creates a new Graph clause + Adds descending ordering by a variable to the query - Pattern - Graph Specifier - + - Evaluates the Graph Clause by setting up the dataset, applying the pattern and then generating additional bindings if necessary + Adds ascending ordering by an expression to the query - Evaluation Context - - + - Gets the Variables used in the Algebra + Adds descending ordering by an expression to the query - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Adds a GROUP BY clause to the query. - + - Gets the Graph Specifier + Adds a GROUP BY clause to the query. - + - Gets the Inner Algebra + Adds a GROUP BY clause to the query. - + - Gets the String representation of the Algebra + Adds a HAVING clause to the query. - - + - Converts the Algebra back to a SPARQL Query + Builds and returns a - - + - Converts the Algebra back to a Graph Pattern + Adds a BIND variable assignment to the root graph pattern - - + - Transforms the Inner Algebra using the given Optimiser + Factory interface for getting an - Optimiser - - + - Represents a Grouping + Creates a - + - Creates a new Group By + Interface for creating SELECT queries - Pattern - Grouping to use - Aggregates to calculate - + - Evaluates a Group By by generating a GroupMultiset from the Input Multiset + Adds additional SELECT return - SPARQL Evaluation Context - - + - Gets the Variables used in the Algebra + Adds additional SELECT return - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Adds additional SELECT expression - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) - + - Gets the Inner Algebra + Provides methods for building triple patterns - + - Gets the Grouping that is used + Sets a variable as - - If the Query supplied in the SparqlEvaluationContext is non-null and has a GROUP BY clause then that is applied rather than the clause with which the GroupBy algebra is instantiated - - + - Gets the Aggregates that will be applied + Sets a variable as - + - Gets the String representation of the + Depending on the generic parameter type, sets a literal, a QName or a blank node as - + Either a variable name, a literal, a QName or a blank node identifier + A relevant prefix/base URI must be added to to accept a QName - + - Converts the Algebra back to a SPARQL Query + Depending on the 's type, sets a literal, a QName or a blank node as - + A relevant prefix/base URI must be added to to accept a QName - + - Throws an exception since GroupBy() cannot be converted to a Graph Pattern + Sets a as - - Thrown since GroupBy() cannot be converted to a GraphPattern - + - Transforms the Inner Algebra using the given Optimiser + Sets a as - Optimiser - - + - Multiset which represents a Grouping of Sets from another Multiset + Class responsible for creating s - + - Creates a new Group Multiset + Provides methods for building queries with a fluent style API - Multiset which contains the sets that are being grouped + + + A is mutable by definition so calling any of the extension methods in this API will cause the existing query it is called on to be changed. 
You can call on an existing query to create a new copy if you want to make different queries starting from the same base query + + - + - Gets the enumeration of the Groups in the Multiset + Gets or sets the namespace mappings for the SPARQL query being built - + - Gets the enumeration of the IDs of Sets in the group with the given ID + Creates a new ASK query - Group ID - - + - Gets the Group with the given ID + Creates a new CONSTRUCT query - Group ID - - + - Adds a Group to the Multiset + Creates a new CONSTRUCT WHERE query - - + - Adds a Set to the Group Multiset + Creates a new SELECT * query - Set - Thrown since this action is invalid on a Group Multiset - + - Gets the Multiset which contains the Sets who are the members of the Groups this Multiset represents + Creates a new SELECT query which will return the given + + query result variables - + - Represents a Having Clause + Creates a new SELECT query which will return the given + + query result variables - + - Creates a new Having Clause + Creates a new SELECT query which will return an expression - Pattern - Having Clause - + - Evaluates the Having Clause + Creates a new query, which will DESCRIBE the given - Evaluation Context - - + - Gets the Variables used in the Algebra + Creates a new query, which will DESCRIBE the given - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Applies a LIMIT + Limit value. 
Pass negative to disable LIMIT - + - Gets the Inner Algebra + Applies an OFFSET - + - Gets the HAVING clause used + Adds ascending ordering by a variable to the query - - If the Query supplied in the SparqlEvaluationContext is non-null and has a HAVING clause then that is applied rather than the clause with which the Having algebra is instantiated - - + - Gets the String representation of the Algebra + Adds ascending ordering by a variable to the query - - + - Converts the Algebra back to a SPARQL Query + Adds descending ordering by a variable to the query - - + - Throws an exception since a Having() cannot be converted back to a Graph Pattern + Adds descending ordering by a variable to the query - - Thrown since a Having() cannot be converted to a Graph Pattern - + - Transforms the Inner Algebra using the given Optimiser + Adds ascending ordering by an expression to the query - Optimiser - - + - Represents the Identity Multiset + Adds descending ordering by an expression to the query - + + + + + + + + + + + + + + + + + + + - Joins the Multiset to another Multiset + Build a simple DESCRIBE query without the WHERE part. - Other Multiset - - The other Multiset - - + - Left Joins the Multiset to another Multiset + Add a group graph pattern or a sub query to the query. - Other Multiset - Expression which the Join is predicated on - The other Multiset + - + - Exists Joins the Multiset to another Multiset + Add a group graph pattern or a sub query to the query. 
- Other Multiset - Whether solutions must exist in the Other Multiset for the Join to suceed - + - + - Minus Joins this Multiset to another Multiset + Adds additional SELECT - Other Multiset - - + - Generates the Product of this Set and another Multiset + Adds additional SELECT expression - Other Multiset - The other Multiset - + - Generates the Union of this Set and another Multiset + Adds additional SELECT - Other Multiset - The other Multiset - + - Returns True since the Identity Multiset is considered to contain all values + Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) - Variable - Value - - + - Returns False since the Identity Multiset contains no Variables + Provides methods for casting expressions to XPath types - Variable - - + - Returns False since the Identity Multiset is not disjoint with anything + Creates a cast to xsd:integer - Other Multiset - - + - Adds a Set to the Multiset + Creates a cast to xsd:double - Set - Thrown since this operation is invalid on an Identity Multiset - + - Adds a Variable to the Multiset + Creates a cast to xsd:decimal - Variable - Thrown since this operation is invalid on an Identity Multiset - + - Sets the variable ordering for the multiset + Creates a cast to xsd:dateTime - Variable Ordering - + - Removes a Set to the Multiset + Creates a cast to xsd:float - Set ID - Thrown since this operation is invalid on an Identity Multiset - + - Returns false as the Identity Multiset is not considered empty + Creates a cast to xsd:boolean - + - Returns an empty enumerable as the Identity Multiset contains no Variables + Creates a cast to xsd:string - + - Returns an empty enumerable as the Identity Multiset contains no Sets + Class responsible for setting the object part of triple patterns - + - Returns an empty enumerable as the Identity Multiset contains no Sets + Sets a SPARQL variable as - + - Gets the Set with the given ID + 
Sets a SPARQL variable as - Set ID - - Thrown since the Identity Multiset contains no Sets - + - Interface for classes that represent the SPARQL Algebra and are used to evaluate queries + Depending on the generic parameter type, sets a literal, a QName or a blank node as + Either a variable name, a literal, a QName or a blank node identifier + A relevant prefix/base URI must be added to to accept a QName - + - Evaluates the Algebra in the given Context + Depending on the 's type, sets a literal, a QName or a blank node as - Evaluation Context - - + - Gets the enumeration of Variables used in the Algebra + Sets a as - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Sets a plain literal as - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Sets a literal with language tag as - + - Converts the Algebra back to a SPARQL Query + Sets a typed literal as - - Thrown if the given Algebra cannot be converted to a SPARQL Query - + - Converts the Algebra back to a Graph Pattern + Sets a as - - Thrown if the given Algebra cannot be converted to a Graph Pattern - + - Interface for SPARQL Algebra constructs which are unary operators i.e. they apply over a single inner Algebra + Class responsible for setting the predicate part of triple patterns - + - Gets the Inner Algebra + Sets a SPARQL variable as - + - Transforms the Inner Algebra using the given Optimiser + Sets a SPARQL variable as - Optimiser - - - The operator should retain all it's existing properties and just return a new version of itself with the inner algebra having had the given optimiser applied to it - - + - Marker Interface for SPARQL Algebra constructs which are terminal operators i.e. 
they contain no inner algebra operators + Sets a as - + - Represents an Algebra construct which is a BGP + Sets a as - + - Gets the Number of Patterns in the BGP + Sets a as using a QName + A relevant prefix/base URI must be added to - + - Gets the Triple Patterns in the BGP + Sets a as using a - + - Represents an Algebra construct which is a Filter + Represents the level of Query Explanation that is desired - + - Gets the Filter + Specifies No Explanations - + - Represents an Algebra construct which is an Abstract Join (i.e. any kind of Join over two algebra operators) + Specifies Explanations are output to Debug - - Specific sub-interfaces are used to mark specific kinds of Join - - + - Gets the LHS of the Join + Specifies Explanations are output to Trace - + - Gets the RHS of the Join + Specifies Explanations are output to Console Standard Output - + - Transforms both sides of the Join using the given Optimiser + Specifies Explanations are output to Console Standard Error - Optimser - - - The join should retain all it's existing properties and just return a new version of itself with the two sides of the join having had the given optimiser applied to them - - + - Transforms the LHS of the Join using the given Optimiser + Specifies Explanations are output to Debug and Console Standard Output - Optimser - - - The join should retain all it's existing properties and just return a new version of itself with LHS side of the join having had the given optimiser applied to them - - + - Transforms the RHS of the Join using the given Optimiser + Specifies Explanations are output to all - Optimser - - - The join should retain all it's existing properties and just return a new version of itself with RHS side of the join having had the given optimiser applied to them - - + - Represents an Algebra construct which is a Join + Show the Thread ID of the Thread evaluating the query (useful in multi-threaded environments) - + - Represents an Algebra construct which is a Left Join + 
Show the Depth of the Algebra Operator - + - Gets the Filter used on the Join + Show the Type of the Algebra Operator - + - Represents an Algebra construct which is a Union + Show the Action being performed (makes it clear whether the explanation marks the start/end of an operation) - + - Represents an Algebra construct which is a Minus + Shows Timings for the Query - + - Represents an Algebra construct which is an Exists Join + Show Intermediate Result Counts at each stage of evaluation - + - Gets whether the Join requires compatible solutions to exist on the RHS + Shows Basic Information (Depth, Operator and Action) - + - Represents a LeftJoin predicated on the existence/non-existence of joinable sets on the RHS for each item on the LHS + Shows Default Information (Thread ID, Depth, Operator and Action) - + - Creates a new Exists Join + Shows All Information - LHS Pattern - RHS Pattern - Whether a joinable set must exist on the RHS for the LHS set to be preserved - + - Evaluates an ExistsJoin + Shows an analysis of BGPs prior to evaluating them - Evaluation Context - + + This lets you see how many joins, cross products, filters, assignments etc must be applied in each BGP + - + - Gets the Variables used in the Algebra + Shows an analysis of Joins prior to evaluating them + + This lets you see whether the join is a join/cross product and in the case of a Minus whether the RHS can be ignored completely + - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Shows an analysis of Named Graphs used by a Graph clause prior to evaluating them + + This lets you see how many graphs a given Graph clause will operate over. As the Graph clause in SPARQL is defined as the union of evaluating the inner operator over each named graph in the dataset graph clauses applied to datasets with many named graphs can be expensive. + - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Sets whether Evaluation should be simulated (means timings will not be accurate but allows you to explain queries without needing actual data to evaluate them against) - + - Gets the LHS of the Join + Shows all analysis information - + - Gets the RHS of the Join + Basic Explanation Level (Console Standard Output and Basic Information) - + - Gets whether this is an EXISTS join + Default Explanation Level (Default Outputs and Default Information) - + - Gets the String representation of the Algebra + Detailed Explanation Level (Default Outputs and All Information) - - + - Converts the Algebra back to a SPARQL Query + Full Explanation Level (All Outputs, All Information and All Analysis) - - + - Converts the Algebra back to a Graph Pattern + Basic Explanation Level with Query Evaluation simulated - - + - Transforms both sides of the Join using the given Optimiser + Default Explanation Level with Query Evaluation simulated - Optimser - - + - Transforms the LHS of the Join using the given Optimiser + Detailed Explanation Level with Query Evaluation simulated - Optimser - - + - Transforms the RHS of the Join using the given Optimiser + Full Explanation Level with Query Evaluation simulated - Optimser - - + - Represents a LeftJoin predicated on an arbitrary filter expression + A Query Processor which evaluates queries while printing explanations to any/all of Debug, Trace, Console Standard Output and Console Standard Error - + - Creates a new LeftJoin where there is no Filter over the join + Creates a new Explain Query Processor that will use the Default Explanation Level - LHS Pattern - RHS Pattern + Dataset - + - Creates a new LeftJoin where there is a Filter over the join + Creates a new Explain Query Processor with the desired Explanation Level - LHS Pattern - RHS Pattern - Filter to decide which RHS solutions are valid + Dataset + Explanation Level - + - Evaluates the LeftJoin + Creates a new Explain Query 
Processor that will use the Default Explanation Level - Evaluation Context - + Triple Store - + - Gets the Variables used in the Algebra + Creates a new Explain Query Processor with the desired Explanation Level + Triple Store + Explanation Level - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Gets/Sets the Explanation Level - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Determines whether a given Flag is present + Flag + - + - Gets the Filter that applies across the Join + Prints Analysis + Algebra + SPARQL Evaluation Context - + - Gets the LHS of the Join + Prints BGP Analysis + Analysis - + - Gets the RHS of the Join + Prints Join Analysis + Join - + - Gets the String representation of the Algebra + Prints Expalantions - + StringBuilder to output to - + - Converts the Algebra back to a SPARQL Query + Prints Explanations - + String to output - + - Converts the Algebra back to a SPARQL Query + Explains the start of evaluating some algebra operator - + Algebra + Context - + - Transforms both sides of the Join using the given Optimiser + Explains the evaluation of some action - Optimser - + Algebra + Context + Action - + - Transforms the LHS of the Join using the given Optimiser + Explains the end of evaluating some algebra operator - Optimser - + Algebra + Context - + - Transforms the RHS of the Join using the given Optimiser + Explains and evaluates some algebra operator - Optimser + Algebra Operator Type + Algebra + Context + Evaluator Function - + - Represents a Join + Processes an Ask + Ask + SPARQL Evaluation Context - + - Creates a new Join + Processes a BGP - Left Hand Side - Right Hand Side + BGP + SPARQL Evaluation Context - + - Creates either a Join or returns just one of the sides of the Join if one side is the empty BGP + Processes a Bindings modifier - Left Hand Side - Right Hand Side - + Bindings + 
SPARQL Evaluation Context - + - Evalutes a Join + Processes a Distinct modifier - Evaluation Context - + Distinct modifier + SPARQL Evaluation Context - + - Gets the Variables used in the Algebra + Processes an Exists Join + Exists Join + SPARQL Evaluation Context - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Processes an Extend + Extend + SPARQL Evaluation Context - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Processes a Filter + Filter + SPARQL Evaluation Context - + - Gets the LHS of the Join + Processes a Graph + Graph + SPARQL Evaluation Context - + - Gets the RHS of the Join + Processes a Group By + Group By + SPARQL Evaluation Context - + - Gets the String representation of the Join + Processes a Having - + Having + SPARQL Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Join - + Join + SPARQL Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a LeftJoin - + Left Join + SPARQL Evaluation Context - + - Transforms both sides of the Join using the given Optimiser + Processes a Minus - Optimser - + Minus + SPARQL Evaluation Context - + - Transforms the LHS of the Join using the given Optimiser + Processes a Negated Property Set - Optimser + Negated Property Set + SPARQL Evaluation Context - + - Transforms the RHS of the Join using the given Optimiser + Processes a Null Operator - Optimser + Null Operator + SPARQL Evaluation Context - + - Represents a Union + Processes a One or More Path + Path + SPARQL Evaluation Context + - + - Creates a new Union + Processes an Order By - LHS Pattern - RHS Pattern + + SPARQL Evaluation Context - + - Evaluates the Union + Processes a Property Path - + Path + SPARQL Evaluation Context - - - Gets the Variables used in the Algebra - - - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Processes a Reduced modifier + Reduced modifier + SPARQL Evaluation Context - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Processes a Select + Select + SPARQL Evaluation Context - + - Gets the LHS of the Join + Processes a Select Distinct Graphs + Select Distinct Graphs + SPARQL Evaluation Context - + - Gets the RHS of the Join + Processes a Service + Service + SPARQL Evaluation Context - + - Gets the String representation of the Algebra + Processes a Slice modifier - + Slice modifier + SPARQL Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Subquery + Subquery + SPARQL Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Union - + Union + SPARQL Evaluation Context - + - Transforms both sides of the Join using the given Optimiser + Processes a Unknown Operator - Optimser - + Unknown Operator + SPARQL Evaluation Context - + - Transforms the LHS of the Join using the given Optimiser + Processes a Zero Length Path - Optimser + Path + SPARQL Evaluation Context - + - Transforms the RHS of the Join using the given Optimiser + Processes a Zero or More Path - Optimser + Path + SPARQL Evaluation Context - - - Represents a Negated Property Set in the SPARQL Algebra - - - + - Creates a new Negated Property Set + A Class for connecting to multiple remote SPARQL Endpoints and federating queries over them with the data merging done locally - Path Start - Path End - Negated Properties - Whether this is a set of Inverse Negated Properties + + + Queries are federated by executing multiple requesting simultaneously and asynchronously against the endpoints in question with the data then merged locally. The merging process does not attempt to remove duplicate data it just naively merges the data. 
+ + - + - Creates a new Negated Property Set + Creates a new Federated SPARQL Endpoint using a given Endpoint - Path Start - Path End - Negated Properties + Endpoint - + - Gets the Path Start + Creates a new Federated SPARQL Endpoint using the given Endpoints + Endpoints - + - Gets the Path End + Creates a new Federated SPARQL Endpoint by creating a new SparqlRemoteEndpoint for the given URI + Endpoint URI - + - Gets the Negated Properties + Creates a new Federated SPARQL Endpoint by creating a SparqlRemoteEndpoint for each of the given URI + Endpoint URIs - + - Gets whether this is a set of Inverse Negated Properties + Adds a additional endpoint to be used by this endpoint + Endpoint - + - Evaluates the Negated Property Set + Adds an additional endpoint to be used by this endpoint - SPARQL Evaluation Context - + Endpoint URI - + - Gets the Variables used in the Algebra + Removes a given endpoint from this endpoint + Endpoint - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Removes all endpoints with the given URI from this endpoint + Endpoint URI - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Gets/Sets whether a failed request on one endpoint should cause the entire request to fail + + + By default if a request on any of the endpoint fails or times out then the entire request will fail + + - + - Transforms the Algebra back into a SPARQL QUery + Gets/Sets the maximum number of endpoints this endpoint will issue queries to at any one time - - + - Transforms the Algebra back into a Graph Pattern + Makes a Query to a Sparql Endpoint and returns the raw Response + Sparql Query String + Thrown if more than one endpoint is in use since for any federated endpoint which used more than one endpoint there is no logical/sensible way to combine the result streams - + - Gets the String representation of the Algebra + Makes a Query to a Sparql Endpoint and returns the raw Response + Sparql Query String + MIME Types to use for the Accept Header + Thrown if more than one endpoint is in use since for any federated endpoint which used more than one endpoint there is no logical/sensible way to combine the result streams - + - Represents a part of the algebra that has been determined to not return any results in advance and so can be replaced with this operator which always returns null + Makes a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + SPARQL Query String + RDF Graph - Primarily intended for use with Algebra Optimisers which are rewriting the algebra to run against an out of memory dataset (e.g. SQL based) where it may be easily possible to determine if a triple pattern will match in advance of actually returning the matches. + + The query is executed by sending it federating it to all the endpoints this endpoint contains using simultaneous asychronous calls. Once these calls complete the results are naivley merged together (no duplicate data removal) and returned as a single result. 
+ + + By default if any of the endpoints used return an error then the entire query will fail and an exception will be thrown, this behaviour can be overridden by setting the IgnoreFailedRequests property to be true in which case the result will be the merge of the results from all endpoints which successfully provided a result. + + Thrown if any of the requests to the endpoints fail + Thrown if not all the requests complete within the set timeout - - - Creates a new Null Operator - - Variables in the algebra that this null is replacing - - + - Evaluates the Null operator which of course always returns a NullMultiset + Makes a Query where the expected result is a Graph i.e. a CONSTRUCT or DESCRIBE query - Evaluation Context - + RDF Handler to process the results + SPARQL Query - + - Gets the variables used in this algebra + Makes a Query where the expected Result is a SparqlResultSet ie. SELECT and ASK Queries + Sparql Query String + A Sparql Result Set + + + The query is executed by sending it federating it to all the endpoints this endpoint contains using simultaneous asychronous calls. Once these calls complete the results are naivley merged together (no duplicate data removal) and returned as a single result. + + + By default if any of the endpoints used return an error then the entire query will fail and an exception will be thrown, this behaviour can be overridden by setting the IgnoreFailedRequests property to be true in which case the result will be the merge of the results from all endpoints which successfully provided a result. + + + Thrown if any of the requests to the endpoints fail + Thrown if not all the requests complete within the set timeout - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Makes a Query where the expected Result is a SparqlResultSet ie. 
SELECT and ASK Queries + Results Handler to process the results + SPARQL Query String + Thrown if any of the requests to the endpoints fail + Thrown if not all the requests complete within the set timeout - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Serializes the Endpoint's Configuration + Configuration Serialization Context - + - Throws an error since a null operator cannot be transformed back into a query + Interface for SPARQL Query Processors - + + + A SPARQL Query Processor is a class that knows how to evaluate SPARQL queries against some data source to which the processor has access + + + The point of this interface is to allow for end users to implement custom query processors or to extend and modify the behaviour of the default Leviathan engine as required. + + - + - Throws an error since a null operator cannot be transformed back into a query + Processes a SPARQL Query returning a IGraph instance or a SparqlResultSet depending on the type of the query - + SPARQL Query + + Either an IGraph instance of a SparqlResultSet depending on the type of the query + - + - Gets the string representation of the algebra + Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate - + RDF Handler + Results Handler + SPARQL Query - + - Represents a One or More Path (e.g. 
rdf:type+) in the SPARQL Algebra + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback - + - Creates a new One or More Path + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes - Path Start - Path End - Path + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback - + - Evaluates the One or More Path + Interface for SPARQL Query Algebra Processors - SPARQL Evaluation Context - + + A SPARQL Query Algebra Processor is a class which knows how to evaluate the + + Type of intermediate results produced by processing an Algebra operator + Type of context object providing evaluation context - + - Gets the String representation of the Algebra + Processes SPARQL Algebra - + Algebra + Evaluation Context - + - Transforms the Algebra back into a Graph Pattern + Processes an Ask - + Ask + Evaluation Context - + - Represents a Union which will be evaluated in parallel + Processes a BGP + BGP + Evaluation Context - + - Creates a new Union + Processes a Bindings modifier - LHS Pattern - RHS Pattern + Bindings + Evaluation Context - + - Evaluates the Union + Processes a Distinct modifier - - + Distinct modifier + Evaluation Context - + - Gets the Variables used in the Algebra + Processes an Exists Join + Exists Join + Evaluation Context - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Processes an Extend + Extend + Evaluation Context + - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Processes a Filter + Filter + Evaluation Context - + - Gets the LHS of the Join + Processes a Graph + Graph + Evaluation Context - + - Gets the RHS of the Join + Processes a Group By + Group By + Evaluation Context - + - Gets the String representation of the Algebra + Processes a Having - + Having + Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Join - + Join + Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a LeftJoin - + Left Join + Evaluation Context - + - Transforms both sides of the Join using the given Optimiser + Processes a Minus - Optimser - + Minus + Evaluation Context - + - Transforms the LHS of the Join using the given Optimiser + Processes a Negated Property Set - Optimser + Negated Property Set + Evaluation Context - + - Transforms the RHS of the Join using the given Optimiser + Processes a Null Operator - Optimser + Null Operator + Evaluation Context - + - Represents a Join which will be evaluated in parallel + Processes a One or More Path + Path + Evaluation Context + - + - Creates a new Join + Processes an Order By - Left Hand Side - Right Hand Side + + Evaluation Context - + - Evalutes a Join + Processes a Property Path + Path Evaluation Context - + - Gets the Variables used in the Algebra + Processes a Reduced modifier + Reduced modifier + Evaluation Context - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Processes a Select + Select + Evaluation Context - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Processes a Select Distinct Graphs + Select Distinct Graphs + Evaluation Context - + - Gets the LHS of the Join + Processes a Service + Service + Evaluation Context - + - Gets the RHS of the Join + Processes a Slice modifier + Slice modifier + Evaluation Context - + - Gets the String representation of the Join + Processes a subquery + Subquery + Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Union - + Union + Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes an Unknown Operator + Algebra + Evaluation Context - + - Transforms both sides of the Join using the given Optimiser + Processes a Zero Length Path - Optimser + Path + Evaluation Context - + - Transforms the LHS of the Join using the given Optimiser + Processes a Zero or More Path - Optimser + Path + Evaluation Context - + - Transforms the RHS of the Join using the given Optimiser + Default SPARQL Query Processor provided by the library's Leviathan SPARQL Engine - Optimser - + + + The Leviathan Query Processor simply invokes the Evaluate method of the SPARQL Algebra it is asked to process + + + In future releases much of the Leviathan Query engine logic will be moved into this class to make it possible for implementors to override specific bits of the algebra processing but this is not possible at this time + + - + - Represents an arbitrary property path in the algebra (only used when strict algebra is generated) + Creates a new Leviathan Query Processor + Triple Store - + - Creates a new Property Path operator + Creates a new Leviathan Query Processor - Path Start - Path Expression - Path End + SPARQL Dataset - + - Evaluates the Path in the given context + Processes a SPARQL Query - Evaluation Context + SPARQL Query - + - Converts the algebra back into a Graph Pattern + Processes a SPARQL Query sending the results to a RDF/SPARQL Results handler as appropriate - + RDF Handler + Results 
Handler + SPARQL Query - + - Gets the string representation of the algebra + Delegate used for asychronous execution - + RDF Handler + Results Handler + SPARQL Query - + - Interface for Property Path Operators + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback + + In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. + - + - Gets the Path Start + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback + + In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. 
+ - + - Gets the Path End + Creates a new Evaluation Context + - + - Gets the Property Path + Creates a new Evaluation Context for the given Query + Query + - + - Abstract Base Class for Path Operators + Gets the Query Processor for a Context + - + - Creates a new Path Operator + Processes SPARQL Algebra - Path Start - Property Path - Path End + Algebra + SPARQL Evaluation Context - + - Gets the Path Start + Processes an Ask + Ask + SPARQL Evaluation Context - + - Gets the Path End + Processes a BGP + BGP + SPARQL Evaluation Context - + - Gets the Property Path + Processes a Bindings modifier + Bindings + SPARQL Evaluation Context - + - Evaluates the Property Path + Processes a Distinct modifier + Distinct modifier SPARQL Evaluation Context - - + - Gets the Variables used in the Algebra + Processes an Extend + Extend + SPARQL Evaluation Context - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Processes an Exists Join + Exists Join + SPARQL Evaluation Context - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Processes a Filter + Filter + SPARQL Evaluation Context - + - Transforms the Algebra back into a Query + Processes a Graph - + Graph + SPARQL Evaluation Context - + - Transforms the Algebra back into a Graph Pattern + Processes a Group By - + Group By + SPARQL Evaluation Context - + - Gets the String representation of the Algebra + Processes a Having - + Having + SPARQL Evaluation Context - + - Abstract Base Class for Arbitrary Length Path Operators + Processes a Join + Join + SPARQL Evaluation Context - + - Creates a new Arbitrary Lengh Path Operator + Processes a LeftJoin - Path Start - Path End - Property Path + Left Join + SPARQL Evaluation Context - + - Determines the starting points for Path evaluation + Processes a Minus - Evaluation Context - Paths - Whether to evaluate Paths in reverse + Minus + SPARQL Evaluation Context - + - Evaluates a setp of the Path + Processes a Negated Property Set - Context - Paths - Whether to evaluate Paths in reverse + Negated Property Set + SPARQL Evaluation Context - - - Abstract Base Class for specialised Filters which restrict the value of a variable to some values - - - - - Creates a new Variable Restriction Filter - - Algebra the filter applies over - Variable to restrict on - Filter to use - - + - Evalutes the algebra for the given evaluation context + Processes a Null Operator - Evaluation Context + Null Operator + SPARQL Evaluation Context - - - Gets the Variable that this filter restricts the value of - - - + - Gets the Variables used in the Algebra + Processes a One or More Path + Path + SPARQL Evaluation Context + - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Processes an Order By + + SPARQL Evaluation Context - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Processes a Property Path + Path + SPARQL Evaluation Context + - + - Gets the Filter to be used + Processes a Reduced modifier + Reduced modifier + SPARQL Evaluation Context - + - Gets the Inner Algebra + Processes a Select + Select + SPARQL Evaluation Context - + - Gets the String representation of the FILTER + Processes a Select Distinct Graphs - + Select Distinct Graphs + SPARQL Evaluation Context - + - Converts the Algebra back to a SPARQL Query + Processes a Service - + Service + SPARQL Evaluation Context - + - Converts the Algebra back to a Graph Pattern + Processes a Slice modifier - + Slice modifier + SPARQL Evaluation Context - + - Transforms the Inner Algebra using the given Optimiser + Processes a Subquery - Optimiser + Subquery + SPARQL Evaluation Context - + - Abstract Base Class for specialised Filters which restrict the value of a variable to a single value + Processes a Union + Union + SPARQL Evaluation Context - + - Creates a new Single Value Restriction Filter + Processes a Unknown Operator - Algebra the filter applies over - Variable to restrict on - Value to restrict to - Filter to use + Unknown Operator + SPARQL Evaluation Context - + - Gets the Value Restriction which this filter applies + Processes a Zero Length Path + Path + SPARQL Evaluation Context + - + - Applies the Filter over the results of evaluating the inner pattern + Processes a Zero or More Path - Evaluation Context + Path + SPARQL Evaluation Context - + - Represents a special case Filter where the Filter restricts a variable to just one value i.e. 
FILTER(?x = <value>) + A SPARQL Query Processor which processes queries by parsing them to the SPARQL Query Service of a Knowledge Base on a Pellet Server - + - Creates a new Identity Filter + Creates a new Pellet Query Processor - Algebra the Filter applies over - Variable to restrict on - Expression Term + Pellet Server + Knowledge Base Name - + - Transforms the Inner Algebra using the given Optimiser + Creates a new Pellet Query Processor - Optimiser - + Pellet Server URI + Knowledge Base Name - + - Represents a special case Filter where the Filter is supposed to restrict a variable to just one value i.e. FILTER(SAMETERM(?x, <value>)) - + Processes a SPARQL Query + + SPARQL Query + - + - Creates a new Same Term Filter + Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate - Algebra the Filter applies over - Variable to restrict on - Expression Term + RDF Handler + Results Handler + SPARQL Query - + - Transforms the Inner Algebra using the given Optimiser + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes - Optimiser - + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback - + - Represents a sub-query as an Algebra operator (only used when strict algebra is generated) + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback - + - Creates a new subquery operator + Static Helper class containing extension methods related to queries - Subquery - + - Evaluates the subquery in the given context + Determines whether an Expresion uses the Default Dataset - Evaluation Context + Expression + + Almost all Expressions use the Default Dataset. 
The only ones that does are EXISTS/NOT EXISTS expressions where the graph pattern does not use the default dataset + - - - Gets the variables used in the subquery which are projected out of it - - - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + A SPARQL Query Processor where the query is processed by passing it to the ExecuteQuery() method of an INativelyQueryableStore - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Creates a new Simple Query Processor + Triple Store - + - Converts the algebra back into a Query + Processes a SPARQL Query + SPARQL Query - + - Converts the algebra back into a Subquery + Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate - + RDF Handler + Results Handler + SPARQL Query - + - Gets the string representation of the algebra + Delegate used for asychronous execution - + RDF Handler + Results Handler + SPARQL Query - + - Represents a Zero Length Path in the SPARQL Algebra + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback + + In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. 
+ - + - Creates a new Zero Length Path + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes - Path Start - Path End - Property Path + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback + + In the event of a success the callback will be invoked normally, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + - + - Evaluates a Zero Length Path + A SPARQL Query Processor where the query is processed by passing it to the Query() method of an IQueryableStorage - Evaluation Context - - + - Gets the String representation of the Algebra + Creates a new Generic Query Processor - + Generic IO Manager - + - Transforms the Algebra back into a Graph Pattern + Processes a SPARQL Query + SPARQL Query - + - Represents a Zero or More Path in the SPARQL Algebra + Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + RDF Handler + Results Handler + SPARQL Query - + - Creates a new Zero or More Path + Delegate used for asychronous execution - Path Start - Path End - Property Path + RDF Handler + Results Handler + SPARQL Query - + - Evaluates a Zero or More Path + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes - Evaluation Context - + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback + + In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. 
+ - + - Gets the String representation of the Algebra + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes - + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback + + In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + - + - Transforms the Algebra into a Graph Pattern + A SPARQL Query Processor where the query is processed by passing it to a remote SPARQL endpoint - - + - Represents the Minus join + Creates a new Remote Query Processor + SPARQL Endpoint - + - Creates a new Minus join + Processes a SPARQL Query - LHS Pattern - RHS Pattern + SPARQL Query + - + - Evaluates the Minus join by evaluating the LHS and RHS and substracting the RHS results from the LHS + Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate - Evaluation Context - + RDF Handler + Results Handler + SPARQL Query - + - Gets the Variables used in the Algebra + Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + SPARQL QUery + Callback for queries that return a Graph + Callback for queries that return a Result Set + State to pass to the callback + + In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. + - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback + + In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Stores information about the Evaluation of a Query during it's evaluation - + - Gets the LHS of the Join + Creates a new Evaluation Context for the given Query over the given Dataset + Query + Dataset - + - Gets the RHS of the Join + Creates a new Evaluation Context for the given Query over the given Dataset using a specific processor + Query + Dataset + Query Processor - + - Gets the string representation of the Algebra + Creates a new Evaluation Context which is a Container for the given Result Binder - + - + - Converts the Algebra back to a SPARQL Query + Gets the Query that is being evaluated - - + - Converts the Minus() back to a MINUS Graph Pattern + Gets the Dataset the query is over - - + - Transforms both sides of the Join using the given Optimiser + Gets the custom query processor that is in use (if any) - Optimser - - + - Transforms the LHS of the Join using the given Optimiser + Gets/Sets the Input Multiset - Optimser - - + - Transforms the RHS of the Join using the given Optimiser + Gets/Sets the Output Multiset - Optimser - - + - Represents a Multiset of possible solutions + Gets/Sets the Results Binder - + - Variables contained in the Multiset + Gets/Sets whether BGPs should trim temporary variables - + - Dictionary of Sets in the Multiset + Starts the Execution Timer - + - Counter used to assign Set IDs + Ends the Execution Timer - + - 
Creates a new Empty Multiset + Checks whether Execution should Time out + Thrown if the Query has exceeded the Execution Timeout - + - Creates a new Empty Mutliset that has the list of given Variables + Gets the Remaining Timeout i.e. the Timeout taking into account time already elapsed - + + If there is no timeout then this is always zero, if there is a timeout this is always >= 1 since any operation that wants to respect the timeout must have a non-zero timeout to actually timeout properly. + - + - Creates a new Multiset from a SPARQL Result Set + Gets the Query Timeout used for the Query - Result Set + + + This is taken either from the Timeout property of the SparqlQuery to which this evaluation context pertains (if any) or from the global option Options.QueryExecutionTimeout. To set the Timeout to be used set whichever of those is appropriate prior to evaluating the query. If there is a Query present then it's timeout takes precedence unless it is set to zero (no timeout) in which case the global timeout setting is applied. You cannot set the Query Timeout to be higher than the global timeout unless the global timeout is set to zero (i.e. no global timeout) + + - + - Creates a new Multiset by flattening a Group Multiset + Retrieves the Time in milliseconds the query took to evaluate - Group Multiset - + - Determines whether a given Value is present for a given Variable in any Set in this Multiset + Retrieves the Time in ticks the query took to evaluate - Variable - Value - - + - Returns whether a given Variable is present in any Set in this Multiset + Gets/Sets a Object that should be persisted over the entire Evaluation Context - Variable + Key + + May be used by parts of the Evaluation Process that need to ensure a persistent state across the entire Evaluation Query (e.g. 
the implementation of the BNODE() function) + - + - Determines whether this Multiset is disjoint with another Multiset + Evalutes an Algebra Operator in this Context using the current Query Processor (if any) or the default Evaluate() method - Other Multiset + Algebra - + - Adds a Set to the Multiset + Internal Class which parses SPARQL Expressions into Expression Trees - Set - + - Adds a Variable to the list of Variables present in this Multiset + Creates a new SPARQL Expression Parser - Variable - + - Sets the variable ordering for the multiset + Creates a new SPARQL Expression Parser which has a reference back to a Query Parser - Variable Ordering + Query Parser - + - Removes a Set from the Multiset + Creates a new SPARQL Expression Parser - Set ID + Whether Aggregates are allowed in Expressions - + - Trims the Multiset to remove Temporary Variables + Creates a new SPARQL Expression Parser which has a reference back to a Query Parser + Query Parser + Whether Aggregates are allowed in Expressions - + - Trims the Multiset to remove the given Variable + Sets the Base Uri used to resolve URIs and QNames - Variable - + - Gets whether the Multiset is empty + Sets the Namespace Map used to resolve QNames - + - Gets the number of Sets in the Multiset + Gets/Sets whether Aggregates are permitted in Expressions - + - Gets the Variables in the Multiset + Gets/Sets the Syntax that should be supported - + - Gets the Sets in the Multiset + Sets the Query Parser that the Expression Parser can call back into when needed - + - Gets the IDs of Sets in the Multiset + Gets/Sets the locally scoped custom expression factories - + - Gets a Set from the Multiset + Parses a SPARQL Expression - Set ID + Tokens that the Expression should be parsed from - + - Represents a Multiset when there are no possible Solutions + Helper method for raising informative standardised Parser Errors + The Error Message + The Token that is the cause of the Error + - + - Joins another Multiset to this Null 
Mutliset + Comparer class for implementing the SPARQL semantics for the relational operators - Other Multiset - - Results in this Null Multiset since Null joined to anything is Null - - + - Left Joins another Multiset to this Null Mutliset + Compares two Nodes - Other Multiset - Expression the join is predicate upon - - Results in this Null Multiset since Null joined to anything is Null - + Node + Node + - + - Exists Joins another Multiset to this Null Mutliset + Compares two valued Nodes - Other Multiset - Whether joinable solutions must exist in the other Multiset for joins to be made - - Results in this Null Multiset since Null joined to anything is Null - + Node + Node + - + - Minus Joins this Multiset to another Multiset + Compares two Nodes for Numeric Ordering - Other Multiset + Node + Node + Numeric Type - + - Computes the Product of this Multiset and another Multiset + Compares two Nodes for Numeric Ordering - Other Multiset - - Results in the Other Multiset since for Product we consider this Multiset to contain a single empty Set - + Node + Node + Numeric Type + - + - Unions this Multiset with another Multiset + Compares two Date Times for Date Time ordering - Other Multiset - - Results in the Other Multiset as this is an empty Multiset - + Node + Node + - + - Returns false since the Null Multiset contains no values + Compares two Date Times for Date Time ordering - Variable - Value + Node + Node - + - Returns false since the Null Multiset contains no variables + Compares two Dates for Date ordering - Variable + Node + Node - + - Returns true since the Null Multiset is disjoint with all Multisets + Compares two Dates for Date ordering - Other Multiset + Node + Node - + - Adds a Set to this Multiset + Comparer class for use in SPARQL ORDER BY - implements the Semantics broadly similar to the relational operator but instead of erroring using Node/Lexical ordering where an error would occur it makes an appropriate decision - Set - Thrown since the operation 
is invalid on a Null Multiset - + - Adds a Variable to this Multiset + Compares two Nodes - Variable - Thrown since the operation is invalid on a Null Multiset + Node + Node + - + - Sets the variable ordering for the multiset + Compares two Nodes - Variable Ordering + Node + Node + - + - Removes a Set from a Multiset + Compares two Date Times for Date Time ordering - Set ID - Thrown since the operation is invalid on a Null Multiset + Node + Node + - + - Returns true since the Null Multiset is always empty + A SPARQL Parameterized String is a String that can contain parameters in the same fashion as a SQL command string + + + This is intended for use in applications which may want to dynamically build SPARQL queries/updates where user input may comprise individual values in the triples patterns and the applications want to avoid SPARQL injection attacks which change the meaning of the query/update + + + It works broadly in the same way as a SqlCommand would in that you specify a string with paramters specified in the form @name and then use various set methods to set the actual values that should be used. The values are only substituted for parameters when you actually call the ToString() method to get the final string representation of the command. E.g. + + + SparqlParameterizedString queryString = new SparqlParameterizedString(); + queryString.CommandText = @"SELECT * WHERE + { + ?s a @type . 
+ }"; + queryString.SetUri("type", new Uri("http://example.org/myType")); + Console.WriteLine(queryString.ToString()); + + + Would result in the following being printed to the Console: + + + SELECT * WHERE + { + ?s a <http://example.org/myType> + } + + + Calling a Set method to set a parameter that has already been set changes that value and the new value will be used next time you call ToString() - this may be useful if you plan to execute a series of queries/updates using a series of values since you need not instantiate a completely new parameterized string each time + + + This class was added to a library based on a suggestion by Alexander Sidorov and ideas from slides from Slideshare by Almedia et al + + + PERFORMANCE TIPS: if building the command text incrementaly, avoid using CommandText += and use the AppendSubQuery or Append methods instead + + - + - Returns an empty enumerable as the Null Multiset contains no Variables + Creates a new empty parameterized String - + - Returns an empty enumerable as the Null Multiset contains no Sets + Creates a new parameterized String + Command Text - + - Returns an empty enumerable as the Null Multiset contains no Sets + Gets/Sets the Namespace Map that is used to prepend PREFIX declarations to the command - + - Gets the Set with the given ID + Gets/Sets the Base URI which will be used to prepend BASE declarations to the command - Set ID - - Thrown since the Null Multiset contains no Sets - + - Represents an Order By clause + Gets/Sets the parameterized Command Text - + - Creates a new Order By clause + Appends the given query as a sub-query to the existing command text, any prefixes in the sub-query are moved to the parent query - Pattern - Ordering + Query - + - Evaluates the Order By clause + Appends the given query as a sub-query to the existing command text, any prefixes in the sub-query are moved to the parent query but any parameter/variable assignments will be lost - Evaluation Context - + Query - + - Gets the 
Variables used in the Algebra + Appends the given text to the existing command text, any prefixes in the sub-query are moved to the parent query but any parameter/variable assignments will be lost + Text - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Appends the given text to the existing command text, any prefixes in the command are moved to the parent query + Text - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Gets/Sets the Query processor which is used when you call the ExecuteQuery() method - + - Gets the Inner Algebra + Gets/Sets the Query processor which is used when you call the ExecuteUpdate() method - + - Gets the Ordering that is used + Gets an enumeration of the Variables for which Values have been set - - If the Query supplied in the SparqlEvaluationContext is non-null and has an ORDER BY clause then that is applied rather than the ordering with which the OrderBy algebra is instantiated - - + - Gets the String representation of the Algebra + Gets an enumeration of the Parameters for which Values have been set - - + - Converts the Algebra back to a SPARQL Query + Clears all set Parameters and Variables - - + - Throws an error since an OrderBy() cannot be converted back to a Graph Pattern + Clears all set Parameters - - Thrown since an OrderBy() cannot be converted back to a Graph Pattern - + - Transforms the Inner Algebra using the given Optimiser + Clears all set Variables - Optimiser - - + - Represents the Selection step of Query Evaluation + Sets the Value of a Parameter + Parameter Name + Value - Selection trims variables from the Multiset that are not needed in the final output. 
This is separate from Project so that all Variables are available for Ordering and Having clauses + Can be used in derived classes to set the value of parameters if the derived class defines additional methods for adding values for parameters - + - Creates a new Select + Removes a previously set value for a Parameter - Inner Pattern - Whether we are selecting all variables - Variables to Select + Parameter Name + + There is generally no reason to do this since you can just set a parameters value to change it + - + - Does this operator select all variables? + Removes a previously set value for a Variable + Variable Name + + May be useful if you have a skeleton query/update into which you sometimes substitute values for variables but don't always do so + - + - Gets the Inner Algebra + Sets the Value of a Variable + Variable Name + Value - + - Trims the Results of evaluating the inner pattern to remove Variables which are not Result Variables + Sets the Parameter to an Integer Literal - Evaluation Context - + Parameter + Integer - + - Gets the Variables used in the Algebra + Sets the Parameter to an Integer Literal + Parameter + Integer - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Sets the Parameter to an Integer Literal + Parameter + Integer - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Sets the Parameter to a Decimal Literal + Parameter + Integer - + - Gets the SPARQL Variables used + Sets the Parameter to a Float Literal - - If the Query supplied in the SparqlEvaluationContext is non-null then it's Variables are used rather than these - + Parameter + Integer - + - Gets the String representation of the Algebra + Sets the Parameter to a Double Literal - + Parameter + Integer - + - Converts the Algebra back to a SPARQL Query + Sets the Parameter to a Date Time Literal - + Parameter + Integer - + - Throws an error as a Select() cannot be converted back to a Graph Pattern + Sets the Parameter to a Date Time Literal - - Thrown since a Select() cannot be converted back to a Graph Pattern + Parameter + Integer + Whether to preserve precisely i.e. include fractional seconds - + - Transforms the Inner Algebra using the given Optimiser + Sets the Parameter to a Date Time Literal - Optimiser - + Parameter + Integer - + - Represents the Ask step of Query Evaluation + Sets the Parameter to a Date Time Literal - - Used only for ASK queries. Turns the final Multiset into either an IdentityMultiset if the ASK succeeds or a NullMultiset if the ASK fails - + Parameter + Integer + Whether to preserve precisely i.e. include fractional seconds - + - Creates a new ASK + Sets the Parameter to a Duration Literal - Inner Pattern + Parameter + Integer - + - Evaluates the ASK by turning the Results of evaluating the Inner Pattern to either an Identity/Null Multiset depending on whether there were any Results + Sets the Parameter to a Boolean Literal - Evaluation Context - + Parameter + Integer - + - Gets the Inner Algebra + Sets the Parameter to an Untyped Literal + Parameter + Integer - + - Gets the Variables used in the Algebra + Sets the Parameter to a Typed Literal + Parameter + Integer + Datatype URI - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Sets the Parameter to a Literal with a Language Specifier + Parameter + Integer + Language Specifier - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Sets the Parameter to a URI + Parameter + URI - + - Gets the String representation of the Ask + Sets the Parameter to be a Blank Node with the given ID - + Parameter + Node ID + + Only guarantees that the Blank Node ID will not clash with any other Blank Nodes added by other calls to this method or it's overload which generates anonymous Blank Nodes. If the base query text into which you are inserting parameters contains Blank Nodes then the IDs generated here may clash with those IDs. + - + - Converts the Algebra back to a SPARQL Query + Sets the Parameter to be a new anonymous Blank Node - + Parameter + + Only guarantees that the Blank Node ID will not clash with any other Blank Nodes added by other calls to this method or it's overload which takes an explicit Node ID. If the base query text into which you are inserting parameters contains Blank Nodes then the IDs generated here may clash with those IDs. 
+ - + - Throws an exception since an Ask() cannot be converted to a Graph Pattern + Executes this command as a query - Thrown since an Ask() cannot be converted to a Graph Pattern - + - Transforms the Inner Algebra using the given Optimiser + Executes this command as a query - Optimiser - + RDF Handler + Results Handler - + - Represents a Service Clause + Executes this command as an update - + - Creates a new Service clause with the given Endpoint Specifier and Graph Pattern + Clears the preprocessing structures - Endpoint Specifier - Graph Pattern - Whether Evaluation Errors are suppressed - + - Creates a new Service clause with the given Endpoint Specifier and Graph Pattern + Trims out the SPARQL preamble (BASE and PREFIX definitions) from the command text - Endpoint Specifier - Graph Pattern + + This is done so the instance can be directly merged into another SparqlParameterizedString through the Append methods + - + - Evaluates the Service Clause by generating instance(s) of SparqlRemoteEndpoint as required and issuing the query to the remote endpoint(s) + Provides some fast string exploration to determine valid parameter/variable placeholders and leave out any constant SPARQL ambiguous patterns (language tags, parameter- or variable-like patterns in IRIs or in string literals...) - Evaluation Context - - + - Gets the Variables used in the Algebra + Returns the actual Query/Update String with parameter and variable values inserted + - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Types of SPARQL Query - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Unknown - + - Gets the Endpoint Specifier + Ask - + - Gets the Graph Pattern + Constuct - + - Gets the String representation of the Algebra + Describe - - + - Converts the Algebra back to a SPARQL Query + Describe All - - + - Converts the Algebra into a Graph Pattern + Select - - + - Represents one possible set of values which is a solution to the query + Select Distinct - + - Creates a new Set + Select Reduced - + - Creates a new Set which is the Join of the two Sets + Select All - A Set - A Set - + - Creates a new Set which is a copy of an existing Set + Select All Distinct - Set to copy - + - Creates a new Set from a SPARQL Result + Select All Reduced - Result - + - Creates a new Set from a Binding Tuple + Types of Special SPARQL Query which may be optimised in special ways by the libraries SPARQL Engines - Tuple - + - Retrieves the Value in this set for the given Variable + The Query is of the form SELECT DISTINCT ?g WHERE {GRAPH ?g {?s ?p ?o}} - Variable - Either a Node or a null - + - Adds a Value for a Variable to the Set + The Query has no applicable special optimisation - Variable - Value - + - Removes a Value for a Variable from the Set + The Query has not yet been tested to determine if special optimisations are applicable - Variable - + - Checks whether the Set contains a given Variable + The Query is of the form ASK WHERE {?s ?p ?o} - Variable - - + - Gets whether the Set is compatible with a given set based on the given variables + Represents a SPARQL Query - Set - Variables - + + + Note: This class is purposefully sealed and most setters are private/protected internal since generally you create a query by using the to parse a query string/file. + + + To build a query programmatically you can use the class to generate a new query and then various extension methods to modify that query using a fluent style API. 
A query is not immutable + so if you use that API you are modifying the query, if you want to generate new queries by modifying an existing query consider using the method to take a copy of the existing query. + + - + - Gets whether the Set is minus compatible with a given set based on the given variables + Creates a new SPARQL Query - Set - Variables - - + - Gets the Variables in the Set + Creates a new SPARQL Query + Whether the Query is a Sub-query - + - Gets the Values in the Set + Creates a copy of the query + - + - Joins the set to another set + Gets the Namespace Map for the Query - Other Set - - + - Copies the Set + Gets/Sets the Base Uri for the Query - - + - Gets whether the Set is equal to another set + Gets the Default Graph URIs for the Query - Set to compare with - - + - Represents the Slice Operation in the SPARQL Algebra + Gets the Named Graph URIs for the Query - + - Creates a new Slice modifier which will detect LIMIT and OFFSET from the query + Gets the Variables used in the Query - Pattern - + - Creates a new Slice modifier which uses a specific LIMIT and OFFSET + Gets the Variables, QNames and URIs used in the Describe Query - Pattern - Limit - Offset - + - Evaluates the Slice by applying the appropriate LIMIT and OFFSET to the Results + Gets the type of the Query - Evaluation Context - - + - Gets the Variables used in the Algebra + Gets the Special Type of the Query (if any) - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Gets the top level Graph Pattern of the Query - + - Gets the enumeration of fixed variables in the algebra i.e. 
variables that are guaranteed to have a bound value + Gets/Sets the Construct Template for a Construct Query - + - Gets the Limit in use (-1 indicates no Limit) + Gets/Sets the Ordering for the Query - + - Gets the Offset in use (0 indicates no Offset) + Gets/Sets the Grouping for the Query - + - Gets whether the Algebra will detect the Limit and Offset to use from the provided query + Gets/Sets the Having Clause for the Query - + - Gets the Inner Algebra + Gets/Sets the VALUES Clause for the Query which are bindings that should be applied - + - Gets the String representation of the Algebra + Gets/Sets the ISparqlDescribe which provides the Describe algorithm you wish to use - + + By default this will be the ConciseBoundedDescription (CBD) algorithm. + - + - Converts the Algebra back to a SPARQL Query + Gets/Sets the locally scoped Algebra Optimisers that are used to optimise the Query Algebra in addition to (but before) any global optimisers (specified by SparqlOptimiser.AlgebraOptimisers) that are applied - - + - Throws an exception since a Slice() cannot be converted back to a Graph Pattern + Gets/Sets the locally scoped Expression Factories that may be used if the query is using the CALL() function to do dynamic function invocation - - Thrown since a Slice() cannot be converted to a Graph Pattern - + - Transforms the Inner Algebra using the given Optimiser + Gets/Sets the locally scoped Property Function factories that may be used by the when generating the algebra for the query - Optimiser - - + - Special Algebra Construct for optimising queries of the form SELECT DISTINCT ?g WHERE {GRAPH ?g {?s ?p ?o}} + Gets the Result Set Limit for the Query + Values less than zero are counted as -1 which indicates no limit - + - Creates a new Select Distinct algebra + Gets/Sets the Result Set Offset for the Query - Graph Variable to bind Graph URIs to + Values less than zero are treated as 0 which indicates no offset - + - Evaluates the Select Distinct Graphs optimisation 
+ Gets/Sets the Query Execution Timeout in milliseconds - Evaluation Context - + + + This Timeout (typically) only applies when executing the Query in memory. If you have an instance of this class and pass its string representation (using ToString()) you will lose the timeout information as this is not serialisable in SPARQL syntax. + + - + - Gets the Variables used in the Algebra + Gets/Sets whether Partial Results should be returned in the event of Query Timeout + + + Partial Results (typically) only applies when executing the Query in memory. If you have an instance of this class and pass its string representation (using ToString()) you will lose the partial results information as this is not serialisable in SPARQL syntax. + + - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Gets the Time taken to execute a Query + Thrown if you try and inspect the execution time before the Query has been executed - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Gets whether the Query has an Aggregate as its Result - + - Gets the Graph Variable to which Graph URIs are bound + Gets whether Optimisation has been applied to the query - If the Query supplied in the SparqlEvaluationContext is non-null then the Variable Name from the Query is used rather than this + This only indicates that an Optimiser has been applied. You can always reoptimise the query using a different optimiser by using the relevant overload of the Optimise() method. 
- + - Gets the String representation of the Algebra + Gets whether this Query is a Sub-Query in another Query - - + - Converts the Algebra back to a SPARQL Query + Gets whether a Query has a DISTINCT modifier - - + - Converts the Algebra to a Graph Pattern + Gets whether the Query has a Solution Modifier (a GROUP BY, HAVING, ORDER BY, LIMIT or OFFSET) - - + - Special Algebra Construct for optimising queries of the form ASK WHERE {?s ?p ?o} + The number of results that would be returned without any limit clause to a query or -1 if not supported. Defaults to the same value as the Count member - + - Evalutes the Ask Any Triples optimisation + Adds a Variable to the Query - Evaluation Context - + Variable Name - + - Gets the Variables used in the Algebra + Adds a Variable to the Query + Variable Name + Does the Variable occur in the Output Result Set/Graph - + - Gets the enumeration of floating variables in the algebra i.e. variables that are not guaranteed to have a bound value + Adds a Variable to the Query + Variable - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Adds a Describe Variable to the Query + Variable/Uri/QName Token - + - Gets the String representation of the Algebra + Adds a Default Graph URI - + Graph URI - + - Converts the Algebra back to a SPARQL Query + Adds a Named Graph URI - + Graph URI - + - Converts the Algebra to a Graph Pattern + Removes all Default Graph URIs - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - Represents a SPARQL aggregate + Removes all Named Graph URIs - + - Initializes a new instance of the class. 
+ Evaluates the SPARQL Query against the given Triple Store + Triple Store + + Either a SparqlResultSet or a Graph depending on the type of query executed + - + - Represents a blank node RDF term expression + Evaluates the SPARQL Query against the given Triple Store processing the results with the appropriate handler from those provided + RDF Handler + Results Handler + Triple Store - + - Wraps the as a blank node expression + Evaluates the SPARQL Query against the given Dataset + Dataset + + Either a SparqlResultSet or a IGraph depending on the type of query executed + - + - Represents a logical expression + Evaluates the SPARQL Query against the given Dataset processing the results with an appropriate handler form those provided + RDF Handler + Results Handler + Dataset - + - Wraps the as a boolean expression + Processes the Query using the given Query Processor + SPARQL Query Processor + - + - Creates a typed literal term + Applies optimisation to a Query using the default global optimiser - + - Creates an untyped literal term (simple literal) + Applies optimisation to a Query using the specific optimiser + Query Optimiser - + - Represents a numeric expression of known type + Helper method which rewrites Blank Node IDs for Describe Queries + Triple + Mapping of IDs to new Blank Nodes + Graph of the Description + - + - Wraps the as a typed numeric expression + Generates a String representation of the Query + + This method may not return a complete representation of the Query depending on the Query it is called on as not all the classes which can be included in a Sparql query currently implement ToString methods - + - Represents a numeric expression of undefined type + Converts the Query into it's SPARQL Algebra representation (as represented in the Leviathan API) + - + - Wraps the as a numeric expression + Applies Algebra Optimisations to the Query + Query Algebra + The Query Algebra which may have been transformed to a more optimal form - + - Represents a SPARQL 
expression (variable, function, operator, term or aggregate) + Gets whether the Query's ORDER BY clause can be optimised with Lazy evaluation - + - The undelrying expression + Gets whether a Query uses the Default Dataset against which it is evaluated + + + If the value is true then the Query will use whatever dataset is it evaluated against. If the value is false then the query changes the dataset at one/more points during its evaluation. + + + Things that may change the dataset and cause a query not to use the Default Dataset are as follows: +
    +
  • FROM clauses (but not FROM NAMED)
  • +
  • GRAPH clauses
  • +
  • Subqueries which do not use the default dataset
  • +
+
+
- + - Represents an expression, which evaluates to a variable + A Class for connecting to a remote SPARQL Endpoint and executing Queries against it - + - Gets the represented by this variable expression + Empty Constructor for use by derived classes - + - Represents a IRI expression + Creates a new SPARQL Endpoint for the given Endpoint URI + Remote Endpoint URI - + - Wraps the as an IRI expression + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph Uri + Remote Endpoint URI + Default Graph URI to use when Querying the Endpoint - + - Represents a literal expression + Creates a new SPARQL Endpoint for the given Endpoint Uri using the given Default Graph Uri + Remote Endpoint URI + Default Graph URI to use when Querying the Endpoint - + - Wraps the as a literal expression + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URI to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Represents a RDF term expression (IRI, literal or blank node) + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URI to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Wraps the as an RDF term expression + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URI to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Represents a SPARQL expression which is not an aggregate + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint - + - Initializes a new instance of the class. + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI - The expression. 
+ Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint - + - Creates a call to the IN function + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI - the list of SPARQL expressions + Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Creates a greater than operator usage + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Creates a less than operator usage + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Creates a greater than or equal operator usage + Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Remote Endpoint URI + Default Graph URIs to use when Querying the Endpoint + Named Graph URIs to use when Querying the Endpoint - + - Creates a less than or equal operator usage + Gets the Default Graph URIs for Queries made to the SPARQL Endpoint - + - Represents a typed literal + Gets the List of Named Graphs used in requests - + - Wraps the as a typed literal expression + Gets/Sets the Accept Header sent with ASK/SELECT queries + + + Can be used to workaround buggy endpoints which don't like the broad Accept Header that dotNetRDF sends by default. If not set or explicitly set to null the library uses the default header generated by + + - + - Adds additional to DESCRIBE + Gets/Sets the Accept Header sent with CONSTRUCT/DESCRIBE queries + + + Can be used to workaround buggy endpoints which don't like the broad Accept Header that dotNetRDF sends by default. 
If not set or explicitly set to null the library uses the default header generated by + + - + - Adds additional to DESCRIBE + Makes a Query where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + SPARQL Query String + A SPARQL Result Set - + - Provides methods to supply the "then" expression for the IF function call + Makes a Query where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + Results Handler + SPARQL Query String - + - Sets the second parameter of the IF function call + Makes a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + SPARQL Query String + RDF Graph - + - Provides methods to supply the "else" expression for the IF function call + Makes a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + RDF Handler + SPARQL Query String - + - Sets the third parameter of the IF function call + Makes a Query to a SPARQL Endpoint and returns the raw Response + SPARQL Query String + - + - Creates a builder of a normal graph patterns + Makes a Query to a SPARQL Endpoint and returns the raw Response + SPARQL Query String + MIME Types to use for the Accept Header + - + - Creates a builder of a graph pattern + Makes a Query where the expected Result is a SparqlResultSet ie. SELECT and ASK Queries - MINUS, GRAPH, SERVICE etc. + SPARQL Query String + A Sparql Result Set + + + Allows for implementation of asynchronous querying. Note that the overloads of QueryWithResultSet() and QueryWithResultGraph() that take callbacks are already implemented asynchronously so you may wish to use those instead if you don't need to explicitly invoke and wait on an async operation. + + - + - Exposes method for assigning a name to an expression variable + Delegate for making a Query where the expected Result is an RDF Graph ie. 
CONSTRUCT and DESCRIBE Queries + Sparql Query String + RDF Graph + Allows for implementation of asynchronous querying + + + Allows for implementation of asynchronous querying. Note that the overloads of QueryWithResultSet() and QueryWithResultGraph() that take callbacks are already implemented asynchronously so you may wish to use those instead if you don't need to explicitly invoke and wait on an async operation. + + - + - Set the expression's variable name + Internal method which builds the Query Uri and executes it via GET/POST as appropriate - the parent query or graph pattern builder + Sparql Query + Accept Header to use for the request + - + - Provides methods for creating aggregates expressions + Internal Helper Method which executes the HTTP Requests against the Sparql Endpoint + Uri to make Request to + Data that is to be POSTed to the Endpoint in application/x-www-form-urlencoded format + The Accept Header that should be used + HTTP Response - + - Gets a builder which builds a DISTICT aggregate + Makes a Query asynchronously where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + SPARQL Query String + Callback to invoke when the query completes + State to pass to the callback - + - Creates a SAMPLE aggregate + Makes a Query asynchronously where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + SPARQL Query String + Results Handler + Callback to invoke when the query completes + State to pass to the callback - + - Creates a SAMPLE aggregate + Makes a Query asynchronously where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + SPARQL Query String + Callback to invoke when the query completes + State to pass to the callback - + - Creates a SAMPLE aggregate + Makes a Query asynchronously where the expected Result is an RDF Graph ie. 
CONSTRUCT and DESCRIBE Queries + SPARQL Query String + RDF Handler + Callback to invoke when the query completes + State to pass to the callback - + - Interface for creating DESCRIBE queries + Serializes the Endpoint's Configuration + Configuration Serialization Context - + - Adds triple patterns to the SPARQL query or graph pattern + Class for representing a Row of a Sparql Result Set - + - Adds triple patterns to the SPARQL query or graph pattern + Creates a new empty SPARQL Result which can only be filled by methods internal to the dotNetRDF Library - + - Provides methods for creating DESCRIBE queries + Creates a new SPARQL Result from the given Set + Set - + - Adds additional to DESCRIBE + Creates a new SPARQL Result from the given Set which contains only the given variables in the given order + Set + Variables - + - Adds additional to DESCRIBE + Deserialization only constructor + Serialization Info + Streaming Context - + - Provides methods for creating aggregates expressions but only those allowing DISTINCT + Gets the Value that is bound to the given Variable + Variable whose Value you wish to retrieve + + Thrown if there is nothing bound to the given Variable Name for this Result - + - Creates a SUM aggregate + Gets the Value that is bound to the given Variable + Variable whose Value you wish to retrieve + + Thrown if there is nothing bound to the given Variable Name for this Result - + - Creates a SUM aggregate + Gets the Value that is bound at the given Index + Index whose Value you wish to retrieve + + + As of 1.0.0 the order of variables in a result may/may not vary depending on the original query. If a specific variable list was declared dotNetRDF tries to preserve that order but this may not always happen depending on how results are received. 
+ + Thrown if there is nothing bound at the given Index - + - Creates a SUM aggregate + Tries to get a value (which may be null) for the variable + Variable + Value + True if the variable was present (even it was unbound) and false otherwise - + - Creates a SUM aggregate + Tries to get a non-null value for the variable + Variable + Value + True if the variable was present and bound, false otherwise - + - Creates a AVG aggregate + Gets the number of Variables for which this Result contains Bindings - + - Creates a AVG aggregate + Internal Only Method for setting the Value of a Result + Variable Name + Value bound to the Variable - + - Creates a AVG aggregate + Sets the variable ordering for the result + - + - Creates a AVG aggregate + Checks whether a given Variable has a value (which may be null) for this result + Variable Name + True if the variable is present, false otherwise + Returns true even if the value is null, use instead to see whether a non-null value is present for a variable. - + - Creates a MIN aggregate + Checks whether a given Variable has a non-null value for this result + Variable Name + True if the variable is present and has a non-null value, false otherwise - + - Creates a MIN aggregate + Gets the set of Variables that are bound in this Result - + - Creates a MIN aggregate + Gets whether a Result is a Ground Result + + A Ground Result is a result which is considered to be a fixed fact. 
In practise this means it contains no Blank Nodes + - + - Creates a MIN aggregate + Removes all Variables Bindings where the Variable is Unbound - + - Creates a MAX aggregate + Displays the Result as a comma separated string of pairs of the form ?var = value + - + - Creates a MAX aggregate + Displays the Result as a comma separated string of paris of the form ?var = value where values are formatted using the given Node Formatter + Node Formatter + - + - Creates a MAX aggregate + Override of the Equals method for Results + + + Used implicitly in applying Distinct and Reduced modifiers to the Result Set - + - Creates a MAX aggregate + Override of the GetHashCode method for Results + + Used implicitly in applying Distinct and Reduced modifiers to the Result Set - + - Creates a GROUP_CONCAT aggregate + Enumerates the Bindings of Variable Names to Values in this Result + + + Does not respect the ordering of the variables (if any) + - + - Creates a GROUP_CONCAT aggregate + Enumerates the Bindings of Variable Names to Values in this Result + - + - Creates a GROUP_CONCAT aggregate + Gets the data for serialization + Serialization Information + Streaming Context - + - Creates a COUNT(*) aggregate + Gets the schema for XML serialization + - + - Creates a COUNT aggregate + Writes the data for XML serialization (.Net serialization not the official SPARQL results serialization) + XML Writer - + - Creates a COUNT aggregate + Reads the data for XML deserialization (.Net serialization not the official SPARQL results serialization) + XML Reader - + - Creates a COUNT aggregate + Helper Class used in the execution of Sparql Queries + + - + - Creates a COUNT aggregate + Internal Empty Constructor for derived classes - + - Provides methods for building SPARQL expressions, including aggregates + Creates a new Results Binder + Query this provides Result Binding to - + - Provides methods for building graph patterns + Gets the Variables that the Binder stores Bindings for - + - Adds 
another child graph pattern builder. + Gets the enumeration of valid Binding IDs - + - Adds another child graph pattern builder. + Gets the set of Groups that result from the Query this Binder provides Binding to - + - Creates a UNION of multiple graph patterns. If is null or empty, - acts as a call to the method. + Gets the Value bound to a given Variable for a given Binding ID + Variable Name + Binding ID + - + - Creates a UNION of multiple graph patterns. If is null or empty, - acts as a call to the method. + Gets the Group referred to by the given ID + Group ID + - + - Adds triple patterns to the SPARQL query or graph pattern + Checks whether the given ID refers to a Group + Group ID + - + - Adds triple patterns to the SPARQL query or graph pattern + Sets the Group Context for the Binder + Whether you want to access the Group Contents or the Groups themselves - + - Adds an OPTIONAL graph pattern to the SPARQL query or graph pattern + Disposes of a Result Binder - + - Adds a FILTER to the SPARQL query or graph pattern + Results Binder used by Leviathan - + - Adds a FILTER expression to the SPARQL query or graph pattern + Creates a new Leviathan Results Binder + Evaluation Context - + - Adds a MINUS graph pattern to the SPARQL query or graph pattern + Gets the Value for a given Variable from the Set with the given Binding ID + Variable + Set ID + - + - Adds a GRAPH graph pattern to the graph pattern + Gets the Variables contained in the Input - + - Adds a GRAPH graph pattern to the graph pattern + Gets the IDs of Sets - + - Adds a SERVICE graph pattern to the graph pattern + Determines whether a given ID is for of a Group + Group ID + - + - Adds a BIND variable assignment to the graph pattern + Returns the Group with the given ID + Group ID + - + - Adds a "normal" child graph pattern + Sets the Group Context for the Binder + Whether you want to access the Group Contents or the Groups themselves - + - Adds a "normal" child graph pattern + Special Temporary Results 
Binder used during LeftJoin's - + - Adds a "normal" child graph pattern + Creates a new LeftJoin Binder + Input Multiset - + - Provides methods for building SPARQL expressions, but not aggregates + Gets the Value for a given Variable from the Set with the given Binding ID + Variable + Set ID + - + - Creates a call to the REGEX function + Gets the Variables in the Input Multiset - + - Creates a call to the REGEX function + Gets the IDs of Sets - + - Creates a call to the REGEX function + Represents the type of the SPARQL Results Set - + - Creates a call to the REGEX function + The Result Set represents a Boolean Result - + - Creates a call to the REGEX function + The Result Set represents a set of Variable Bindings - + - Creates a call to the REGEX function + The Result Set represents an unknown result i.e. it has yet to be filled with Results - + - Creates a call to the REGEX function + Class for representing Sparql Result Sets - + - Creates a call to the REGEX function + Lists of Sparql Results - + - Creates a call to the REGEX function + Lists of Variables in the Result Set - + - Creates a call to the REGEX function + Boolean Result - + - Creates a call to the STRLEN function with a variable parameter + Creates an Empty Sparql Result Set - a SPARQL variable + Useful where you need a possible guarentee of returning an result set even if it proves to be empty and also necessary for the implementation of Result Set Parsers. 
- + - Creates a call to the STRLEN function with a string literal parameter + Creates a Sparql Result Set for the Results of an ASK Query with the given Result value - a string literal parameter + - + - Creates a call to the SUBSTR function with a string literal and variable parameters + Creates a Sparql Result Set for the collection of results - a string literal parameter - 1-based start index + Results - + - Creates a call to the SUBSTR function with a string literal and interger expression parameters + Creates a SPARQL Result Set for the Results of a Query with the Leviathan Engine - a string literal parameter - a SPARQL variable + SPARQL Evaluation Context - + - Creates a call to the SUBSTR function with a string literal and interger parameters + Gets the Type of the Results Set - a string literal parameter - 1-based start index - + - Creates a call to the SUBSTR function with a variable and interger expression parameters + Gets the Result of an ASK Query - a SPARQL variable - 1-based start index + Result Set is deemed to refer to an ASK query if the Variables list is empty since an ASK Query result has an empty <head>. It is always true for any other Query type where one/more variables were requested even if the Result Set is empty. 
- + - Creates a call to the SUBSTR function with a variable and interger parameters + Gets the number of Results in the Result Set - a SPARQL variable - 1-based start index - + - Creates a call to the SUBSTR function with two variable parameters + Gets whether the Result Set is empty and can have Results loaded into it - a SPARQL variable - a SPARQL variable + + - + - Creates a call to the SUBSTR function with a string literal and variable parameters + Gets the List of Results - a string literal parameter - 1-based start index - substring length - + - Creates a call to the SUBSTR function with a string literal and interger expression parameters + Index directly into the Results - a string literal parameter - a SPARQL variable - substring length + Index of the Result you wish to retrieve + - + - Creates a call to the SUBSTR function with a string literal and interger parameters + Gets the Variables used in the Result Set - a string literal parameter - 1-based start index - substring length + + As of 1.0 where possible dotNetRDF tries to preserve the ordering of variables however this may not be possible depending on where the result set originates from or how it is populated + - + - Creates a call to the SUBSTR function with a variable and interger expression parameters + Trims the Result Set to remove unbound variables from results - a SPARQL variable - 1-based start index - substring length + + Note: This does not remove empty results this only removes unbound variables from individual results + - + - Creates a call to the SUBSTR function with a variable and interger parameters + Adds a Variable to the Result Set - a SPARQL variable - 1-based start index - substring length + Variable Name - + - Creates a call to the SUBSTR function with two variable parameters + Adds a Result to the Result Set - a SPARQL variable - a SPARQL variable - substring length + Result - + - Creates a call to the SUBSTR function with a string literal and two integer expressions parameters 
+ Sets the Boolean Result for the Result Set - a string literal parameter - 1-based start index - substring length + Boolean Result - + - Creates a call to the SUBSTR function with a string literal, variable and interger expression parameters + Gets an Enumerator for the Results List - a string literal parameter - a SPARQL variable - substring length + - + - Creates a call to the SUBSTR function with a string literal, interger and integer expression parameters + Gets an Enumerator for the Results List - a string literal parameter - 1-based start index - substring length + - + - Creates a call to the SUBSTR function with a variable, interger expression and integer expression parameters + Determines whether two Result Sets are equal - a SPARQL variable - 1-based start index - substring length + + + + Experimental and not yet complete + - + - Creates a call to the SUBSTR function with a variable, interger and a numeric expression parameters + Converts a Result Set into a Triple Collection - a SPARQL variable - 1-based start index - substring length + Graph to generate the Nodes in + + + Assumes the Result Set contains three variables ?s, ?p and ?o to use as the Subject, Predicate and Object respectively. 
Only Results for which all three variables have bound values will generate Triples + - + - Creates a call to the SUBSTR function with two variable parameters + Converts a Result Set into a Triple Collection - a SPARQL variable - a SPARQL variable - substring length + Graph to generate the Nodes in + Variable whose value should be used for Subjects of Triples + Variable whose value should be used for Predicates of Triples + Variable whose value should be used for Object of Triples + + + Only Results for which all three variables have bound values will generate Triples + - + - Creates a call to the SUBSTR function with a string literal, interger expression and a numeric expression parameters + Disposes of a Result Set - a string literal parameter - 1-based start index - substring length - + - Creates a call to the SUBSTR function with a string literal, interger expression and a variable parameters + Gets the data for serialization - a string literal parameter - a SPARQL variable - substring length + Serialization Information + Streaming Context - + - Creates a call to the SUBSTR function with a string literal, interger and a variable parameters - - a string literal parameter - 1-based start index - substring length + Gets the schema for XML serialization + + - + - Creates a call to the SUBSTR function with a variable, interger expression and a variable parameters + Writes the data for XML serialization (.Net serialization not the official SPARQL results serialization) - a SPARQL variable - 1-based start index - substring length + XML Writer - + - Creates a call to the SUBSTR function with a variable, interger and a variable parameters + Reads the data for XML deserialization (.Net serialization not the official SPARQL results serialization) - a SPARQL variable - 1-based start index - substring length + XML Reader - + - Creates a call to the SUBSTR function with three variable parameters + Class containing Helper information and methods pertaining to the Sparql Query 
Language for RDF - a SPARQL variable - a SPARQL variable - substring length - + - Creates a call to the LANGMATCHES function + Namespace Uri for SPARQL Namespace - + - Creates a call to the LANGMATCHES function + Namespace Uri for the RDF serialization of a SPARQL Result Set - + - Creates a call to the LANGMATCHES function + Keywords in Sparql - + - Creates a call to the LANGMATCHES function + Keywords in Sparql - + - Creates a call to the LANGMATCHES function + Keywords in Sparql - + - Creates a call to the LANGMATCHES function + Keywords in Sparql - + - Creates a call to the isIRI function with an expression parameter + Keywords in Sparql - any SPARQL expression - + - Creates a call to the isIRI function with a variable parameter + Keywords in Sparql - name of variable to check - + - Creates a call to the isBlank function with an expression parameter + Keywords in Sparql - any SPARQL expression - + - Creates a call to the isBlank function with a variable parameter + Keywords in Sparql - name of variable to check - + - Creates a call to the isLiteral function with an expression parameter + Keywords in Sparql - any SPARQL expression - + - Creates a call to the isLiteral function with a variable parameter + Keywords in Sparql - name of variable to check - + - Creates a call to the isNumeric function with an expression parameter + Keywords in Sparql - any SPARQL expression - + - Creates a call to the isNumeric function with a variable parameter + Keywords in Sparql - name of variable to check - + - Creates a call to the STR function with a variable parameter + Keywords in Sparql - a SPARQL variable - + - Creates a call to the STR function with a literal expression parameter + Keywords in Sparql - a SPARQL literal expression - + - Creates a call to the STR function with an variable parameter + Keywords in Sparql - an RDF IRI term - + - Creates a call to the LANG function with a variable parameter + Keywords in Sparql - a SPARQL variable - + - Creates a call to the 
LANG function with a literal expression parameter + Keywords in Sparql - a SPARQL literal expression - + - Creates a call to the DATATYPE function with a literal expression parameter + Keywords in Sparql - a SPARQL literal expression - depending on will use a different flavour of datatype function - + - Creates a parameterless call to the BNODE function + Keywords in Sparql - + - Creates a call to the BNODE function with a simple literal parameter + Keywords in Sparql - a SPARQL simple literal - + - Creates a call to the BNODE function with a string literal parameter + Keywords in Sparql - a SPARQL string literal - + - Creates a call to the STRDT function with a simple literal and a IRI expression parameters + Keywords in Sparql - a SPARQL simple literal - datatype IRI - + - Creates a call to the STRDT function with a simple literal and a parameters + Keywords in Sparql - a SPARQL simple literal - datatype IRI - + - Creates a call to the STRDT function with a simple literal and a variable parameters + Keywords in Sparql - a SPARQL simple literal - datatype IRI - + - Creates a call to the STRDT function with a simple literal and a IRI expression parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the STRDT function with a simple literal and a IRI expression parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the STRDT function with a simple literal and a parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the STRDT function with a variable and a parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the STRDT function with a variable and a parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the STRDT function with a variable and a IRI expression parameters + Keywords in Sparql - a literal - datatype IRI - + - Creates a call to the UUID function + Keywords in Sparql - + - Creates a call to the StrUUID function + 
Keywords in Sparql - + - Creates a call to the BOUND function with a variable parameter + Keywords in Sparql - a SPARQL variable - + - Creates a call to the BOUND function with a variable parameter + Keywords in Sparql - a SPARQL variable name - + - Creates a call to the IF function with an expression for the first parameter + Keywords in Sparql - conditional clause expression - + - Creates a call to the IF function with a variable for the first parameter + Keywords in Sparql - conditional clause variable expression - + - Creates a call of the COALESCE function with a variable number of expression parameters + Keywords in Sparql - SPARQL expressions - + - Creates a call of the EXISTS function + Keywords in Sparql - a function, which will create the graph pattern parameter - + - Creates a call of the SAMETERM function with two expression parameters + Keywords in Sparql - a SPARQL expression - a SPARQL expression - + - Creates a call of the SAMETERM function with variable and expression parameters + Keywords in Sparql - a variable name - a SPARQL expression - + - Creates a call of the SAMETERM function with expression and variable parameters + Keywords in Sparql - a SPARQL expression - a variable name - + - Creates a call of the SAMETERM function with two variable parameters + Keywords in Sparql - a variable name - a variable name - + - SPARQL syntax verions to use when creating expressions + Keywords in Sparql - + - Creates a SPARQL variable + Keywords in Sparql - + - Creates a SPARQL variable + Keywords in Sparql - + - Creates a string constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a boolean constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a numeric constant + Keywords in Sparql - + - Creates a numeric 
constant + Keywords in Sparql - + - Creates a datetime constant + Keywords in Sparql - + - Creates an IRI constant + Keywords in Sparql - + - Builds a SPARQL constructor function call + Keywords in Sparql - + - Interface for creating SELECT queries + Keywords in Sparql - + - Adds additional SELECT return + Keywords in Sparql - + - Adds additional SELECT return + Keywords in Sparql - + - Adds additional SELECT expression + Keywords in Sparql - + - Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) + Keywords in Sparql - + - Provides methods for building triple patterns + Keywords in Sparql - + - Sets a variable as + Keywords in Sparql - + - Sets a variable as + Keywords in Sparql - + - Depending on the generic parameter type, sets a literal, a QName or a blank node as + Keywords in Sparql - Either a variable name, a literal, a QName or a blank node identifier - A relevant prefix/base URI must be added to to accept a QName - + - Depending on the 's type, sets a literal, a QName or a blank node as + Keywords in Sparql - A relevant prefix/base URI must be added to to accept a QName - + - Sets a as + Keywords in Sparql - + - Sets a as + Keywords in Sparql - + - Creates a + Keywords in Sparql - + - Class responsible for creating s + Keywords in Sparql - + - Build a simple DESCRIBE query without the WHERE part. + Keywords in Sparql - + - Add a group graph pattern or a sub query to the query. + Keywords in Sparql - - + - Add a group graph pattern or a sub query to the query. 
+ Keywords in Sparql - - + - Adds additional SELECT + Keywords in Sparql - + - Adds additional SELECT expression + Keywords in Sparql - + - Adds additional SELECT + Keywords in Sparql - + - Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) + Keywords in Sparql - + - Provides methods for casting expressions to XPath types + Keywords in Sparql - + - Creates a cast to xsd:integer + Keywords in Sparql - + - Creates a cast to xsd:double + Keywords in Sparql - + - Creates a cast to xsd:decimal + Keywords in Sparql - + - Creates a cast to xsd:dateTime + Keywords in Sparql - + - Creates a cast to xsd:float + Keywords in Sparql - + - Creates a cast to xsd:boolean + Keywords in Sparql - + - Creates a cast to xsd:string + Keywords in Sparql - + - Class responsible for setting the object part of triple patterns + Keywords in Sparql - + - Sets a SPARQL variable as + Keywords in Sparql - + - Sets a SPARQL variable as + Keywords in Sparql - + - Depending on the generic parameter type, sets a literal, a QName or a blank node as + Keywords in Sparql - Either a variable name, a literal, a QName or a blank node identifier - A relevant prefix/base URI must be added to to accept a QName - + - Depending on the 's type, sets a literal, a QName or a blank node as + Keywords in Sparql - + - Sets a as + Keywords in Sparql - + - Sets a plain literal as + Keywords in Sparql - + - Sets a literal with language tag as + Keywords in Sparql - + - Sets a typed literal as + Keywords in Sparql - + - Sets a as + Keywords in Sparql - + - Class responsible for setting the predicate part of triple patterns + Keywords in Sparql - + - Sets a SPARQL variable as + Keywords in Sparql - + - Sets a SPARQL variable as + Keywords in Sparql - + - Sets a as + Keywords in Sparql - + - Sets a as + Keywords in Sparql - + - Sets a as using a QName + Keywords in Sparql - A relevant prefix/base URI must be added to - + - 
Sets a as using a + Keywords in Sparql - + - Interface for building SPARQL queries + Keywords in Sparql - + - Gets the query type of the generated SPARQL query. + Keywords in Sparql - + - Gets the builder associated with the root graph pattern. + Keywords in Sparql - + - Gets the prefix manager, which allows adding prefixes to the query or graph pattern + Keywords in Sparql - + - Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) + Keywords in Sparql - + - Applies a LIMIT + Keywords in Sparql - Limit value. Pass negative to disable LIMIT - + - Applies an OFFSET + Keywords in Sparql - + - Adds ascending ordering by a variable to the query + Keywords in Sparql - + - Adds ascending ordering by a variable to the query + Keywords in Sparql - + - Adds descending ordering by a variable to the query + Keywords in Sparql - + - Adds descending ordering by a variable to the query + Keywords in Sparql - + - Adds ascending ordering by an expression to the query + Keywords in Sparql - + - Adds descending ordering by an expression to the query + Keywords in Sparql - + - Adds a GROUP BY clause to the query. + Keywords in Sparql - + - Adds a GROUP BY clause to the query. + Keywords in Sparql - + - Adds a GROUP BY clause to the query. + Keywords in Sparql - + - Adds a HAVING clause to the query. + Keywords in Sparql - + - Builds and returns a + Keywords in Sparql - + - Adds a BIND variable assignment to the root graph pattern + Keywords in Sparql - + - Provides methods for building queries with a fluent style API + Keywords in Sparql - - - A is mutable by definition so calling any of the extension methods in this API will cause the existing query it is called on to be changed. 
You can call on an existing query to create a new copy if you want to make different queries starting from the same base query - - - + - Gets or sets the namespace mappings for the SPARQL query being built + Keywords in Sparql - + - Creates a new ASK query + Keywords in Sparql - + - Creates a new CONSTRUCT query + Keywords in Sparql - + - Creates a new CONSTRUCT WHERE query + Keywords in Sparql - + - Creates a new SELECT * query + Set of SPARQL Keywords that are Non-Query Keywords - + - Creates a new SELECT query which will return the given - + Set of SPARQL Keywords that are Function Keywords - query result variables - + - Creates a new SELECT query which will return the given - + Set of SPARQL Keywords that are Aggregate Keywords - query result variables + + Unlike AggregateFunctionKeywords[] this includes keywords related to aggregates (like DISTINCT) and those for Leviathan extension aggregates which are not standard SPARQL 1.1 syntax + - + - Creates a new SELECT query which will return an expression + Set of SPARQL Keywords that are built in SPARQL Aggregate Functions - + - Creates a new query, which will DESCRIBE the given + Set of XML Schema Data Types which are derived from Integer and can be treated as Integers by SPARQL - + - Creates a new query, which will DESCRIBE the given + Set of IRIs for supported Cast Functions - + - Applies the DISTINCT modifier if the Query is a SELECT, otherwise leaves query unchanged (since results from any other query are DISTINCT by default) + Set of Keywords for SPARQL Query 1.0 - + - Applies a LIMIT + Set of additional Keywords for SPARQL Query 1.1 - Limit value. 
Pass negative to disable LIMIT - + - Applies an OFFSET + Set of SPARQL Keywords that are Update Keywords - + - Adds ascending ordering by a variable to the query + Set of Keywords for SPARQL Update 1.1 - + - Adds ascending ordering by a variable to the query + Regular Expression Pattern for Valid Integers in Sparql - + - Adds descending ordering by a variable to the query + Regular Expression Pattern for Valid Decimals in Sparql - + - Adds descending ordering by a variable to the query + Regular Expression Pattern for Valid Doubles in Sparql - + - Adds ascending ordering by an expression to the query + Checks whether a given Keyword is a SPARQL Query Keyword + Keyword to check + - + - Adds descending ordering by an expression to the query + Checks whether a given Keyword is a SPARQL Non-Query Keyword + Keyword to check + - + - Abstract Dataset wrapper implementation for datasets that can load graphs on demand + Checks whether a given Keyword is a SPARQL Function Verb + Keyword to check + - + - Creates a new Demand Dataset + Checks whether a given Keyword is a SPARQL 1.1 Function Verb - Underlying Dataset + Keyword to check + - + - Sees if the underlying dataset has a graph and if not tries to load it on demand + Checks whether a given Keyword is a SPARQL Aggregate Keyword (includes keywords related to aggregates like DISTINCT, AS and Leviathan extension aggregate keywords) - Graph URI + Keyword to check - + - Method to be implemented by derived classes which implements the loading of graphs on demand + Checks whether a given Keyword is a SPARQL Aggregate Function Keyword (only keywords for the SPARQL built-in aggregate functions) - Graph URI - Graph + Keyword to check - + - Implementation of a dataset wrapper which can load additional graphs from the web on demand + Checks whether a given Keyword is a SPARQL Update Keyword + + - + - Creates a new Web Demand Dataset + Checks whether a given QName is valid in Sparql - Underlying Dataset + QName to check + SPARQL 
Syntax + - + - Tries to load graphs from the web + Checks whether a given Variable Name is valid in Sparql - Graph URI - Graph + Variable Name to check - + - - Namespace for classes used to define a Dataset over which SPARQL Queries and Updates evaluated using the Leviathan engine operate - + Gets whether a given prefix declaration is valid in SPARQL + Prefix declaration + - + - Abstract Base class of dataset designed around out of memory datasets where you rarely wish to load data into memory but simply wish to know which graph to look in for data + Gets whether a given BNode ID is valid + Value + - + - Creates a new Quad Dataset + Checks whether a given Character matches the PN_CHARS_BASE rule from the Sparql Specification + Character to test + - + - Creates a new Quad Dataset + Checks whether a given Character matches the PN_CHARS_U rule from the SPARQL Specification - Whether to make the default graph the union of all graphs + Character to test + - + - Creates a new Quad Dataset + Checks whether a given Character matches the PN_CHARS rule from the SPARQL Specification - URI of the Default Graph + Character to test + - + - Sets the Active Graph + Checks whether a given String matches the PN_LOCAL rule from the Sparql Specification - Graph URIs + String as character array + SPARQL Syntax + - + - Sets the Active Graph + Checks whether a given String matches the PN_PREFIX rule from the SPARQL Specification - Graph URI + String as character array + - + - Sets the Default Graph + Checks whether a given String matches the PLX rule from the SPARQL Specification - Graph URI + String as character array + Start Index + Resulting End Index + - + - Sets the Default Graph + Gets whether a character is a Hex character - Graph URIs + Character + - + - Resets the Active Graph + Unescapes local name escapes from QNames + Value to unescape + - + - Resets the Default Graph + Checks whether the given value is a valid Numeric Literal in Sparql + Value to test + - + - Gets the 
Default Graph URIs + Checks whether the given value is a valid Integer Literal in Sparql + Value to test + - + - Gets the Active Graph URIs + Checks whether the given value is a valid Decimal Literal in Sparql + Value to test + - + - Gets whether this dataset uses a union default graph + Checks whether the given value is a valid Float Literal in Sparql + + - + - Gets whether the given URI represents the default graph of the dataset + Checks whether the given value is a valid Double Literal in Sparql - Graph URI + Value to test - + - Adds a Graph to the dataset + Determines the Sparql Numeric Type for a Literal based on its Data Type Uri - Graph + Data Type Uri + - + - Adds a Quad to the Dataset + Determines the Sparql Numeric Type for a Literal based on its Data Type Uri - Graph URI - Triple + Data Type Uri as a String + - + - Removes a Graph from the Dataset + Calculates the Effective Boolean Value of a given Node according to the Sparql specification - Graph URI + Node to computer EBV for + - + - Removes a Quad from the Dataset + Checks whether the Query is a SELECT Query - Graph URI - Triple + Query Type + - + - Gets whether a Graph with the given URI is the Dataset + Implements Node Equality with SPARQL Semantics - Graph URI + Node + Node - + - Determines whether a given Graph exists in the Dataset + Implements Node Inequality with SPARQL Semantics - Graph URI + Node + Node - + - Gets the Graphs in the dataset + Implements Numeric Equality with SPARQL Semantics + Node + Node + SPARQL Numeric Tyoe + - + - Gets the URIs of the graphs in the dataset + Implements Date Time Equality with SPARQL Semantics + Node + Node + - + - Gets the Graph with the given URI from the Dataset + Implements Date Equality with SPARQL Semantics - Graph URI + Node + Node - - - This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the 
underlying storage - - - + - Gets a Graph from the dataset + Implements Time Span Equality with SPARQL Semantics - Graph URI + Node + Node - + - Gets a modifiable graph from the dataset + Converts a Literal Node to a Decimal - Graph URI + Literal Node - + - Gets whether the dataset has any triples + Converts a Literal Node to a Double + Literal Node + - + - Gets whether the dataset contains a triple + Converts a Literal Node to a Float - Triple + Literal Node - + - Gets whether a Triple exists in a specific Graph of the dataset + Converts a Literal Node to an Integer - Graph URI - Triple + Literal Node - + - Gets all triples from the dataset + Converts a Literal Node to a Date Time + Literal Node + - + - Gets all the Triples for a specific graph of the dataset + Converts a Literal Node to a Date Time Offset - Graph URI + Literal Node - + - Gets all the Triples with a given subject + Converts a Literal Node to a Time Span - Subject + Literal Node - + - Gets all the Triples with a given subject from a specific graph of the dataset + Gets a SPARQL Formatter to use in formatting Queries as Strings - Graph URI - Subject - - + - Gets all the Triples with a given predicate + Class of Sparql Variables - Predicate - - + - Gets all the Triples with a given predicate from a specific graph of the dataset + Creates a new Sparql Variable - Graph URI - Predicate - + Variable Name + Does this Variable appear in the Result Set? 
- + - Gets all the Triples with a given object + Creates a new Sparql Variable - Object - + Variable Name (with leading ?/$ removed) - + - Gets all the Triples with a given object from a specific graph of the dataset + Creates a new Sparql Variable which is an Aggregate - Graph URI - Object - + Variable Name (with leading ?/$ removed) + Aggregate Function + All Aggregate Variables are automatically considered as Result Variables - + - Gets all the Triples with a given subject and predicate + Creates a new Sparql Variable which is a Projection Expression - Subject - Predicate - + Variable Name (with leading ?/$ removed) + Projection Expression - + - Gets all the Triples with a given subject and predicate from a specific graph of the dataset + Variable Name - Graph URI - Subject - Predicate - - + - Gets all the Triples with a given subject and object + Gets whether the Variable appears in the Result Set - Subject - Object - - + - Gets all the Triples with a given subject and object from a specific graph of the dataset + Gets whether the Variable is an Aggregate - Graph URI - Subject - Object - - + - Gets all the Triples with a given predicate and object + Gets whether the Variable is a Projection Expression - Predicate - Object - - + - Gets all the Triples with a given predicate and object from a specific graph of the dataset + Gets the Aggregate Function for this Variable - Graph URI - Predicate - Object - - + - Flushes any changes to the dataset + Gets the Projection Expression for this Variable - + - Discards any changes to the dataset + Get the String representation of the Variable + - + - Abstract Base class for immutable quad datasets + Abstract Base class for SPARQL Views which are Graphs which are generated from SPARQL Queries and get automatically updated when the Store they are attached to changes + + + CONSTRUCT, DESCRIBE or SELECT queries can be used to generate a Graph. 
If you use a SELECT query the returned variables must contain ?s, ?p and ?o in order to generate a view correctly + + - + - Throws an error as this dataset is immutable + SPARQL Query - Graph - + - Throws an error as this dataset is immutable + Graphs that are mentioned in the Query - Graph URI - Triple - + - Throws an error as this dataset is immutable + Triple Store the query operates over - Graph URI - + - Throws an error as this dataset is immutable + Creates a new SPARQL View - Graph URI - Triple + SPARQL Query + Triple Store to query - + - Throws an error as this dataset is immutable + Creates a new SPARQL View - Graph URI - + SPARQL Query + Triple Store to query - + - Abstract Base class for quad datasets that support transactions + Creates a new SPARQL View - - - The Transaction implementation of dotNetRDF is based upon a MRSW concurrency model, since only one writer may be active changes are immediately pushed to the dataset and visible within the transaction and they are committed or rolled back when Flush() or Discard() are called. - - - So in practical terms it is perfectly OK for the storage to be updated during a transaction because if the transaction fails the changes will be rolled back because all changes are stored in-memory until the end of the transaction. This may not be an ideal transaction model for all scenarios so you may wish to implement your own version of transactions or code your implementations of the abstract methods accordingly to limit actual persistence to the end of a transaction. 
- - + SPARQL Query + Triple Store to query - + - Creates a Transactional Quad Dataset + Initialises the SPARQL View - + - Creates a Transactional Quad Dataset + Invalidates the View causing it to be updated - Sets whether the default graph should be the union of all graphs - + - Creates a Transactional Quad Dataset + Callback for when asychronous invalidation completes - Default Graph URI + Async call results - + - Adds a Graph to the Dataset + Forces the view to be updated - Graph to add - + - Adds a Graph to the Dataset + Abstract method that derived classes should implement to update the view - Graph to add - + - Removes a Graph from the Dataset + Gets the error that occurred during the last update (if any) - Graph URI - + - Removes a Graph from the dataset + Represents a SPARQL View over an in-memory store - Graph URI - + - Gets a Graph from the dataset + Creates a new SPARQL View - Graph URI - + SPARQL Query + Triple Store to query - + - Gets a Graph from the Dataset that can be modified + Creates a new SPARQL View - Graph URI - + SPARQL Query + Triple Store to query - + - Gets a Graph from the Dataset that can be modified transactionally + Creates a new SPARQL View - Graph URI - + SPARQL Query + Triple Store to query - + - Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + Updates the view by making the SPARQL Query in-memory over the relevant Triple Store - - Commits the Active Transaction - - + - Ensures that any changes to the Dataset (if any) are discarded + Represents a SPARQL View over an arbitrary native Triple Store - - Rollsback the Active Transaction - - + - Allows the derived dataset to take any post-Flush() actions required + Creates a new SPARQL View + SPARQL Query + Triple Store to query - + - Allows the derived dataset to take any post-Discard() actions required + Creates a new SPARQL View + SPARQL Query + Triple Store to query - + - An in-memory dataset that operates in terms of quads, underlying storage 
is identical to a InMemoryDataset though this dataset should be more performant for queries that access named graphs frequently + Creates a new SPARQL View + SPARQL Query + Triple Store to query - + - Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage + Updates the view by making the query over the Native Store (i.e. the query is handled by the stores SPARQL implementation) - + - Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage + A collection of cross-target extensions to the string class - Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Creates a new in-memory dataset containing initially just the given graph and treating the given graph as the default graph of the dataset + Return true if the character sequence starting at the specifie offset is a URI hex-encoded character - Graph + The input string + The character offset from which to start the check for a hex-encoded character + - + - Creates a new In-Memory dataset + Key for Objects that are cached by the Configuration Loader - In-Memory queryable store - + - Creates a new In-Memory dataset + Creates a new Cached Object Key - In-Memory queryable store - Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset + Object Node + Configuration Graph - + - Creates a new In-Memory dataset + Gets the Hash Code for the Key - In-Memory queryable store - Default Graph URI + - + - Gets the Lock used to ensure MRSW concurrency on the dataset when available + Gets whether this Key is equal to the given Object + Object + - + - Adds a Graph to the Dataset merging it with any existing Graph with the same URI + Gets whether this Key is equal to the given Key - Graph + Key + - + - Removes a Graph from the Dataset + Object Factory for loading triple and graph collections - Graph URI - + - Gets 
whether a Graph with the given URI is the Dataset + Tries to load a triple/graph collection which is specified in the given configuration graph - Graph URI + Configuration Graph + Object Node + Target type + Returned Object - + - Gets all the Graphs in the Dataset + Gets whether this factory can load objects of the given type + Type + - + - Gets all the URIs of Graphs in the Dataset + Interface for Object Factories which are factory classes that can create Objects based on configuration information in a Graph - + - Gets the Graph with the given URI from the Dataset + Attempts to load an Object of the given type identified by the given Node and returned as the Type that this loader generates - Graph URI - + Configuration Graph + Object Node + Target Type + Created Object + True if the loader succeeded in creating an Object - For In-Memory datasets the Graph returned from this property is no different from the Graph returned by the GetModifiableGraphInternal() method + The Factory should not throw an error if some required configuration is missing as another factory further down the processing chain may still be able to create the object. If the factory encounters errors and all the required configuration information is present then that error should be thrown i.e. class instantiation throws an error or a call to load an object that this object requires fails. 
- + - Gets a Modifiable wrapper around a Graph in the Dataset + Returns whether this Factory is capable of creating objects of the given type - Graph URI + Target Type - - - Adds a quad to the dataset - - Graph URI - Triple - - + - Gets whether the dataset contains a given Quad + Interface for Objects which can have their configuration serialized to RDF - Graph URI - Triple - + - Gets all quads for a given graph + Serializes the Configuration in the given context - Graph URI - + Configuration Serialization Context - + - Gets all Quads with a given object + Inteface for Objects which can resolve paths specified for Configuration properties - Graph URI - Object - - + - Gets all Quads with a given predicate + Resolves a Path - Graph URI - Predicate + Path - + - Gets all Quads with a given predicate and object + The Configuration Loader is responsible for the loading of Configuration information and objects based upon information encoded in a Graph but more generally may be used for the loading of any type of object whose configuration has been loaded in a Graph and for which a relevant IObjectFactory is available. 
- Graph URI - Predicate - Object - + + + - + - Gets all Quads with a given subject + Configuration Namespace URI - Graph URI - Subject - - + - Gets all Quads with a given subject and object + Constants for URI Schemes with special meaning within the Configuration API - Graph URI - Subject - Object - - + - Gets all Quads with a given subject and predicate + Constants for URI Schemes with special meaning within the Configuration API - Graph URI - Subject - Predicate - - + - Removes a quad from the dataset + URI Constants for configuration properties - Graph URI - Triple - + - Flushes any changes to the store + URI Constants for configuration properties - + - A Triple Collection which is a thin wrapper around a BaseQuadDataset to reduce much of the complexity for ISparqlDataset implementors around returning of Graphs + URI Constants for configuration properties - + - An abstract dataset wrapper that can be used to wrap another dataset and just modify some functionality i.e. provides a decorator over an existing dataset + URI Constants for configuration properties - + - Underlying Dataset + URI Constants for configuration properties - + - Creates a new wrapped dataset + URI Constants for configuration properties - Dataset - + - Gets the Lock used to ensure MRSW concurrency on the dataset when available + URI Constants for configuration properties - + - Gets the underlying dataset + URI Constants for configuration properties - + - Sets the Active Graph for the dataset + URI Constants for configuration properties - Graph URIs - + - Sets the Active Graph for the dataset + URI Constants for configuration properties - Graph URI - + - Sets the Default Graph for the dataset + URI Constants for configuration properties - Graph URI - + - Sets the Default Graph for the dataset + URI Constants for configuration properties - Graph URIs - + - Resets the Active Graph + URI Constants for configuration properties - + - Resets the Default Graph + URI Constants for configuration 
properties - + - Gets the Default Graph URIs + URI Constants for configuration properties - + - Gets the Active Graph URIs + URI Constants for configuration properties - + - Gets whether the default graph is the union of all graphs + URI Constants for configuration properties - + - Adds a Graph to the dataset + URI Constants for configuration properties - Graph - + - Removes a Graph from the dataset + URI Constants for configuration properties - Graph URI - + - Gets whether the dataset contains a given Graph + URI Constants for configuration properties - Graph URI - - + - Gets the Graphs in the dataset + URI Constants for configuration properties - + - Gets the URIs of Graphs in the dataset + URI Constants for configuration properties - + - Gets a Graph from the dataset + URI Constants for configuration properties - Graph URI - - + - Gets a modifiable graph from the dataset + URI Constants for configuration properties - Graph URI - - + - Gets whether the dataset has any triples + URI Constants for configuration properties - + - Gets whether the dataset contains a given triple + URI Constants for configuration properties - Triple - - + - Gets all triples from the dataset + URI Constants for configuration properties - + - Gets triples with a given subject + URI Constants for configuration properties - Subject - - + - Gets triples with a given predicate + URI Constants for configuration properties - Predicate - - + - Gets triples with a given object + URI Constants for configuration properties - Object - - + - Gets triples with a given subject and predicate + URI Constants for configuration properties - Subject - Predicate - - + - Gets triples with a given subject and object + URI Constants for configuration properties - Subject - Object - - + - Gets triples with a given predicate and object + URI Constants for configuration properties - Predicate - Object - - + - Flushes any changes to the dataset + URI Constants for configuration properties - + - Discards any 
changes to the dataset + URI Constants for configuration properties - + - Serializes the Configuration of the Dataset + URI Constants for configuration properties - Serialization Context - + - Abstract Base Class for Datasets which provides implementation of Active and Default Graph management + URI Constants for configuration properties - + - Reference to the Active Graph being used for executing a SPARQL Query + URI Constants for configuration properties - + - Default Graph for executing SPARQL Queries against + URI Constants for configuration properties - + - Stack of Default Graph References used for executing a SPARQL Query when a Query may choose to change the Default Graph from the Dataset defined one + URI Constants for configuration properties - + - Stack of Active Graph References used for executing a SPARQL Query when there are nested GRAPH Clauses + URI Constants for configuration properties - + - Creates a new Dataset + URI Constants for configuration properties - + - Creates a new Dataset with the given Union Default Graph setting + URI Constants for configuration properties - Whether to use a Union Default Graph - + - Creates a new Dataset with a fixed Default Graph and without a Union Default Graph + URI Constants for configuration properties - - + - Gets a reference to the actual IGraph that is currently treated as the default graph + URI Constants for configuration properties - + - Sets the Default Graph for the SPARQL Query + URI Constants for configuration properties - - + - Sets the Default Graph + URI Constants for configuration properties - Graph URI - + - Sets the Default Graph + URI Constants for configuration properties - Graph URIs - + - Sets the Active Graph for the SPARQL Query + URI Constants for configuration properties - Active Graph - + - Sets the Active Graph for the SPARQL query + URI Constants for configuration properties - Uri of the Active Graph - - Helper function used primarily in the execution of GRAPH Clauses - - + - Sets 
the Active Graph for the SPARQL query + URI Constants for configuration properties - URIs of the Graphs which form the Active Graph - Helper function used primarily in the execution of GRAPH Clauses - + - Sets the Active Graph for the SPARQL query to be the previous Active Graph + URI Constants for configuration properties - + - Sets the Default Graph for the SPARQL Query to be the previous Default Graph + URI Constants for configuration properties - + - Gets the Default Graph URIs + URI Constants for configuration properties - + - Gets the Active Graph URIs + URI Constants for configuration properties - + - Gets whether the Default Graph is treated as being the union of all Graphs in the dataset when no Default Graph is otherwise set + URI Constants for configuration properties - + - Adds a Graph to the Dataset + URI Constants for configuration properties - Graph - + - Removes a Graph from the Dataset + URI Constants for configuration properties - Graph URI - + - Removes a Graph from the Dataset + URI Constants for configuration properties - Graph URI - + - Gets whether a Graph with the given URI is the Dataset + URI Constants for configuration properties - Graph URI - - + - Determines whether a given Graph exists in the Dataset + URI Constants for configuration properties - Graph URI - - + - Gets all the Graphs in the Dataset + URI Constants for configuration properties - + - Gets all the URIs of Graphs in the Dataset + URI Constants for configuration properties - + - Gets the Graph with the given URI from the Dataset + URI Constants for configuration properties - Graph URI - - - - This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the underlying storage - - - + - Gets the given Graph from the Dataset + URI Constants for configuration properties - Graph URI - - + - Gets the Graph with the given URI from the Dataset 
+ URI Constants for configuration properties - Graph URI - - - - Graphs returned from this method must be modifiable and the Dataset must guarantee that when it is Flushed or Disposed of that any changes to the Graph are persisted - - - + - Gets whether the Dataset has any Triples + URI Constants for configuration properties - + - Gets whether the Dataset contains a specific Triple + URI Constants for configuration properties - Triple - - + - Determines whether the Dataset contains a specific Triple + URI Constants for configuration properties - Triple to search for - - + - Gets all the Triples in the Dataset + URI Constants for configuration properties - + - Abstract method that concrete implementations must implement to return an enumerable of all the Triples in the Dataset + URI Constants for configuration properties - - + - Gets all the Triples in the Dataset with the given Subject + URI Constants for configuration properties - Subject - - + - Gets all the Triples in the Dataset with the given Subject + URI Constants for configuration properties - Subject - - + - Gets all the Triples in the Dataset with the given Predicate + URI Constants for configuration properties - Predicate - - + - Gets all the Triples in the Dataset with the given Predicate + URI Constants for configuration properties - Predicate - - + - Gets all the Triples in the Dataset with the given Object + URI Constants for configuration properties - Object - - + - Gets all the Triples in the Dataset with the given Object + URI Constants for configuration properties - Object - - + - Gets all the Triples in the Dataset with the given Subject and Predicate + URI Constants for configuration properties - Subject - Predicate - - + - Gets all the Triples in the Dataset with the given Subject and Predicate + URI Constants for configuration properties - Subject - Predicate - - + - Gets all the Triples in the Dataset with the given Subject and Object + URI Constants for configuration properties - Subject - 
Object - - + - Gets all the Triples in the Dataset with the given Subject and Object + URI Constants for configuration properties - Subject - Object - - + - Gets all the Triples in the Dataset with the given Predicate and Object + URI Constants for configuration properties - Predicate - Object - - + - Gets all the Triples in the Dataset with the given Predicate and Object + URI Constants for configuration properties - Predicate - Object - - + - Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + URI Constants for configuration properties - + - Ensures that any changes to the Dataset (if any) are discarded + URI Constants for configuration properties - + - Abstract Base Class for Immutable Datasets + URI Constants for configuration classes - + - Throws an exception since Immutable Datasets cannot be altered + URI Constants for configuration classes - Graph to add - + - Throws an exception since Immutable Datasets cannot be altered + URI Constants for configuration classes - Graph URI - + - Throws an exception since Immutable Datasets cannot be altered + URI Constants for configuration classes - Graph URI - + - Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + URI Constants for configuration classes - + - Ensures that any changes to the Dataset (if any) are discarded + URI Constants for configuration classes - + - Abstract Base Class for Mutable Datasets that support Transactions + URI Constants for configuration classes - - - The Transaction implementation of dotNetRDF is based upon a MRSW concurrency model, since only one writer may be active changes are immediately pushed to the dataset and visible within the transaction and they are committed or rolled back when Flush() or Discard() are called. 
- - - So in practical terms it is perfectly OK for the storage to be updated during a transaction because if the transaction fails the changes will be rolled back because all changes are stored in-memory until the end of the transaction. This may not be an ideal transaction model for all scenarios so you may wish to implement your own version of transactions or code your implementations of the abstract methods accordingly to limit actual persistence to the end of a transaction. - - - + - Creates a new Transactional Dataset + URI Constants for configuration classes - + - Creates a new Transactional Dataset with the given Union Default Graph setting + URI Constants for configuration classes - Whether to use a Union Default Graph - + - Creates a new Transactional Dataset with a fixed Default Graph and no Union Default Graph + URI Constants for configuration classes - Default Graph URI - + - Adds a Graph to the Dataset + URI Constants for configuration classes - Graph to add - + - Adds a Graph to the Dataset + URI Constants for configuration classes - Graph to add - + - Removes a Graph from the Dataset + URI Constants for configuration classes - Graph URI - + - Gets a Graph from the Dataset + URI Constants for configuration classes - Graph URI - - - If the Graph has been modified during the active Transaction the modified version is returned rather than the original version - - + - Gets a Graph from the Dataset that can be modified + URI Constants for configuration classes - Graph URI - - + - Gets a Graph from the Dataset that can be modified transactionally + URI Constants for configuration classes - Graph URI - - + - Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + URI Constants for configuration classes - - Commits the Active Transaction - - + - Ensures that any changes to the Dataset (if any) are discarded + URI Constants for configuration classes - - Rollsback the Active Transaction - - + - Allows the derived dataset to take 
any post-Flush() actions required + URI Constants for configuration classes - + - Allows the derived dataset to take any post-Discard() actions required + URI Constants for configuration classes - + - A Graph Collection which wraps an ISparqlDataset implementation so it can be used as if it was a Graph Collection + URI Constants for configuration classes - + - Creates a new Dataset Graph collection + URI Constants for configuration classes - SPARQL Dataset - + - Gets whether the Collection contains a Graph with the given URI + URI Constants for configuration classes - Graph URI - - + - Adds a Graph to the Collection + URI Constants for configuration classes - Graph to add - Whether to merge the given Graph with any existing Graph with the same URI - Thrown if a Graph with the given URI already exists and the mergeIfExists is set to false - + - Removes a Graph from the Collection + URI Constants for configuration classes - URI of the Graph to removed - + - Gets the number of Graphs in the Collection + URI Constants for configuration classes - + - Gets the URIs of Graphs in the Collection + URI Constants for configuration classes - + - Gets the Graph with the given URI + URI Constants for configuration classes - Graph URI - - + - Disposes of the Graph Collection + URI Constants for configuration classes - + - Gets the enumeration of Graphs in this Collection + URI Constants for configuration classes - - + - Represents an in-memory dataset (i.e. 
a InMemoryQueryableStore) for querying and updating using SPARQL + URI Constants for configuration classes - + - Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage + URI Constants for configuration classes - + - Creates a new in-memory dataset using the default in-memory TripleStore as the underlying storage + QName Constants for Default Types for some configuration classes - Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Creates a new in-memory dataset containing initially just the given graph and treating the given graph as the default graph of the dataset + QName Constants for Default Types for some configuration classes - Graph - + - Creates a new In-Memory dataset + QName Constants for Default Types for some configuration classes - In-Memory queryable store - + - Creates a new In-Memory dataset + QName Constants for Default Types for some configuration classes - In-Memory queryable store - Whether the Default Graph when no Active/Default Graph is explicitly set should be the union of all Graphs in the Dataset - + - Creates a new In-Memory dataset + QName Constants for Default Types for some configuration classes - In-Memory queryable store - Default Graph URI - + - Gets the Lock used to ensure MRSW concurrency on the dataset when available + QName Constants for Default Types for some configuration classes - + - Adds a Graph to the Dataset merging it with any existing Graph with the same URI + QName Constants for Default Types for some configuration classes - Graph - + - Removes a Graph from the Dataset + QName Constants for Default Types for some configuration classes - Graph URI - + - Gets whether a Graph with the given URI is the Dataset + Cache for loaded objects - Graph URI - - + - Gets all the Graphs in the Dataset + Set of built-in object factories that are automatically registered and used - + - Gets all the URIs of Graphs in 
the Dataset + Path resolver - + - Gets the Graph with the given URI from the Dataset + Gets or sets the provider of external settings - Graph URI - - - - For In-Memory datasets the Graph returned from this property is no different from the Graph returned by the GetModifiableGraphInternal() method - - + On .NET Framework defaults to a reader of <appSettings> configuration section - + - Gets a Modifiable wrapper around a Graph in the Dataset + Loads a Configuration Graph and applies auto-configuration - Graph URI + URI to load from - + - Gets whether the Dataset contains a specific Triple + Loads a Configuration Graph and applies auto-configuration if desired - Triple + URI to load from + Whether to apply auto-configuration - + - Gets all the Triples in the underlying in-memory store + Loads a Configuration Graph and applies auto-configuration + File to load from - + - Gets all the Triples in the Dataset with the given Subject + Loads a Configuration Graph and applies auto-configuration if desired - Subject + File to load from + Whether to apply auto-configuration - + - Gets all the Triples in the Dataset with the given Predicate + Loads a Configuration Graph and applies auto-configuration - Predicate + Embedded Resource to load - + - Gets all the Triples in the Dataset with the given Object + Loads a Configuration Graph and applies auto-configuration if desired - Object + Embedded Resource to load + Whether to apply auto-configuration - + - Gets all the Triples in the Dataset with the given Subject and Predicate + Common loader for Configuration Graphs, handles the resolution of dnr:imports and applies the auto-configuration if selected - Subject - Predicate + Configuration Graph + Source the graph originated from + Whether to apply auto-configuration - + - Gets all the Triples in the Dataset with the given Subject and Object + Common loader for Configuration Graphs, handles the resolution of dnr:imports and applies the auto-configuration if selected - Subject - 
Object + Configuration Graph + Sources the graph originated from + Whether to apply auto-configuration - + - Gets all the Triples in the Dataset with the given Predicate and Object + Given a Configuration Graph applies all available auto-configuration based on the contents of the graph - Predicate - Object - + Configuration Graph - + - If there have been changes made to the Dataset and the underlying in-memory store is a ITransactionalStore ensures the underlying store is notified to flush those changes + Given a Configuration Graph will detect and configure Object Factories defined in the configuration + Configuration Graph - + - Interfaces for Datasets that SPARQL Queries and Updates can be applied to + Given a Configuration Graph will detect and configure static options that are specified using the dnr:configure property with special <dotnetrdf-configure:Class/Property> subject URIs + Configuration Graph - Note: For all operations that take a Graph URI a null Uri should be considered to refer to the Default Graph of the dataset - -

Default and Active Graph

- - Leviathan expects that a Query operates over the Dataset in the following order: -
    -
  1. If an Active Graph is set then Queries operate over that
  2. -
  3. Otherwise if a Default Graph is set then Queries operate over that
  4. -
  5. Finally the Queries operate over the all triples, the notion of all triples may be dataset implementation specific i.e. may be union of all graphs, the default unnamed graph only or something else entirely
  6. -
- Please note that the Query may change the Active and Default Graph over the course of the query depending on the Query e.g. FROM, FROM NAMED and GRAPH all can potentially change these. -
- - You can limit your queries to use specific portions of your dataset by using the SetActiveGraph() and SetDefaultGraph() methods on your dataset instance before then passing it to the LeviathanQueryProcessor + An example of using this mechanism to configure a static option is as follows: +
+            <dotnetrdf-configure:VDS.RDF.Options#UsePLinqEvaluation> dnr:configure false .
+            
- Note: By default the InMemoryDataset uses the Union of all Graphs in the Dataset if no Active/Default Graph is otherwise specified. Use the UsesUnionDefaultGraph property to see whether a Dataset implementation behaves in this way. + Class and property names must be fully qualified, to specify static options outside of dotNetRDF itself you can add an additional path segment with the assembly name after the initial configure keyword. If the class/property does not exist or the value of the literal cannot be appropriately converted to the type of the property then an exception will be thrown. If there is a problem setting the property (e.g. it does not have a public setter) then an exception will be thrown.
- + - Sets the Active Graph to be the merge of the Graphs with the given URIs + Given a Configuration Graph will detect Readers and Writers for RDF and SPARQL syntaxes and register them with MimeTypesHelper. This will cause the library defaults to be overridden where appropriate. - Graph URIs + Configuration Graph - + - Sets the Active Graph to be the Graph with the given URI + Given a Configuration Graph will detect and configure SPARQL Operators - Graph URI + Configuration Graph - + - Sets the Default Graph to be the Graph with the given URI + Checks for circular references and throws an error if there is one - Graph URI + Object you are attempting to load + Object being referenced + QName for the property that makes the reference + + + If the Object you are trying to load and the Object you need to load are equal then this is a circular reference and an error is thrown + + + The ConfigurationLoader is not currently capable of detecting more subtle circular references + + - + - Sets the Default Graph to be the merge of the Graphs with the given URIs + Creates a URI Node that refers to some Configuration property/type - Graph URIs + Configuration Graph + QName of the property/type + + + + The QName provides should be of the form dnr:qname - the dnr prefix will be automatically be considered to be to the Configuration Namespace which is defined by the ConfigurationNamespace constant. + + + This function uses caching to ensure that URI Nodes aren't needlessly recreated in order to save memory. + + - + - Resets the Active Graph to the previous Active Graph + Clears the Object Loader cache (this is not recommended) + + + This method should only be invoked in cases where you have attempted to load an object and some error occurred which was external to dotNetRDF e.g. 
network connectivity problem and + + - + - Resets the Default Graph to the previous Default Graph + Gets all the values given for a property of a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Node + + Enumeration of values given for the property for the Object + - + - Gets the enumeration of the Graph URIs that currently make up the default graph + Gets all the literal values given for a property of a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Node + + + + Only returns the value part of Literal Nodes which are given as values for the property i.e. ignores all non-Literals and discards any language/data type from Literals + + - + - Gets the enumeration of the Graph URIs that currently make up the active graph + Gets the first value given for a property of a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Node + + First value given for the property of the Object + - + - Gets whether the Default Graph is treated as being the union of all Graphs in the dataset when no Default Graph is otherwise set + Gets the first value given for the first found property of a given Object in the Configuration Graph + Configuration Graph + Object Node + Properties + + First value given for the first property of the Object which is matched + - + - Adds a Graph to the Dataset + Gets the String value or null of the first instance of a property for a given Object in the Configuration Graph where the value for the property is a Literal Node - Graph - May be thrown if the Dataset is immutable i.e. 
Updates not supported /// May be thrown if the Dataset is immutable + Configuration Graph + Object Node + Property Node + + + String value of the first instance of the property or a null if no values or not a literal value + + + If you want the String value regardless of Node type then use the GetConfigurationValue function instead + + - + - Removes a Graph from the Dataset + Gets the String value or null of the first instance of the first property for a given Object in the Configuration Graph where the value for the property is a Literal Node - Graph URI - May be thrown if the Dataset is immutable i.e. Updates not supported /// May be thrown if the Dataset is immutable + Configuration Graph + Object Node + Property Nodes + + + String value of the first instance of the first property or a null if no values or not a literal value + + + If you want the String value regardless of Node type then use the GetConfigurationValue function instead + + - + - Gets whether a Graph with the given URI is the Dataset + Gets the String value or null of the first instance of a property for a given Object in the Configuration Graph - Graph URI + Configuration Graph + Object Node + Property Node - + - Gets all the Graphs in the Dataset + Gets the String value or null of the first instance of the first property for a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Nodes + - + - Gets all the URIs of Graphs in the Dataset + Gets the Boolean value or a given default of the first instance of a property for a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Node + Default Value to return if there is no valid boolean value + + If there is a valid boolean value for the property then that is returned, in any other case the given Default Value is returned + - + - Gets the Graph with the given URI from the Dataset + Gets the Boolean value or a given default of the first instance of the first property for a given Object 
in the Configuration Graph - Graph URI - - - - This property need only return a read-only view of the Graph, code which wishes to modify Graphs should use the GetModifiableGraph() method to guarantee a Graph they can modify and will be persisted to the underlying storage - - + Configuration Graph + Object Node + Property Nodes + Default Value to return if there is no valid boolean value + + If there is a valid boolean value for any property then that is returned, in any other case the given Default Value is returned + - + - Gets the Graph with the given URI from the Dataset + Gets the 64 bit Integer value or a given default of the first instance of a property for a given Object in the Configuration Graph - Graph URI - - May be thrown if the Dataset is immutable i.e. Updates not supported /// May be thrown if the Dataset is immutable - - - Graphs returned from this method must be modifiable and the Dataset must guarantee that when it is Flushed or Disposed of that any changes to the Graph are persisted - - + Configuration Graph + Object Node + Property Node + Default Value to return if there is no valid boolean value + + If there is a valid integer value for the property then that is returned, in any other case the given Default Value is returned + - + - Gets whether the Dataset has any Triples + Gets the 64 bit Integer value or a given default of the first instance of the first property for a given Object in the Configuration Graph + Configuration Graph + Object Node + Property Nodes + Default Value to return if there is no valid boolean value + + If there is a valid integer value for any property then that is returned, in any other case the given Default Value is returned + - + - Gets whether the Dataset contains a specific Triple + Gets the 64 bit Integer value or a given default of the first instance of a property for a given Object in the Configuration Graph - Triple - + Configuration Graph + Object Node + Property Node + Default Value to return if there is no 
valid boolean value + + If there is a valid integer value for the property then that is returned, in any other case the given Default Value is returned + - + - Gets all the Triples in the Dataset + Gets the 64 bit Integer value or a given default of the first instance of the first property for a given Object in the Configuration Graph - - - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset - - + Configuration Graph + Object Node + Property Nodes + Default Value to return if there is no valid boolean value + + If there is a valid integer value for any property then that is returned, in any other case the given Default Value is returned + - + - Gets all the Triples in the Dataset with the given Subject + Gets the Username and Password specified for a given Object - Subject - + Configuration Graph + Object Node + Whether settings may be specified using the dnr:credentials property + Username + Password - - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset - + Username and/or Password will be null if there is no value specified for the relevant properties - + - Gets all the Triples in the Dataset with the given Predicate + Gets whether the given Object has already been loaded and cached - Predicate + Configuration Graph + Object Node - - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset - + If this returns true then loading that object again should be essentially instantaneous as it will come from the cache - + - Gets all the Triples in the Dataset with the given Object + Loads the Object identified by the given Node as an object of the 
given type based on information from the Configuration Graph - Object + Configuration Graph + Object Node + Target Type - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + Callers of this method should be careful to check that the Object returned is of a usable type to them. The Target Type parameter does not guarantee that the return value is of that type it is only used to determine which registered instances of IObjectFactory are potentially capable of creating the desired Object + + + Callers should also take care that any Objects returned from this method are disposed of when the caller no longer has a use for them as otherwise the reference kept in the cache here will cause the Object to remain in-memory consuming resources - + - Gets all the Triples in the Dataset with the given Subject and Predicate + Loads the Object identified by the given Node based on information from the Configuration Graph - Subject - Predicate + Configuration Graph + Object Node - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + Use this overload when you have a Node which identifies an Object and you don't know what the type of that Object is. This function looks up the dnr:type property for the given Object and then calls the other version of this function providing it with the relevant type information. 
- + - Gets all the Triples in the Dataset with the given Subject and Object + Attempts to find the Default Type to load an Object as when no explicit dnr:type property has been declared but an rdf:type property has been declared giving a valid Configuration Class - Subject - Object + Configuration Graph + Object Node - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset + Note: Only some configuration classes have corresponding default types, in general it is recommended that Configuration Graphs should always use the dnr:type property to explicitly state the intended type of an Object - + - Gets all the Triples in the Dataset with the given Predicate and Object + Attempts to return the Default Type to load an Object as when there is no dnr:type property but there is a rdf:type property - Predicate - Object + Type URI declared by the rdf:type property - - - Note: The Triples returned from the method should be limited to those in the current Active Graph if present, otherwise those in the current Default Graph if present and finally the entire Dataset - - - + - Ensures that any changes to the Dataset (if any) are flushed to the underlying Storage + Attempts to resolve special <appsettings> URIs into actual values + + + - While partly intended for use in implementations which support transactions though other implementations may wish to use this to ensure that changes to the dataset are persisted properly + These special URIs have the form <appsetting:Key> where Key is the key for an appSetting in your applications configuration file. When used these URIs are resolved at load time into the actual values from your configuration file. 
This allows you to avoid spreading configuration data over multiple files since you can specify things like connection settings in the Application Config file and then simply reference them in the dotNetRDF configuration file. - - - - - Ensures that any changes to the Dataset (if any) are discarded - - - Primarily intended for use in implementations which support transactions though other implementations may wish to use this to ensure that changes to the dataset are persisted properly + Warning: This feature is not supported in the Silverlight build - - - Interface for SPARQL Datasets which also provide a Lock by which threading can be controlled - - - Note that there is no guarantees that consuming code will respect the fact that a Dataset is Thread Safe and use the Lock property appropriately. Additionally some datasets may choose to implement thread safety in other ways which don't rely on this interface - - - - - Gets the Lock used to ensure MRSW concurrency of the Dataset when used with the Leviathan SPARQL processors - - - + - Class representing the SPARQL CALL() function + Creates a new instance of , which + loads an existing configuration graph and applies auto-configuration - + - Creates a new COALESCE function with the given expressions as its arguments + Creates a new instance of , which + loads an existing configuration graph and optionally applies auto-configuration - Argument expressions - + - Gets the value of the expression as evaluated in the given Context for the given Binding ID + Creates a new instance of , which + loads an existing configuration graph and applies auto-configuration - Evaluation Context - Binding ID - - + - Gets the Variables used in all the argument expressions of this function + Creates a new instance of , which + loads an existing configuration graph and optionally applies auto-configuration - + - Gets the String representation of the function + Creates a new instance of , which + loads an existing configuration graph from 
file and applies auto-configuration - - + - Gets the Type of the Expression + Creates a new instance of , which + loads an existing configuration graph and optionally applies auto-configuration - + - Gets the Functor of the Expression + Loads the Object identified by the given blank node identifier as an object of the given type based on information from the Configuration Graph + + See remarks under + - + - Gets the Arguments of the Expression + Loads the Object identified by the given URI as an object of the given type based on information from the Configuration Graph + + See remarks under + - + - Gets whether an expression can safely be evaluated in parallel + Loads the Object identified by the given blank node identifier as an + + See remarks under + - + - Transforms the Expression using the given Transformer + Loads the Object identified by the given URI as an - Expression Transformer - + + See remarks under + - + - Abstract Base Class for functions that generate UUIDs + Registers an Object Factory with the Configuration Loader + Object Factory - + - Evaluates the expression + Gets/Sets the in-use Path Resolver - Evaluation Context - Binding ID - - + - Method to be implemented by derived classes to implement the actual logic of turning the generated UUID into a RDF term + Resolves a Path using the in-use path-resolver - UUID + Path to resolve - - - Gets the variables used in the expression - - - - - Gets the Type of the expression - - - + - Gets the Functor of the expression + Marker class used in the ConfigurationLoader Object cache to mark objects which are unloadable due to some errors to stop the loader repeatedly trying to load an Object whose configuration is invalid, incomplete or otherwise erroneous. 
- + - Gets the arguments of the expression + Reads configuration using System.Configuration.ConfigurationManager - + - Applies the transformer to the arguments of this expression + Gets the setting from %lt;appSettings%gt; configuration section - Transformer - - + - Returns whether the function can be parallelised + Context Class for writing serializing Configuration information - + - Represents the SPARQL UUID Function + Configuration Graph being written to - + - Evaluates the function by generating the URN UUID form based on the given UUID + Creates a new Serialization Context - UUID - - + - Gets the functor for the expression + Creates a new Serialization Context + Base Configuration Graph - + - Represents the SPARQL STRUUID Function + Gets the Graph to which Configuration information should be written - + - Evaluates the function by returning the string form of the given UUID + Gets/Sets the next subject to be used - UUID - + + + Always returns a Blank Node if none is currently explicitly specified + + + Used to link objects together when you want some subsidiary object to serialize it's configuration and link that to the configuration you are currently serializing + + - + - Gets the functor for the expression + Factory class for producing Network Credentials - + - Namespace containing expression classes which provide the SPARQL built-in functions pertaining to string manipulation + Tries to load a Network Credential based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Abstract Base Class for SPARQL String Testing functions which take two arguments + Gets whether this Factory can load objects of the given Type + Type + - + - Creates a new Base Binary SPARQL String Function + Factory class for producing Graphs from Configuration Graphs - String Expression - Argument Expression - + - Evaluates the expression + Tries to load a Graph based on information from the Configuration Graph - Evaluation 
Context - Binding ID + Configuration Graph + Object Node + Target Type + Output Object - + - Abstract method that child classes must implement to + Gets whether this Factory can load objects of the given Type - - + Type - + - Determines whether the Arguments are valid + Factory class for producing Triple Stores from Configuration Graphs - String Literal - Argument Literal - - + - Gets the Expression Type + Tries to load a Triple Store based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Represents the SPARQL CONCAT function + Gets whether this Factory can load objects of the given Type + Type + - + - Creates a new SPARQL Concatenation function + An Object Factory for creating SPARQL Datasets - Enumeration of expressions - + - Gets the Value of the function as evaluated in the given Context for the given Binding ID + Tries to load a SPARQL Dataset based on information from the Configuration Graph - Context - Binding ID + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the Arguments the function applies to + Gets whether this Factory can load objects of the given Type + Type + - + - Gets whether an expression can safely be evaluated in parallel + Factory class for producing SPARQL Endpoints from Configuration Graphs - + - Gets the Variables used in the function + Tries to load a SPARQL Endpoint based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Gets the String representation of the function + Gets whether this Factory can load objects of the given Type + Type - + - Gets the Type of the SPARQL Expression + Factory class for producing Custom SPARQL Expression Factories from Configuration Graphs - + - Gets the Functor of the expression + Tries to load a SPARQL Custom Expression Factory based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + 
- + - Transforms the Expression using the given Transformer + Gets whether this Factory can load objects of the given Type - Expression Transformer + Type - + - Represents the SPARQL CONTAINS function + The Configuration Loader is responsible for the loading of Configuration information + and objects based upon information encoded in a Graph but more generally may be used + for the loading of any type of object whose configuration has been loaded in a Graph + and for which a relevant IObjectFactory is available. - + - Creates a new SPARQL CONTAINS function + Loads the Object identified by the given URI as an object of the given type based on information from the Configuration Graph - String Expression - Search Expression + + See remarks under + - + - Determines whether the String contains the given Argument + Loads the Object identified by the given blank node identifier as an object of the given type based on information from the Configuration Graph - String Literal - Argument Literal - + + See remarks under + - + - Gets the Functor of the Expression + Loads the Object identified by the given blank node identifier as an + + See remarks under + - + - Gets the String representation of the Expression + Loads the Object identified by the given URI as an - + + See remarks under + - + - Transforms the Expression using the given Transformer + Gives access to application settings from external source - Expression Transformer - - + - Represents the SPARQL ENCODE_FOR_URI Function + Gets a setting for the provided key + null if the was not found - + - Creates a new Encode for URI function + Factory class for producing IObjectFactory instances from Configuration Graphs - Expression + + + This essentially reflexive implementation allows for defining additional IObjectFactory instances that can load custom/user defined types based on user definable Configuration. 
If your Configuration Graphs contain custom object factory definitions then you should call ConfigurationLoader.AutoConfigureObjectFactories() before attempting to load any Configuration. + + - + - Gets the Value of the function as applied to the given String Literal + Tries to load an Object Factory based on information from the Configuration Graph - Simple/String typed Literal + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the String representation of the function + Gets whether this Factory can load objects of the given Type + Type - + - Gets the Functor of the Expression + SPARQL Operator factory which is capable of loading any implementation of ISparqlOperator which has a public unparameterized constructor - + - Transforms the Expression using the given Transformer + Tries to load an object of the given type - Expression Transformer + Configuration Graph + Object Node + Target Type + Returned Object - + - Represents the SPARQL LCASE Function + Gets whether this factory can load objects of the given type + Type + - + - Creates a new LCASE function + An Object Factory that can generate SPARQL Query and Algebra Optimisers - Argument Expression - + - Calculates + Tries to load a SPARQL Query/Algebra Optimiser based on information from the Configuration Graph - + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the Functor of the Expression + Gets whether this Factory can load objects of the given Type + Type + - + - Gets the String representation of the Expression + Object Factory used by the Configuration API to load parsers from configuration graphs + + + + + Tries to load a Parser based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object - + - Transforms the Expression using the given Transformer + Gets whether this Factory can load objects of the given Type - Expression Transformer + Type - + - Represents the XPath fn:replace() function + Object 
Factory used by the Configuration API to load writers from configuration graphs - + - Creates a new SPARQL Replace function + Tries to load a Writer based on information from the Configuration Graph - Text Expression - Search Expression - Replace Expression + Configuration Graph + Object Node + Target Type + Output Object + - + - Creates a new SPARQL Replace function + Gets whether this Factory can load objects of the given Type - Text Expression - Search Expression - Replace Expression - Options Expression + Type + - + - Configures the Options for the Regular Expression + Factory class for producing Permissions from Configuration Graphs - Node detailing the Options - Whether errors should be thrown or suppressed - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Tries to load a Permission based on information from the Configuration Graph - Evaluation Context - Binding ID + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the String representation of this Expression + Gets whether this Factory can load objects of the given Type + Type - + - Gets the enumeration of Variables involved in this Expression + Factory class for producing User Groups from Configuration Graphs - + - Gets the Type of the Expression + Tries to load a User Group based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Gets the Functor of the Expression + Gets whether this Factory can load objects of the given Type + Type + - + - Gets the Arguments of the Expression + Possible permission models - + - Gets whether an expression can safely be evaluated in parallel + If the action appears in the deny list it is denied unless it is in the allow list, otherwise it is allowed - + - Transforms the Expression using the given Transformer + If the action appears in the allow list it is allowed unless it is in the deny list, otherwise it is denied - Expression 
Transformer - - + - Class representing the SPARQL Datatype() function + All actions are allowed - + - Creates a new Datatype() function expression + All actions are denied - Expression to apply the function to - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Interface for Permission - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets whether the Permission is for a specific action + Action - + - Gets the Type of the Expression + Represents a action that can be allowed/denied - + - Gets the Functor of the Expression + Creates a new Permission for the given Action + Action - + - Transforms the Expression using the given Transformer + Gets whether the Permission is for the given action - Expression Transformer + Action - + - Class representing the SPARQL Datatype() function in SPARQL 1.1 + Represents a set of Permissions that can be allowed/denied - - This is required because the changes to the function in SPARQL 1.1 are not backwards compatible with SPARQL 1.0 - - + - Creates a new DataType function + Creates a new Permissions Set - Expression + Action - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Creates a new Permissions Set - Evaluation Context - Binding ID + Actions + + + + Gets whether the Permission is for the given action + + Action - + - Class representing the Sparql Lang() function + Represents a Group of Users and the permissions they have to perform actions - + - Creates a new Lang() function expression + Creates a new User Group - Expression to apply the function to - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Creates a new User Group which may allow guests - Evaluation Context - Binding ID - + Are guests allowed? 
+ + If guests are allowed then this Groups permissions apply to unauthenticated users + - + - Gets the String representation of this Expression + Gets/Sets whether Guests are allowed - - + - Gets the Type of the Expression + Gets/Sets the in-use Permission Model - + - Gets the Functor of the Expression + Adds a User to the Group + User Credentials - + - Transforms the Expression using the given Transformer + Adds an allow action permission to the Group - Expression Transformer - + Permission - + - Represents the SPARQL STRAFTER Function + Adds a deny action permission to the Group + Permission - + - Creates a new STRAFTER Function + Returns whether the Group has a member with the given username - String Expression - Starts Expression + Username + - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Returns whether the Group has a member with the given credentials - Evaluation Context - Binding ID + Username + Password - + - Determines whether the Arguments are valid + Gets whether the Group permits the action - String Literal - Argument Literal + Action - + - Gets the Variables used in the function + + Namespace for classes related to configuring Permissions + + + Warning: The API here is experimental and may changed/be removed in future releases + - + - Gets the Type of the Expression + Factory class for producing SPARQL Query Processors from Configuration Graphs - + - Gets the Functor of the Expression + Tries to load a SPARQL Query Processor based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Gets the Arguments of the Function + Gets whether this Factory can load objects of the given Type + Type + - + - Gets whether an expression can safely be evaluated in parallel + Factory class for producing SPARQL Update Processors from Configuration Graphs - + - Transforms the Expression using the given Transformer + Tries to load a SPARQL Update based on 
information from the Configuration Graph - Expression Transformer + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the String representation of the function + Gets whether this Factory can load objects of the given Type + Type - + - Represents the SPARQL STRBEFORE function + Factory class for producing SPARQL Graph Store HTTP Protocol Processors from Configuration Graphs - + - Creates a new STRBEFORE Function + Tries to load a SPARQL Graph Store HTTP Protocol Processor based on information from the Configuration Graph - String Expression - Starts Expression + Configuration Graph + Object Node + Target Type + Output Object + - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Gets whether this Factory can load objects of the given Type - Evaluation Context - Binding ID + Type - + - Determines whether the Arguments are valid + Factory class for producing Custom SPARQL Expression Factories from Configuration Graphs - String Literal - Argument Literal + + + + Tries to load a SPARQL Property Function Factory based on information from the Configuration Graph + + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the Variables used in the function + Gets whether this Factory can load objects of the given Type + Type + - + - Gets the Type of the Expression + Factory class for creating Web Proxies from Configuration Graphs - + - Gets the Functor of the Expression + Tries to load a Web Proxy based on information from the Configuration Graph + Configuration Graph + Object Node + Target Type + Output Object + - + - Gets the Arguments of the Function + Gets whether this Factory can load objects of the given Type + Type + - + - Gets whether an expression can safely be evaluated in parallel + Factory class for producing Reasoners from Configuration Graphs - + - Transforms the Expression using the given Transformer + Tries to load a Reasoner based on information from the Configuration 
Graph - Expression Transformer + Configuration Graph + Object Node + Target Type + Output Object - + - Gets the String representation of the function + Gets whether this Factory can load objects of the given Type + Type - + - Represents the SPARQL STRENDS Function + Factory class for producing IStorageProvider and instances from Configuration Graphs - + - Creates a new STRENDS() function + Tries to load a Generic IO Manager based on information from the Configuration Graph - String Expression - Argument Expression + Configuration Graph + Object Node + Target Type + Output Object + - + - Determines whether the given String Literal ends with the given Argument Literal + Gets whether this Factory can load objects of the given Type - String Literal - Argument Literal + Type - + - Gets the Functor of the Expression + Class for representing errors with dotNetRDF Configuration + + + Configuration exceptions are thrown when the user tries to load objects using the ConfigurationLoader and their is insufficient/invalid information to load the desired object + + - + - Gets the String representation of the Expression + Creates a new dotNetRDF Configuration Exception - + Error Message - + - Transforms the Expression using the given Transformer + Creates a new dotNetRDF Configuration Exception - Expression Transformer - + Error Message + Exception that caused this Exception - + - Class representing the Sparql Str() function + + Namespace for Configuration Classes which are used for dynamic loading of Configuration serialized as RDF Graphs. + + + This API which provides for encoding dotNetRDF centric configuration in RDF Graphs though it can be extended to serialize and deserialize arbitrary objects if desired. This configuration API is used extensively with our ASP.Net support as it allows for highly expressive and flexible configurations. See the documentation on the main website for many detailed examples. 
+ - + - Creates a new Str() function expression + A decorator for graph collections that allows for graphs to be loaded on demand if they don't exist in the underlying graph collection - Expression to apply the function to - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Creates a new decorator - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Creates a new decorator over the given graph collection - + Graph Collection - + - Gets the Type of the Expression + Checks whether the collection contains a Graph invoking an on-demand load if not present in the underlying collection + Graph URI + - + - Gets the Functor of the Expression + Loads a Graph on demand + URI of the Graph to load + A Graph if it could be loaded and throws an error otherwise - + - Transforms the Expression using the given Transformer + A decorator for graph collections where graphs not in the underlying graph collection can be loaded on-demand from the Web as needed - Expression Transformer - - + - Represents the SPARQL STRLEN Function + Creates a new Web Demand Graph Collection which loads Graphs from the Web on demand - + - Creates a new STRLEN() function + Creates a new Web Demand Graph Collection which loads Graphs from the Web on demand - Argument Expression + Collection to decorate - + - Determines the Length of the given String Literal + Tries to load a Graph on demand from a URI - String Literal + Graph URI - + - Gets the Functor of the Expression + A decorator for graph collection where graphs not in the underlying graph collection can be loaded on-demand from the Files on Disk as needed - + - Gets the String representation of the Expression + Creates a new Disk Demand Graph Collection which loads Graphs from the Web on demand - - + - Transforms the Expression using the given Transformer + Creates a new Disk Demand Graph Collection - Expression Transformer + Collection to decorate + + + + Tries to load 
a Graph on demand + + - + - Represents the SPARQL STRSTARTS Function + Abstract Base class for HTTP endpoints - + - Creates a new STRSTARTS() function + Creates a new Base Endpoint - String Expression - Argument Expression - + - Determines whether the given String Literal starts with the given Argument Literal + Creates a new Base Endpoint - String Literal - Argument Literal - + Endpoint URI - + - Gets the Functor of the Expression + Gets the Endpoints URI - + - Gets the String representation of the Expression + Gets/Sets the HTTP authentication credentials to be used - - + - Transforms the Expression using the given Transformer + Gets/Sets the HTTP Mode used for requests - Expression Transformer - + + This property defaults to the value AUTO. in AUTO mode GET will be used unless the total length of query parameters exceeeds 2048 characters + or the query contains non-ASCII characters, and POST will be used for longer queries or where the query contains non-ASCII characters. + + Only AUTO, GET and POST are permitted - implementations may override this property if they wish to support more methods. + + - + - Represents the SPARQL SUBSTR Function + Gets/Sets the HTTP Timeouts used specified in milliseconds + + + Defaults to 30 Seconds (i.e. the default value is 30,000) + + + It is important to understand that this timeout only applies to the HTTP request portions of any operation performed and that the timeout may apply more than once if a POST operation is used since the timeout applies separately to obtaining the request stream to POST the request and obtaining the response stream. Also the timeout does not in any way apply to subsequent work that may be carried out before the operation can return so if you need a hard timeout you should manage that yourself. + + + When set to a zero/negative value then the standard .Net timeout of 100 seconds will apply, use if you want the maximum possible timeout i.e. if you expect to launch extremely long running operations. 
+ + + Not supported under Silverlight, Windows Phone and Portable Class Library builds + + - + - Creates a new XPath Substring function + Sets the HTTP Digest authentication credentials to be used - Expression - Start + Username + Password - + - Creates a new XPath Substring function + Sets the HTTP Digest authentication credentials to be used - Expression - Start - Length + Username + Password + Domain - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Clears any in-use credentials so subsequent requests will not use HTTP authentication - Evaluation Context - Binding ID - - + - Gets the Variables used in the function + Serializes the endpoints Credential and Proxy information + Configuration Serialization Context - + - Gets the String representation of the function + Method which may be overridden in derived classes to add any additional custom request options/headers to the request - + HTTP Request + + This is called at the end of so can also be used to override that methods default behaviour + - + - Gets the Type of the Expression + Controls whether the Credentials set with the SetCredentials() method or the Credentialsare also used for a Proxy (if used) - + - Gets the Functor of the Expression + Sets a Proxy Server to be used + Proxy Address - + - Gets the Arguments of the Function + Sets a Proxy Server to be used + Proxy Address - + - Gets whether an expression can safely be evaluated in parallel + Gets/Sets a Proxy Server to be used - + - Transforms the Expression using the given Transformer + Clears any in-use credentials so subsequent requests will not use a proxy server - Expression Transformer - - + - Represents the SPARQL UCASE Function + Sets Credentials to be used for Proxy Server + Username + Password - + - Creates a new UCASE() function + Sets Credentials to be used for Proxy Server - Argument Expression + Username + Password + Domain - + - Converts the given String Literal to upper case + Gets/Sets 
Credentials to be used for Proxy Server - String Literal - - + - Gets the Functor of the Expression + Clears the in-use proxy credentials so subsequent requests still use the proxy server but without credentials - + - Gets the String representation of the Expression + Applies generic request options (timeout, authorization and proxy server) to a request - + HTTP Request - + - Transforms the Expression using the given Transformer + Abstract Base Implementation of the IGraph interface - Expression Transformer - - + - Namespace containing expression classes which provide the SPARQL built-in functions + Collection of Triples in the Graph - + - Namespace containing expression classes which provide the SPARQL built-in functions which have boolean results + Namespace Mapper - + - Represents the SPARQL ISNUMERIC() Function + Base Uri of the Graph - + - Creates a new SPARQL ISNUMERIC() Function + Blank Node ID Mapper - Argument Expression - + - Evaluates the expression + Creates a new Base Graph using the given Triple Collection - - - + Triple Collection to use - + - Gets the Type of this Expression + Creates a new Base Graph which uses the default as the Triple Collection - + - Gets the Functor of this Expression + Creates a Graph from the given Serialization Information + Serialization Information + Streaming Context - + - Gets the String representation of this Expression + Gets the set of Triples described in this Graph - - + - Transforms the Expression using the given Transformer + Gets the set of Nodes which make up this Graph - Expression Transformer - - + - Class representing the SPARQL BOUND() function + Gets the Namespace Mapper for this Graph which contains all in use Namespace Prefixes and their URIs + - + - Creates a new Bound() function expression + Gets the current Base Uri for the Graph - Variable Expression + + This value may be changed during Graph population depending on whether the Concrete syntax allows the Base Uri to be changed and how the Parser 
handles this + - + - Evaluates the expression + Gets whether a Graph is Empty ie. Contains No Triples or Nodes - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Asserts a Triple in the Graph - + The Triple to add to the Graph - + - Gets the Type of the Expression + Asserts a List of Triples in the graph + List of Triples in the form of an IEnumerable - + - Gets the Functor of the Expression + Retracts a Triple from the Graph + Triple to Retract + Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted - + - Transforms the Expression using the given Transformer + Retracts a enumeration of Triples from the graph - Expression Transformer - + Enumeration of Triples to retract - + - Class representing the Sparql IsBlank() function + Clears all Triples from the Graph + + + The Graph will raise the ClearRequested event at the start of the Clear operation which allows for aborting the operation if the operation is cancelled by an event handler. On completing the Clear the Cleared event will be raised. 
+ + - + - Creates a new IsBlank() function expression + Creates a New Blank Node with an auto-generated Blank Node ID - Expression to apply the function to + - + - Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Creates a New Blank Node with a user-defined Blank Node ID - Evaluation Context - Binding ID + Node ID to use - + - Gets the String representation of this Expression + Creates a New Literal Node with the given Value + String value of the Literal - + - Gets the Type of the Expression + Creates a New Literal Node with the given Value and Language Specifier + String value of the Literal + Language Specifier of the Literal + - + - Gets the Functor of the Expression + Creates a new Literal Node with the given Value and Data Type + String value of the Literal + URI of the Data Type + - + - Transforms the Expression using the given Transformer + Creates a new URI Node that refers to the Base Uri of the Graph - Expression Transformer - + - Class representing the Sparql IsIRI() function + Creates a new URI Node with the given URI + URI for the Node + + + Generally we expect to be passed an absolute URI, while relative URIs are permitted the behaviour is less well defined. If there is a Base URI defined for the Graph then relative URIs will be automatically resolved against that Base, if the Base URI is not defined then relative URIs will be left as is. In this case issues may occur when trying to serialize the data or when accurate round tripping is required. 
+ - + - Creates a new IsIRI() function expression + Creates a new URI Node with the given QName - Expression to apply the function to + QName for the Node + + Internally the Graph will resolve the QName to a full URI, throws an RDF Exception when this is not possible - + - Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Creates a new Variable Node - Evaluation Context - Binding ID + Variable Name - + - Gets the String representation of this Expression + Creates a new Graph Literal Node with its value being an Empty Subgraph - + - Gets the Type of the Expression + Creates a new Graph Literal Node with its value being the given Subgraph + Subgraph this Node represents + - + - Gets the Functor of the Expression + Returns the Blank Node with the given Identifier + The Identifier of the Blank Node to select + Either the Blank Node or null if no Node with the given Identifier exists - + - Transforms the Expression using the given Transformer + Returns the LiteralNode with the given Value in the given Language if it exists - Expression Transformer - + The literal value of the Node to select + The Language Specifier for the Node to select + Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists - + - Class representing the Sparql IsURI() function + Returns the LiteralNode with the given Value if it exists + The literal value of the Node to select + Either the LiteralNode Or null if no Node with the given Value exists + The LiteralNode in the Graph must have no Language or DataType set - + - Creates a new IsURI() function expression + Returns the LiteralNode with the given Value and given Data Type if it exists - Expression to apply the function to + The literal value of the Node to select + The Uri for the Data Type of the Literal to select + Either the LiteralNode Or null if no Node with the given Value and Data Type exists - + - Gets the String representation of this Expression + Returns the 
UriNode with the given QName if it exists + The QName of the Node to select - + - Gets the Functor of the Expression + Returns the UriNode with the given Uri if it exists + The Uri of the Node to select + Either the UriNode Or null if no Node with the given Uri exists - + - Transforms the Expression using the given Transformer + Gets all the Triples involving the given Uri - Expression Transformer - + The Uri to find Triples involving + Zero/More Triples - + - Class representing the Sparql IsLiteral() function + Gets all the Triples involving the given Node + The Node to find Triples involving + Zero/More Triples - + - Creates a new IsLiteral() function expression + Gets all the Triples with the given Uri as the Object - Expression to apply the function to + The Uri to find Triples with it as the Object + Zero/More Triples - + - Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Gets all the Triples with the given Node as the Object - Evaluation Context - Binding ID + The Node to find Triples with it as the Object - + - Gets the String representation of this Expression + Gets all the Triples with the given Node as the Predicate + The Node to find Triples with it as the Predicate - + - Gets the Type of the Expression + Gets all the Triples with the given Uri as the Predicate + The Uri to find Triples with it as the Predicate + Zero/More Triples - + - Gets the Functor of the Expression + Gets all the Triples with the given Node as the Subject + The Node to find Triples with it as the Subject + Zero/More Triples - + - Transforms the Expression using the given Transformer + Gets all the Triples with the given Uri as the Subject - Expression Transformer - + The Uri to find Triples with it as the Subject + Zero/More Triples - + - Class representing the Sparql LangMatches() function + Selects all Triples with the given Subject and Predicate + Subject + Predicate + - + - Creates a new LangMatches() function expression + Selects all 
Triples with the given Subject and Object - Expression to obtain the Language of - Expression representing the Language Range to match + Subject + Object + - + - Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Selects all Triples with the given Predicate and Object - Evaluation Context - Binding ID + Predicate + Object - + - Gets the String representation of this Expression + Gets whether a given Triple exists in this Graph + Triple to test - + - Gets the Type of the Expression + Merges another Graph into the current Graph + Graph to Merge into this Graph + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - + - Gets the Functor of the Expression + Merges another Graph into the current Graph + Graph to Merge into this Graph + Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in + + + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. + + + The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. 
When the Merge operation is completed the Merged event is raised + + - + - Transforms the Expression using the given Transformer + Determines whether a Graph is equal to another Object - Expression Transformer + Object to test + + + A Graph can only be equal to another Object which is an IGraph + + + Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals + + - - - Class representing the SPARQL REGEX function - - - + - Creates a new Regex() function expression + Determines whether this Graph is equal to the given Graph - Text to apply the Regular Expression to - Regular Expression Pattern + Graph to test for equality + Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes + + + See for documentation of the equality algorithm used. + - + - Creates a new Regex() function expression + Checks whether this Graph is a sub-graph of the given Graph - Text to apply the Regular Expression to - Regular Expression Pattern - Regular Expression Options + Graph + - + - Configures the Options for the Regular Expression + Checks whether this Graph is a sub-graph of the given Graph - Node detailing the Options - Whether errors should be thrown or suppressed + Graph + Mapping of Blank Nodes + - + - Evaluates the expression + Checks whether this Graph has the given Graph as a sub-graph - Evaluation Context - Binding ID + Graph - + - Gets the String representation of this Expression + Checks whether this Graph has the given Graph as a sub-graph + Graph + Mapping of Blank Nodes - + - Gets the enumeration of Variables involved in this Expression + Computes the Difference between this Graph the given Graph + Graph + + + + Produces a report which shows the changes that must be made to this Graph to produce the given Graph + + - + - Gets the Type of the Expression + Helper function for Resolving QNames to URIs + QName to resolve to a Uri + - + - Gets the Functor of the Expression + Creates a new 
unused Blank Node ID and returns it + - + - Gets the Arguments of the Expression + Event which is raised when a Triple is asserted in the Graph - + - Gets whether an expression can safely be evaluated in parallel + Event which is raised when a Triple is retracted from the Graph - + - Transforms the Expression using the given Transformer + Event which is raised when the Graph contents change - Expression Transformer - - + - Class representing the Sparql SameTerm() function + Event which is raised just before the Graph is cleared of its contents - + - Creates a new SameTerm() function expression + Event which is raised after the Graph is cleared of its contents - First Term - Second Term - + - Computes the Effective Boolean Value of this Expression as evaluated for a given Binding + Event which is raised when a Merge operation is requested on the Graph - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Event which is raised when a Merge operation is completed on the Graph - - + - Gets the Type of the Expression + Event Handler which handles the Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event + Sender + Triple Event Arguments - + - Gets the Functor of the Expression + Helper method for raising the Triple Asserted event manually + Triple Event Arguments - + - Transforms the Expression using the given Transformer + Helper method for raising the Triple Asserted event manually - Expression Transformer - + Triple - + - Represents an EXIST/NOT EXISTS clause used as a Function in an Expression + Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event + Sender + Triple Event Arguments - + - Creates a new EXISTS/NOT EXISTS function + Helper method for raising the Triple Retracted event manually - Graph Pattern - Whether this is an EXIST + - + - Gets the Value of this function which is a Boolean as a 
Literal Node + Helper method for raising the Triple Retracted event manually - Evaluation Context - Binding ID - + Triple - + - Internal method which evaluates the Graph Pattern + Helper method for raising the Changed event - Evaluation Context - - We only ever need to evaluate the Graph Pattern once to get the Results - + Triple Event Arguments - + - Gets the Variables used in this Expression + Helper method for raising the Changed event - + - Gets whether an expression can safely be evaluated in parallel + Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Gets the String representation of the Expression + Helper method for raising the Cleared event - - + - Gets the Type of the Expression + Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Gets the Functor of the Expression + Helper method for raising the Merged event - + - Gets the Arguments of the Expression + Helper method for attaching the necessary event Handlers to a Triple Collection + Triple Collection + + May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's + - + - Transforms the Expression using the given Transformer + Helper method for detaching the necessary event Handlers from a Triple Collection - Expression Transformer - + Triple Collection + + May be useful if you replace the Triple Collection after instantiation e.g. 
as done in SparqlView's + - + - Namespace containing expression classes which provide the SPARQL built-in functions which construct new terms + Disposes of a Graph - + - Class representing the SPARQL BNODE() function + Gets the Serialization Information for serializing a Graph + Serialization Information + Streaming Context - + - Creates a new BNode Function + Gets the Schema for XML Serialization + - + - Creates a new BNode Function + Reads the data for XML deserialization - Argument Expression + XML Reader - + - Gets the value of the expression as evaluated in a given Context for a given Binding + Writes the data for XML serialization - Evaluation Context - Binding ID - + XML Writer - + - Gets the Type of the Expression + Abstract Base Class for Graph Collections + Designed to allow the underlying storage of a Graph Collection to be changed at a later date without affecting classes that use it - + - Gets the Functor of the Expression + Checks whether the Graph with the given Uri exists in this Graph Collection + Graph Uri to test + + + The null URI is used to reference the Default Graph + - + - Gets the Variables used in the Expression + Adds a Graph to the Collection + Graph to add + Sets whether the Graph should be merged with an existing Graph of the same Uri if present - + - Gets the Arguments of the Expression + Removes a Graph from the Collection + Uri of the Graph to remove + + The null URI is used to reference the Default Graph + - + - Gets whether the expression can be parallelised + Gets the number of Graphs in the Collection - + - Gets the String representation of the Expression + Provides access to the Graph URIs of Graphs in the Collection - - + - Transforms the Expression using the given Transformer + Gets a Graph from the Collection - Expression Transformer + Graph Uri + + The null URI is used to reference the Default Graph + - + - Class representing the SPARQL IRI() function + Disposes of the Graph Collection + Invokes the Dispose() method of all 
Graphs contained in the Collection - + - Creates a new IRI() function expression + Gets the Enumerator for the Collection - Expression to apply the function to + - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Gets the Enumerator for this Collection - Evaluation Context - Binding ID - + - Gets the String representation of the function + Event which is raised when a Graph is added to the Collection - - + - Gets the Type of the Expression + Event which is raised when a Graph is removed from the Collection - + - Gets the Functor of the Expression + Helper method which raises the Graph Added event manually + Graph - + - Transforms the Expression using the given Transformer + Helper method which raises the Graph Removed event manually - Expression Transformer - + Graph - + - Class representing the Sparql StrDt() function + Abstract Class for Nodes, implements the two basic properties of the INode Interface - + - Creates a new STRDT() function expression + Reference to the Graph that the Node belongs to - String Expression - Datatype Expression - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Uri of the Graph that the Node belongs to - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Node Type for the Node - - + - Gets the Type of the Expression + Stores the computed Hash Code for this Node - + - Gets the Functor of the Expression + Base Constructor which instantiates the Graph reference, Graph Uri and Node Type of the Node + Graph this Node is in + Node Type - + - Transforms the Expression using the given Transformer + Nodes have a Type - Expression Transformer - - + - Class representing the Sparql StrDt() function + Nodes belong to a Graph - + - Creates a new STRLANG() function expression + Gets/Sets the Graph Uri of the Node - String Expression - Language Expression - + - Returns the value of the Expression as evaluated for a given 
Binding as a Literal Node + Nodes must implement an Equals method - Evaluation Context - Binding ID + Object to compare against - + - Gets the String representation of this Expression + Nodes must implement a ToString method + + + Essential for the implementation of GetHashCode to work correctly, Nodes should generate a String representation that is 'unique' as far as that is possible. + + + Any two Nodes which match via the Equals method (based on strict RDF Specification Equality) should produce the same String representation since Hash Codes are generated by calling GetHashCode on this String + + - + - Gets the Type of the Expression + Gets the String representation of the Node formatted with the given Node formatter + Formatter + - + - Gets the Functor of the Expression + Gets the String representation of the Node formatted with the given Node formatter + Formatter + Triple Segment + - + - Transforms the Expression using the given Transformer + Gets a Hash Code for a Node - Expression Transformer + + + Implemented by getting the Hash Code of the result of ToString for a Node prefixed with its Node Type, this is pre-computed for efficiency when a Node is created since Nodes are immutable. See remarks on ToString for more detail. 
+ + + Since Hash Codes are based on a String representation there is no guarantee of uniqueness though the same Node will always give the same Hash Code (on a given Platform - see the MSDN Documentation for string.GetHashCode() for further details) + + - + - Namespace containing expression classes which provide the SPARQL built-in functions pertaining to date times + The Equality operator is defined for Nodes + First Node + Second Node + Whether the two Nodes are equal + Uses the Equals method to evaluate the result - + - Represents the SPARQL DAY() Function + The Non-Equality operator is defined for Nodes + First Node + Second Node + Whether the two Nodes are non-equal + Uses the Equals method to evaluate the result - + - Creates a new SPARQL DAY() Function + Nodes must implement a CompareTo method to allow them to be Sorted - Argument Expression + Node to compare self to + + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Gets the Functor of this Expression + Nodes must implement a CompareTo method to allow them to be Sorted + Node to compare self to + + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Gets the String representation of this Expression + Nodes must implement a CompareTo method to allow them to be Sorted + Node to compare self to + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). 
Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Transforms the Expression using the given Transformer + Nodes must implement a CompareTo method to allow them to be Sorted - Expression Transformer + Node to compare self to + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Represents the SPARQL HOURS() Function + Nodes must implement a CompareTo method to allow them to be Sorted + Node to compare self to + + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Creates a new SPARQL HOURS() Function + Nodes must implement a CompareTo method to allow them to be Sorted - Argument Expression + Node to compare self to + + + Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper + - + - Gets the Functor of this Expression + Nodes must implement an Equals method so we can do type specific equality + Node to check for equality + + + Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Gets the String representation of this Expression + Nodes must implement an Equals method so we can do type specific equality + Node to check for equality + + Nodes implementations are also required to implement an override of the non-generic Equals method. 
Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Transforms the Expression using the given Transformer + Nodes must implement an Equals method so we can do type specific equality - Expression Transformer + Node to check for equality + + Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Represents the SPARQL MINUTES() Function + Nodes must implement an Equals method so we can do type specific equality + Node to check for equality + + + Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Creates a new SPARQL MINUTES() Function + Nodes must implement an Equals method so we can do type specific equality - Argument Expression + Node to check for equality + + + Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Gets the Functor of this Expression + Nodes must implement an Equals method so we can do type specific equality + Node to check for equality + + + Nodes implementations are also required to implement an override of the non-generic Equals method. 
Standard implementations of some equality comparisons can be found in EqualityHelper + - + - Gets the String representation of this Expression + Gets the information for serialization - + Serialization Information + Streaming Context - + - Transforms the Expression using the given Transformer + Gets the schema for XML serialization - Expression Transformer - + - Represents the SPARQL MONTH() Function + Reads the data for XML deserialization + XML Reader - + - Creates a new SPARQL YEAR() Function + Writes the data for XML serialization - Argument Expression + XML Writer - + - Gets the Functor of this Expression + Abstract Base Class for Triple Collections + + Designed to allow the underlying storage of a Triple Collection to be changed at a later date without affecting classes that use it. + - + - Gets the String representation of this Expression + Adds a Triple to the Collection - + Triple to add + Adding a Triple that already exists should be permitted though it is not necessary to persist the duplicate to underlying storage - + - Transforms the Expression using the given Transformer + Determines whether a given Triple is in the Triple Collection - Expression Transformer - + The Triple to test + True if the Triple already exists in the Triple Collection - + - Represents the SPARQL NOW() Function + Gets the Number of Triples in the Triple Collection - + - Gets the Functor of this Expression + Deletes a Triple from the Collection + Triple to remove + Deleting something that doesn't exist should have no effect and give no error - + - Gets the String representation of this Expression + Gets the given Triple + Triple to retrieve + Thrown if the given Triple doesn't exist - + - Represents the SPARQL SECONDS() Function + Gets all the Nodes which are Objects of Triples in the Triple Collection - + - Creates a new SPARQL SECONDS() Function + Gets all the Nodes which are Predicates of Triples in the Triple Collection - Argument Expression - + - Gets the Functor of this 
Expression + Gets all the Nodes which are Subjects of Triples in the Triple Collection - + - Gets the String representation of this Expression + Gets all the Triples with the given Subject + ubject to lookup - + - Transforms the Expression using the given Transformer + Gets all the Triples with the given Predicate - Expression Transformer + Predicate to lookup - + - Represents the SPARQL TIMEZONE() Function + Gets all the Triples with the given Object + Object to lookup + - + - Creates a new SPARQL TIMEZONE() Function + Gets all the Triples with the given Subject Predicate pair - Argument Expression + Subject to lookup + Predicate to lookup + - + - Gets the Timezone of the Argument Expression as evaluated for the given Binding in the given Context + Gets all the Triples with the given Predicate Object pair - Evaluation Context - Binding ID + Predicate to lookup + Object to lookup - + - Gets the Functor of this Expression + Gets all the Triples with the given Subject Object pair + Subject to lookup + Object to lookup + - + - Gets the String representation of this Expression + Diposes of a Triple Collection - - + - Transforms the Expression using the given Transformer + Gets the typed Enumerator for the Triple Collection - Expression Transformer - + - Represents the SPARQL TZ() Function + Gets the non-generic Enumerator for the Triple Collection + - + - Creates a new SPARQL TZ() Function + Event which occurs when a Triple is added to the Collection - Argument Expression - + - Gets the Timezone of the Argument Expression as evaluated for the given Binding in the given Context + Event which occurs when a Triple is removed from the Collection - Evaluation Context - Binding ID - - + - Gets the Type of this Expression + Helper method for raising the Triple Added event + Triple - + - Gets the Functor of this Expression + Helper method for raising the Triple Removed event + Triple - + - Gets the String representation of this Expression + Abstract Base Class for a Triple 
Store - - + - Transforms the Expression using the given Transformer + Collection of Graphs that comprise the Triple Store - Expression Transformer - - + - Represents the SPARQL YEAR() Function + Event Handler definitions - + - Creates a new SPARQL YEAR() Function + Event Handler definitions - Argument Expression - + - Gets the Functor of this Expression + Event Handler definitions - + - Gets the String representation of this Expression + Event Handler definitions - - + - Transforms the Expression using the given Transformer + Event Handler definitions - Expression Transformer - - + - Namespace containing expression classes which provide the SPARQL built-in functions pertaining to hash algorithms + Creates a new Base Triple Store + Graph Collection to use - + - Represents the SPARQL SHA384() Function + Gets whether the Triple Store is empty - + - Creates a new SHA384() Function + Gets the Collection of Graphs that comprise this Triple Store - Argument Expression - + - Gets the Functor of the Expression + Gets all the Triples in the Triple Store - + - Gets the String representation of the Expression + Adds a Graph into the Triple Store - + Graph to add - + - Transforms the Expression using the given Transformer + Adds a Graph into the Triple Store using the chosen Merging Behaviour - Expression Transformer - + Graph to add + Whether the Graph should be merged with an existing Graph with the same Base Uri - + - Represents the SPARQL MD5() Function + Adds a Graph into the Triple Store which is retrieved from the given Uri + Uri of the Graph to load - + - Creates a new MD5() Function + Adds a Graph into the Triple Store which is retrieved from the given Uri using the chosen Merging Behaviour - Argument Expression + Graph to add + Whether the Graph should be merged with an existing Graph with the same Base Uri + + Important: Under Silverlight/Windows Phone 7 this will happen asynchronously so the Graph may not be immediatedly available in the store + - + - Gets the 
Functor of the Expression + Removes a Graph from the Triple Store + Uri of the Graph to Remove - + - Gets the String representation of the Expression + Checks whether a Graph with the given Base Uri exists in the Triple Store - + Graph Uri + True if the Graph exists in the Triple Store - + - Transforms the Expression using the given Transformer + Gets the Graph with the given URI - Expression Transformer + Graph URI - - - Represents the SPARQL SHA1() Function - - - + - Creates a new SHA1() Function + Event which is raised when a Graph is added - Argument Expression - + - Gets the Functor of the Expression + Event which is raised when a Graph is removed - + - Gets the String representation of the Expression + Event which is raised when a Graphs contents changes - - + - Transforms the Expression using the given Transformer + Event which is raised when a Graph is cleared - Expression Transformer - - + - Represents the SPARQL SHA256() Function + Event which is raised when a Graph has a merge operation performed on it - + - Creates a new SHA256() Function + Helper method for raising the Graph Added event manually - Argument Expression + Graph - + - Gets the Functor of the Expression + Helper method for raising the Graph Added event manually + Graph Event Arguments - + - Gets the String representation of the Expression + Event Handler which handles the Graph Added event from the underlying Graph Collection and raises the Triple Store's Graph Added event - + Sender + Graph Event Arguments + Override this method if your Triple Store implementation wishes to take additional actions when a Graph is added to the Store - + - Transforms the Expression using the given Transformer + Helper method for raising the Graph Removed event manually - Expression Transformer - + Graph - + - Represents the SPARQL SHA512() Function + Helper method for raising the Graph Removed event manually + Graph Event Arguments - + - Creates a new SHA512() Function + Event Handler which handles the Graph 
Removed event from the underlying Graph Collection and raises the Triple Stores's Graph Removed event - Argument Expression + Sender + Graph Event Arguments - + - Gets the Functor of the Expression + Helper method for raising the Graph Changed event manually + Graph Event Arguments - + - Gets the String representation of the Expression + Event Handler which handles the Changed event of the contained Graphs by raising the Triple Store's Graph Changed event - + Sender + Graph Event Arguments - + - Transforms the Expression using the given Transformer + Helper method for raising the Graph Changed event manually - Expression Transformer - + Graph - + - Abstract base class for Hash Functions + Helper method for raising the Graph Cleared event manually + Graph Event Arguments - + - Creates a new Hash function + Event Handler which handles the Cleared event of the contained Graphs by raising the Triple Stores's Graph Cleared event - Expression - Hash Algorithm to use + Sender + Graph Event Arguments - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Helper method for raising the Graph Merged event manually - Evaluation Context - Binding ID - + Graph Event Arguments - + - Computes Hashes + Event Handler which handles the Merged event of the contained Graphs by raising the Triple Store's Graph Merged event - Input String - + Sender + Graph Event Arguments - + - Gets the String representation of the function + Helper method which attaches the Triple Store's Event Handlers to the relevant events of a Graph - + Graph - + - Gets the Type of the Expression + Helper method which detaches the Triple Store's Event Handlers from the relevant events of a Graph + - + - Namespace containing expression classes which provide the SPARQL built-in numeric functions + Disposes of the Triple Store + Derived classes must override this to implement required disposal actions - + - Represents the SPARQL ABS() Function + Abstract Base Class for Blank 
Nodes - + - Creates a new SPARQL ABS() Function + Internal Only Constructor for Blank Nodes - Argument Expression + Graph this Node belongs to - + - Gets the Functor of this Expression + Internal Only constructor for Blank Nodes + Graph this Node belongs to + Custom Node ID to use - + - Gets the String representation of this Expression + Internal Only constructor for Blank Nodes - + Node Factory from which to obtain a Node ID - + - Transforms the Expression using the given Transformer + Unparameterized Constructor for deserialization usage only - Expression Transformer - - + - Represents the SPARQL CEIL() Function + Deserialization Constructor + Serialization Information + Streaming Context - + - Creates a new SPARQL CEIL() Function + Returns the Internal Blank Node ID this Node has in the Graph - Argument Expression + + Usually automatically assigned and of the form autosXXX where XXX is some number. If an RDF document contains a Blank Node ID of this form that clashes with an existing auto-assigned ID it will be automatically remapped by the Graph using the BlankNodeMapper when it is created. 
+ - + - Gets the Functor of this Expression + Indicates whether this Blank Node had its ID assigned for it by the Graph - + - Gets the String representation of this Expression + Implementation of Equals for Blank Nodes + Object to compare with the Blank Node + + Blank Nodes are considered equal if their internal IDs match precisely and they originate from the same Graph + - + - Transforms the Expression using the given Transformer + Implementation of Equals for Blank Nodes - Expression Transformer + Object to compare with the Blank Node + + Blank Nodes are considered equal if their internal IDs match precisely and they originate from the same Graph + - + - Represents the SPARQL FLOOR() Function + Determines whether this Node is equal to another + Other Blank Node + - + - Creates a new SPARQL FLOOR() Function + Determines whether this Node is equal to a Graph Literal Node (should always be false) - Argument Expression + Graph Literal Node + - + - Gets the Functor of this Expression + Determines whether this Node is equal to a Literal Node (should always be false) + Literal Node + - + - Gets the String representation of this Expression + Determines whether this Node is equal to a URI Node (should always be false) + URI Node - + - Transforms the Expression using the given Transformer + Determines whether this Node is equal to a Variable Node (should always be false) - Expression Transformer + Variable Node - + - Represents the SPARQL RAND() Function + Determines whether this Node is equal to a Blank Node + Blank Node + - + - Creates a new SPARQL RAND() Function + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Evaluates the expression + Returns an Integer indicating the Ordering of this Node compared to another Node - Evaluation Context - Binding ID + Node to test against - + - Gets the Variables used in this Expression + Returns an Integer indicating the Ordering of this Node compared to another Node + 
Node to test against + - + - Gets the Type of this Expression + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the Arguments of this Expression + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets whether an expression can safely be evaluated in parallel + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the Functor of this Expression + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the String representation of this Expression + Returns a string representation of this Blank Node in QName form - + - Transforms the Expression using the given Transformer + Gets the data for serialization - Expression Transformer - + Serialization Information + Streaming Context - + - Represents the SPARQL ROUND() Function + Reads the data for XML deserialization + XML Reader - + - Creates a new SPARQL ROUND() Function + Writes the data for XML serialization - Argument Expression + XML Writer - + - Gets the Functor of this Expression + Throws an error as a Blank Node cannot be cast to a String + - + - Gets the String representation of this Expression + Throws an error as a Blank Node cannot be cast to an integer - + - Transforms the Expression using the given Transformer + Throws an error as a Blank Node cannot be cast to a decimal - Expression Transformer - + - Namespace containing expression classes which provide the SPARQL built-in functions pertaining to sets (IN and NOT IN) + Throws an error as a Blank Node cannot be cast to a float + - + - Abstract base class for SPARQL Functions which operate on Sets + Throws an error as a Blank Node cannot be cast to a double + - + - Variable Expression Term that the Set function applies to + Throws an error as a Blank Node cannot be cast to a boolean + - + - Set that is used 
in the function + Throws an error as a Blank Node cannot be cast to a date time + - + - Creates a new SPARQL Set function + Throws an error as a Blank Node cannot be cast to a date time offset - Expression - Set + - + - Gets the value of the function as evaluated for a given Binding in the given Context + Throws an error as a Blank Node cannot be case to a time span - SPARQL Evaluation Context - Binding ID - + - Gets the Variable the function applies to + Gets the URI of the datatype this valued node represents as a String - + - Gets the Type of the Expression + Gets the Numeric Type of the Node - + - Gets the Functor of the Expression + Class for representing Blank RDF Nodes - + - Gets the Arguments of the Exception + Internal Only Constructor for Blank Nodes + Graph this Node belongs to - + - Gets whether an expression can safely be evaluated in parallel + Internal Only constructor for Blank Nodes + Graph this Node belongs to + Custom Node ID to use - + - Gets the String representation of the Expression + Internal Only constructor for Blank Nodes - + Node Factory from which to obtain a Node ID - + - Transforms the Expression using the given Transformer + Constructor for deserialization usage only - Expression Transformer - - + - Class representing the SPARQL IN set function + Deserialization Constructor + Serialization Information + Streaming Context - + - Creates a new SPARQL IN function + Implementation of Compare To for Blank Nodes - Expression - Set + Blank Node to Compare To + + + Simply invokes the more general implementation of this method + - + - Evaluates the expression + Determines whether this Node is equal to a Blank Node - Evaluation Context - Binding ID + Blank Node - + - Gets the Functor of the Expression + Mapper class which creates Blank Node IDs and ensures that auto-assigned and user specified IDs don't collide - + - Gets the String representation of the Expression + Creates a new Blank Node Mapper - - + - Transforms the Expression using the 
given Transformer + Creates a new Blank Node Mapper that uses a custom Prefix - Expression Transformer - + Prefix - + - Class representing the SPARQL NOT IN set function + Gets the next available auto-assigned Blank Node ID + - + - Creates a new SPARQL NOT IN function + Checks that an ID can be used as a Blank Node ID remapping it to another ID if necessary - Expression - Set + ID to be checked + + If the ID is not known it is added to the ID maps. If the ID is known but is user-assigned then this can be used fine. If the ID is known and was auto-assigned then it has to be remapped to a different ID. + - + - Evaluates the expression + Mapper class which remaps Blank Node IDs which aren't valid as-is in a given serialization to a new ID - Evaluation Context - Binding ID - + + This also has to take care of the fact that it's possible that these remapped IDs then collide with existing valid IDs in which case these also have to be remapped + - + - Gets the Functor of the Expression + Creates a new Blank Node ID mapper + Function which determines whether IDs are valid or not - + - Gets the String representation of the Expression + Takes a ID, validates it and returns either the ID or an appropriate remapped ID + ID to map - + - Transforms the Expression using the given Transformer + Internal Helper function which generates the new IDs - Expression Transformer - + - Class representing the SPARQL COALESCE() function + Records Blank Node assigments - + - Creates a new COALESCE function with the given expressions as its arguments + Creates a new Blank Node ID Assigment Record - Argument expressions + ID to assign + Was the ID auto-assigned - + - Gets the value of the expression as evaluated in the given Context for the given Binding ID + Assigned ID - Evaluation Context - Binding ID - - + - Gets the Variables used in all the argument expressions of this function + Whether the ID is auto-assigned - + - Gets the String representation of the function + Returns whether a given 
Object is equal to this Blank Node ID assignment + Object to test - - - Gets the Type of the Expression - - - + - Gets the Functor of the Expression + Class for representing Triple Stores which are collections of RDF Graphs + + The 'Disk Demand' Triple Store is a Triple Store which automatically retrieves Graphs from the Disk based on the URIs of Graphs that you ask it for when those URIs are file:/// URIs + - + - Gets the Arguments of the Expression + Creates a new Disk Demand Triple Store - + - Gets whether an expression can safely be evaluated in parallel + Static Helper class containing standard implementations of Equality between various Node types - + - Transforms the Expression using the given Transformer + Determines whether two URIs are equal - Expression Transformer + First URI + Second URI + + Unlike the Equals method provided by the Uri class by default this takes into account Fragment IDs which are essential for checking URI equality in RDF + - + - Class representing the SPARQL IF function + Determines whether two URIs are equal + First URI Node + Second URI Node + - + - Creates a new IF function + Determines whether two Literals are equal - Condition - Expression to evaluate if condition evaluates to true - Expression to evalaute if condition evaluates to false/error + First Literal + Second Literal + - + - Gets the value of the expression as evaluated in the given Context for the given Binding ID + Determines whether two Blank Nodes are equal - SPARQL Evaluation Context - Binding ID + First Blank Node + Second Blank Node - + - Gets the enumeration of variables used in the expression + Determines whether two Graph Literals are equal + First Blank Node + Second Blank Node + - + - Gets the String representation of the function + Determines whether two Variable Nodes are equal + First Variable Node + Second Variable Node - + - Gets the Expression Type + Static Helper class containing standard implementations of Comparison between various Node types - + - 
Gets the Functor for the Expression + Compares two URIs + First URI + Second URI + - + - Gets the Arguments of the Expression + Compares two URI Nodes + First URI Node + Second URI Node + - + - Gets whether an expression can safely be evaluated in parallel + Compares two Literal Nodes using global default comparison options where applicable + First Literal Node + Second Literal Node + - + - Transforms the Expression using the given Transformer + Compares two Literal Nodes - Expression Transformer + First Literal Node + Second Literal Node + Culture to use for lexical string comparisons where more natural comparisons are not possible/applicable + String Comparison options used for lexical string comparisons where more natural comparisons are not possible/applicable - + - - Namespace containing expression classes which model functions in SPARQL expressions - + Compares two Blank Nodes + First Blank Node + Second Blank Node + - + - Namespace containing expression classes which provide the ARQ function library + Compares two Graph Literals + First Graph Literal + Second Graph Literal + - + - Represents the ARQ afn:bnode() function + Compares two Variable Nodes + First Variable Node + Second Variable Node + - + - Creates a new ARQ afn:bnode() function + Equality comparer that compares URIs - Expression - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Compares two URIs - Evaluation Context - Binding ID + URI + URI - + - Gets the String representation of the function + Determines whether two URIs are equal + URI + URI - + - Gets the Type of the Expression + Gets the Hash Code for a URI + URI + - + - Gets the Functor of the Expression + Provides useful Extension Methods for use elsewhere in the Library - + - Transforms the Expression using the given Transformer + Takes a single item and generates an IEnumerable containing only it - Expression Transformer + Type of the enumerable + Item to wrap in an IEnumerable + + This method 
taken from Stack Overflow - see here + - + - Gets whether the expression can be parallelized + Determines whether the contents of two enumerables are disjoint + Type Parameter + An Enumerable + Another Enumerable + - + - Gets the arguments of the expression + Gets the Subset of Triples from an existing Enumerable that have a given Subject + Enumerable of Triples + Subject to match + - + - Gets the variables in the expression + Gets the Subset of Triples from an existing Enumerable that have a given Predicate + Enumerable of Triples + Predicate to match + - + - Represents the ARQ e() function + Gets the Subset of Triples from an existing Enumerable that have a given Object + Enumerable of Triples + Object to match + - + - Evaluates the function + Gets the Blank Nodes - Context - Binding ID + Nodes - + - Gets the String representation of the function + Gets the Graph Literal Nodes + Nodes - + - Gets the Functor of the Expression + Gets the Literal Nodes + Nodes + - + - Gets the Variables used + Gets the URI Nodes + Nodes + - + - Gets the type of the expression + Gets the Variable Nodes + Nodes + - + - Gets the arguments of the expression + Gets an Enhanced Hash Code for a Uri + Uri to get Hash Code for + + + The .Net Uri class Hash Code ignores the Fragment ID when computing the Hash Code which means that URIs with the same basic URI but different Fragment IDs have identical Hash Codes. This is perfectly acceptable and sensible behaviour for normal URI usage since Fragment IDs are only relevant to the Client and not the Server. But in the case of URIs in RDF the Fragment ID is significant and so we need in some circumstances to compute a Hash Code which includes this information. 
+ - + - Gets whether an expression can safely be evaluated in parallel + Gets an SHA256 Hash for a URI + URI to get Hash Code for + - + - Applies a transformer to the expressions arguments + Gets a SHA256 Hash for a String - Transformer + String to hash - + - Represents the ARQ afn:localname() function + Asserts a new Triple in the Graph + Graph to assert in + Subject + Predicate + Object + Handy method which means you can assert a Triple by specifying the Subject, Predicate and Object without having to explicity declare a new Triple - + - Creates a new ARQ Local Name function + Retracts a Triple from the Graph - Expression + Graph to retract from + Subject + Predicate + Object + Handy method which means you can retract a Triple by specifying the Subject, Predicate and Object without having to explicity declare a new Triple - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Asserts a list as a RDF collection and returns the node that represents the root of the RDF collection - Evaluation Context - Binding ID - + Type of Objects + Graph to assert in + Objects to place in the collection + Mapping from Object Type to INode + + Either the blank node which is the root of the collection or rdf:nil for empty collections + - + - Gets the String representation of the function + Asserts a list as a RDF collection using an existing node as the list root - + Type of Objects + Graph to assert in + Root Node for List + Objects to place in the collection + Mapping from Object Type to INode - + - Gets the Type of the Expression + Asserts a list as a RDF collection and returns the node that represents the root of the RDF collection + Graph to assert in + Objects to place in the collection + + Either the blank node which is the root of the collection or rdf:nil for empty collections + - + - Gets the Functor of the Expression + Asserts a list as a RDF collection using an existing node as the list root + Graph to assert in + Root Node for 
List + Objects to place in the collection - + - Transforms the Expression using the given Transformer + Gets all the Triples that make up a list (aka a RDF collection) - Expression Transformer - + Graph + Root Node for List + Triples that make up the List - + - Represents the ARQ max() function + Gets all the Nodes which are the items of the list (aka the RDF collection) + Graph + Root Node for List + Nodes that are the items in the list - + - Creates a new ARQ max() function + Gets all the Nodes which are the intermediate nodes in the list (aka the RDF collection). These represents the nodes used to link the actual items of the list together rather than the actual items of the list - First Argument - Second Argument + Graph + Root Node for List + Nodes that are the intermediate nodes of the list - + - Gets the numeric value of the function in the given Evaluation Context for the given Binding ID + Gets whether a given Node is valid as a List Root, this does not guarantee that the list itself is valid simply that the Node appears to be the root of a list - Evaluation Context - Binding ID + Node to check + Graph + + We consider a node to be a list root if there are no incoming rdf:rest triples and only a single outgoing rdf:first triple + - + - Gets the String representation of the function + Gets the Node that represents the last item in the list + Graph + Root Node for List - + - Gets the Type of the Expression + Retracts a List (aka a RDF collection) + Graph + Root Node for List - + - Gets the Functor of the Expression + Adds new items to the end of a list (aka a RDF collection) + Type of Objects + Graph to assert in + Root Node for the list + Objects to add to the collection + Mapping from Object Type to INode - + - Transforms the Expression using the given Transformer + Adds new items to the end of a list (aka a RDF collection) - Expression Transformer - + Graph to assert in + Root Node for the list + Objects to add to the collection - + - Represents the ARQ 
min() function + Removes the given items from a list (aka a RDF collection), if an item occurs multiple times in the list all occurrences will be removed + Type of Objects + Graph to retract from + Root Node for the list + Objects to remove from the collection + Mapping from Object Type to INode - + - Creates a new ARQ min() function + Removes the given items from a list (aka a RDF collection), if an item occurs multiple times in the list all occurrences will be removed - First Argument - Second Argument + Graph to retract from + Root Node for the list + Objects to remove from the collection - + - Gets the numeric value of the function in the given Evaluation Context for the given Binding ID + Copies a Node to the target Graph - Evaluation Context - Binding ID + Node to copy + Target Graph + Shorthand for the Tools.CopyNode() method - + - Gets the String representation of the function + Copies a Node to the target Graph + Node to copy + Target Graph + Indicates whether Nodes should preserve the Graph Uri of the Graph they originated from + Shorthand for the Tools.CopyNode() method - + - Gets the Type of the Expression + Copies a Triple to the target Graph + Triple to copy + Target Graph + + Shorthand for the Tools.CopyTriple() method - + - Gets the Functor of the Expression + Copies a Triple to the target Graph + Triple to copy + Target Graph + Indicates whether Nodes should preserve the Graph Uri of the Graph they originated from + + Shorthand for the Tools.CopyTriple() method - + - Transforms the Expression using the given Transformer + Copies a Triple from one Graph mapping Nodes as appropriate - Expression Transformer + Triple to copy + TargetGraph + Mapping of Nodes - + - Represents the ARQ namespace() function + Gets either the String representation of the Object or the Empty String if the object is null + Object + - + - Creates a new ARQ Namespace function + Gets either the String representation of the URI or the Empty String if the URI is null - Expression 
+ URI + - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Turns a string into a safe URI - Evaluation Context - Binding ID - + String + Either null if the string is null/empty or a URI otherwise - + - Gets the String representation of the function + Gets the String representation of the URI formatted using the given Formatter + URI + URI Formatter - + - Gets the Type of the Expression + Appends a String to the StringBuilder with an indent of spaces + String Builder + String to append + Indent - + - Gets the Functor of the Expression + Appends a String to the StringBuilder with an indent of spaces + String Builder + String to append + Indent + + Strings containing new lines are split over multiple lines + - + - Transforms the Expression using the given Transformer + Takes a String and escapes any backslashes in it which are not followed by a valid escape character - Expression Transformer + String value + Valid Escape Characters i.e. characters which may follow a backslash - + - Represents the ARQ afn:now() function + Determines whether a string is ASCII + + - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Determines whether a String is fully escaped - Evaluation Context - Binding ID - - Returns a constant Literal Node which is a Date Time typed Literal - + String value + Valid Escape Characters i.e. characters which may follow a backslash + Characters which must be escaped i.e. 
must be preceded by a backslash + - + - Gets the Type of the Expression + Escapes all occurrences of a given character in a String + String + Character to escape + + + Ignores all existing escapes (indicated by a \) and so avoids double escaping as far as possible + - + - Gets the Functor of the Expression + Escapes all occurrences of a given character in a String using the given escape character + String + Character to escape + Character to escape as + + + Ignores all existing escapes (indicated by a \) and so avoids double escaping as far as possible + - + - Gets whether an expression can safely be evaluated in parallel + Provides useful Extension Methods for working with Graphs - + - Gets the String representation of the function + Turns a Graph into a Triple Store + Graph - + - Gets the variables in the expression + Executes a SPARQL Query on a Graph + Graph to query + SPARQL Query + - + - Gets the arguments of the expression + Executes a SPARQL Query on a Graph handling the results using the handlers provided + Graph to query + RDF Handler + SPARQL Results Handler + SPARQL Query - + - Returns the expression as there are no arguments to be transformed + Executes a SPARQL Query on a Graph - Expression Transformer + Graph to query + SPARQL Query - - - Represents the ARQ pi() function - - - + - Creates a new ARQ Pi function + Executes a SPARQL Query on a Graph handling the results using the handlers provided + Graph to query + RDF Handler + SPARQL Results Handler + SPARQL Query - + - Evaluates the expression + Executes a SPARQL Query on a Graph - Evaluation Context - Binding ID + Graph to query + SPARQL Query - + - Gets the String representation of the function + Executes a SPARQL Query on a Graph handling the results using the handlers provided - + Graph to query + RDF Handler + SPARQL Results Handler + SPARQL Query - + - Gets the Functor of the Expression + Loads RDF data from a file into a Graph + Graph to load into + File to load from + Parser to use + + + 
This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace + + + Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If a File URI is assigned it will always be an absolute URI for the file + + - + - Gets the variables in the expression + Loads RDF data from a file into a Graph + Graph to load into + File to load from + + + This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace + + + Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If a File URI is assigned it will always be an absolute URI for the file + + - + - Gets whether an expression can safely be evaluated in parallel + Loads RDF data from a URI into a Graph + Graph to load into + URI to load from + Parser to use + + + This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace + + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. 
Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + - + - Gets the type of the expression + Loads RDF data from a URI into a Graph + Graph to load into + URI to load from + + + This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace + + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. 
+ + - + - Gets the arguments of the expression + Loads RDF data from a String into a Graph + Graph to load into + Data to load + Parser to use + + This is just a shortcut to using the static Parse() methods from the StringParser class located in the Parsing namespace + - + - Returns the expression as there are no arguments to be transformed + Loads RDF data from a String into a Graph - Expression Transformer - + Graph to load into + Data to load + + This is just a shortcut to using the static Parse() methods from the StringParser class located in the Parsing namespace + - + - Represents the ARQ afn:sha1sum() function + Loads RDF data from an Embedded Resource into a Graph + Graph to load into + Assembly qualified name of the resource to load from + + This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace + - + - Creates a new ARQ SHA1 Sum function + Loads RDF data from an Embedded Resource into a Graph - Expression + Graph to load into + Assembly qualified name of the resource to load from + Parser to use + + This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace + - + - Gets the String representation of the function + Saves a Graph to a File - + Graph to save + File to save to + Writer to use - + - Gets the Functor of the Expression + Saves a Graph to a File + Graph to save + File to save to + Writer to use - + - Transforms the Expression using the given Transformer + Saves a Graph to a File - Expression Transformer - + Graph to save + File to save to - + - Represents the ARQ afn:strjoin() function which is a string concatenation function with a separator + Saves a Graph to a stream + Graph to save + Stream to save to + Writer to use - + - Creates a new ARQ String Join function + Saves a Graph to a stream - Separator Expression - Expressions to concatentate + Graph to save + Stream to save to + Writer to use - + - 
Gets the value of the function in the given Evaluation Context for the given Binding ID + Save a graph to a stream, determining the type of writer to use by the output file name - Evaluation Context - Binding ID - + The graph to write + The output file name to use to determine the output format to write + The stream to write to - + - Gets the Variables used in the function + Provides useful Extension Methods for working with Triple Stores - + - Gets the String representation of the function + Loads an RDF dataset from a file into a Triple Store - + Triple Store to load into + File to load from + Parser to use + + This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace + - + - Gets the Type of the Expression + Loads an RDF dataset from a file into a Triple Store + Triple Store to load into + File to load from + + This is just a shortcut to using the static Load() methods from the FileLoader class located in the Parsing namespace + - + - Gets the Functor of the Expression + Loads an RDF dataset from a URI into a Triple Store + Triple Store to load into + URI to load from + Parser to use + + This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace + - + - Gets the Arguments of the Expression + Loads an RDF dataset from a URI into a Triple Store + Triple Store to load into + URI to load from + + This is just a shortcut to using the static Load() methods from the UriLoader class located in the Parsing namespace + - + - Gets whether an expression can safely be evaluated in parallel + Loads an RDF dataset from a String into a Triple Store + Triple Store to load into + Data to load + Parser to use + + This is just a shortcut to using the static ParseDataset() methods from the StringParser class located in the Parsing namespace + - + - Transforms the Expression using the given Transformer + Loads an RDF dataset from a String into a Triple Store - 
Expression Transformer - + Triple Store to load into + Data to load + + This is just a shortcut to using the static ParseDataset() methods from the StringParser class located in the Parsing namespace + - + - Represents the ARQ afn:substring() function which is a sub-string with Java semantics + Loads an RDF dataset from an Embedded Resource into a Triple Store + Triple Store to load into + Assembly Qualified Name of the Embedded Resource to load from + Parser to use + + This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace + - + - Creates a new ARQ substring function + Loads an RDF dataset from an Embedded Resource into a Triple Store - Expression - Expression giving an index at which to start the substring + Triple Store to load into + Assembly Qualified Name of the Embedded Resource to load from + + This is just a shortcut to using the static Load() methods from the EmbeddedResourceLoader class located in the Parsing namespace + - + - Creates a new ARQ substring function + Saves a Triple Store to a file - Expression - Expression giving an index at which to start the substring - Expression giving an index at which to end the substring + Triple Store to save + File to save to + Writer to use - + - Gets the value of the function in the given Evaluation Context for the given Binding ID + Saves a Triple Store to a file - Evaluation Context - Binding ID - + Triple Store to save + File to save to - + - Gets the Variables used in the function + Provides extension methods for converting primitive types into appropriately typed Literal Nodes - + - Gets the String representation of the function + Creates a new Boolean typed literal - + Boolean + Node Factory to use for Node creation + Literal representing the boolean + Thrown if the Factory argument is null - + - Gets the Type of the Expression + Creates a new Byte typed literal + Byte + Node Factory to use for Node creation + Literal representing 
the byte + + Byte in .Net is actually equivalent to Unsigned Byte in XML Schema so depending on the value of the Byte the type will either be xsd:byte if it fits or xsd:usignedByte + - + - Gets the Functor of the Expression + Creates a new Byte typed literal + Byte + Node Factory to use for Node creation + Literal representing the signed bytes + + SByte in .Net is directly equivalent to Byte in XML Schema so the type will always be xsd:byte + - + - Gets the Arguments of the Expression + Creates a new Date Time typed literal + Date Time + Node Factory to use for Node creation + Literal representing the date time + Thrown if the Factory argument is null - + - Gets whether an expression can safely be evaluated in parallel + Creates a new Date Time typed literal + Date Time + Node Factory to use for Node creation + Whether to preserve precisely i.e. include fractional seconds + Literal representing the date time + Thrown if the Factory argument is null - + - Transforms the Expression using the given Transformer + Creates a new Date Time typed literal - Expression Transformer - + Date Time + Node Factory to use for Node creation + Literal representing the date time + Thrown if the Factory argument is null - + - Namespace containing expression classes which provide the Leviathan function library + Creates a new Date Time typed literal + Date Time + Node Factory to use for Node creation + Whether to preserve precisely i.e. 
include fractional seconds + Literal representing the date time + Thrown if the Factory argument is null - + - Namespace containing expression classes which provide the hash functions from the Leviathan function library + Creates a new Date typed literal + Date Time + Node Factory to use for Node creation + + Thrown if the Factory argument is null - + - Represents the Leviathan lfn:md5hash() function + Creates a new Date typed literal + Date Time + Node Factory to use for Node creation + + Thrown if the Factory argument is null - + - Creates a new Leviathan MD5 Hash function + Creates a new Time typed literal - Expression + Date Time + Node Factory to use for Node creation + Literal representing the time + Thrown if the Factory argument is null - + - Gets the String representation of the function + Creates a new Time typed literal - + Date Time + Node Factory to use for Node creation + Whether to preserve precisely i.e. include fractional seconds + Literal representing the time + Thrown if the Factory argument is null - + - Gets the Functor of the Expression + Creates a new duration typed literal + Time Span + Node Factory to use for Node creation + Literal representing the time span - + - Transforms the Expression using the given Transformer + Creates a new Time typed literal - Expression Transformer - + Date Time + Node Factory to use for Node creation + Literal representing the time + Thrown if the Factory argument is null - + - Represents the Leviathan lfn:sha256hash() function + Creates a new Time typed literal + Date Time + Node Factory to use for Node creation + Whether to preserve precisely i.e. 
include fractional seconds + Literal representing the time + Thrown if the Factory argument is null - + - Creates a new Leviathan SHA 256 Hash function + Creates a new Decimal typed literal - Expression + Decimal + Node Factory to use for Node creation + Literal representing the decimal + Thrown if the Factory argument is null - + - Gets the String representation of the function + Creates a new Double typed literal - + Double + Node Factory to use for Node creation + Literal representing the double + Thrown if the Factory argument is null - + - Gets the Functor of the Expression + Creates a new Float typed literal + Float + Node Factory to use for Node creation + Literal representing the float + Thrown if the Factory argument is null - + - Transforms the Expression using the given Transformer + Creates a new Integer typed literal - Expression Transformer - + Integer + Node Factory to use for Node creation + Literal representing the short + Thrown if the Factory argument is null - + - Namespace containing expression classes which provide the numeric functions from the Leviathan function library + Creates a new Integer typed literal + Integer + Node Factory to use for Node creation + Literal representing the integer + Thrown if the Factory argument is null - + - Namespace containing expression classes which provide the trigonometric functions from the Leviathan function library + Creates a new Integer typed literal + Integer + Node Factory to use for Node creation + Literal representing the integer + Thrown if the Factory argument is null - + - Abstract Base Class for Unary Trigonometric Functions in the Leviathan Function Library + Creates a new String typed literal + String + Node Factory to use for Node creation + Literal representing the string + Thrown if the Graph/String argument is null - + - Trigonometric function + Class for representing RDF Graphs + Safe for multi-threaded read-only access but unsafe if one/more threads may modify the Graph by using the 
Assert, Retract or Merge methods - + - Creates a new Unary Trigonometric Function + Creates a new instance of a Graph - Expression - + - Creates a new Unary Trigonometric Function + Creates a new instance of a Graph with an optionally empty Namespace Map - Expression - Trigonometric Function + Whether the Namespace Map should be empty - + - Evaluates the expression + Creates a new instance of a Graph using the given Triple Collection - Evaluation Context - Binding ID - + Triple Collection - + - Gets the expression type + Creates a new instance of a Graph using the given Triple Collection and an optionally empty Namespace Map + Triple Collection + Whether the Namespace Map should be empty - + - Gets the string representation of the Function + Deserialization Constructor - + Serialization Information + Streaming Context - + - Represents the Leviathan lfn:cosec() or lfn:cosec-1 function + Asserts a Triple in the Graph + The Triple to add to the Graph - + - Creates a new Leviathan Cosecant Function + Asserts a List of Triples in the graph - Expression + List of Triples in the form of an IEnumerable - + - Creates a new Leviathan Cosecant Function + Retracts a Triple from the Graph - Expression - Whether this should be the inverse function + Triple to Retract + Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted - + - Gets the String representation of the function + Retracts a enumeration of Triples from the graph - + Enumeration of Triples to retract - + - Gets the Functor of the Expression + Returns the UriNode with the given Uri if it exists + The Uri of the Node to select + Either the UriNode Or null if no Node with the given Uri exists - + - Transforms the Expression using the given Transformer + Returns the UriNode with the given QName if it exists - Expression Transformer + The QName of the Node to select - - - Represents the Leviathan lfn:cos() or lfn:cos-1 function - - - + - Creates a new Leviathan Cosine Function 
+ Returns the LiteralNode with the given Value if it exists - Expression + The literal value of the Node to select + Either the LiteralNode Or null if no Node with the given Value exists + The LiteralNode in the Graph must have no Language or DataType set - + - Creates a new Leviathan Cosine Function + Returns the LiteralNode with the given Value in the given Language if it exists - Expression - Whether this should be the inverse function + The literal value of the Node to select + The Language Specifier for the Node to select + Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists - + - Gets the String representation of the function + Returns the LiteralNode with the given Value and given Data Type if it exists - + The literal value of the Node to select + The Uri for the Data Type of the Literal to select + Either the LiteralNode Or null if no Node with the given Value and Data Type exists - + - Gets the Functor of the Expression + Returns the Blank Node with the given Identifier + The Identifier of the Blank Node to select + Either the Blank Node or null if no Node with the given Identifier exists - + - Transforms the Expression using the given Transformer + Gets all the Triples involving the given Node - Expression Transformer - + The Node to find Triples involving + Zero/More Triples - + - Represents the Leviathan lfn:cot() or lfn:cot-1 function + Gets all the Triples involving the given Uri + The Uri to find Triples involving + Zero/More Triples - + - Creates a new Leviathan Cotangent Function + Gets all the Triples with the given Node as the Subject - Expression + The Node to find Triples with it as the Subject + Zero/More Triples - + - Creates a new Leviathan Cotangent Function + Gets all the Triples with the given Uri as the Subject - Expression - Whether this should be the inverse function + The Uri to find Triples with it as the Subject + Zero/More Triples - + - Gets the String representation of the function + Gets 
all the Triples with the given Node as the Predicate + The Node to find Triples with it as the Predicate - + - Gets the Functor of the Expression + Gets all the Triples with the given Uri as the Predicate + The Uri to find Triples with it as the Predicate + Zero/More Triples - + - Transforms the Expression using the given Transformer + Gets all the Triples with the given Node as the Object - Expression Transformer + The Node to find Triples with it as the Object - + - Represents the Leviathan lfn:degrees-to-radians() function + Gets all the Triples with the given Uri as the Object + The Uri to find Triples with it as the Object + Zero/More Triples - + - Creates a new Leviathan Degrees to Radians Function + Selects all Triples with the given Subject and Predicate - Expression + Subject + Predicate + - + - Evaluates the expression + Selects all Triples with the given Subject and Object - Evaluation Context - Binding ID + Subject + Object - + - Gets the String representation of the function + Selects all Triples with the given Predicate and Object + Predicate + Object - + - Gets the Functor of the Expression + Class for representing RDF Graphs when you don't want Indexing + + Gives better load performance but poorer lookup performance + - + - Gets the Type of this expression + Creates a new Graph which is not indexed - + - Transforms the Expression using the given Transformer + Wrapper class for Graph Collections - Expression Transformer - - + - Represents the Leviathan lfn:radians-to-degrees() function + Internal Constant used as the Hash Code for the default graph - + - Creates a new Leviathan Radians to Degrees Function + Dictionary of Graph Uri Enhanced Hash Codes to Graphs - Expression + See GetEnhancedHashCode() - + - Evaluates the expression + Creates a new Graph Collection - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Checks whether the Graph with the given Uri exists in this Graph Collection + Graph Uri to test - + 
- Gets the Functor of the Expression + Adds a Graph to the Collection + Graph to add + Sets whether the Graph should be merged with an existing Graph of the same Uri if present + Throws an RDF Exception if the Graph has no Base Uri or if the Graph already exists in the Collection and the parameter was not set to true - + - Gets the type of the expression + Removes a Graph from the Collection + Uri of the Graph to remove - + - Transforms the Expression using the given Transformer + Gets the number of Graphs in the Collection - Expression Transformer - - + - Represents the Leviathan lfn:sec() or lfn:sec-1 function + Provides access to the Graph URIs of Graphs in the Collection - + - Creates a new Leviathan Secant Function + Gets a Graph from the Collection - Expression + Graph Uri + - + - Creates a new Leviathan Secant Function + Gets the Enumerator for the Collection - Expression - Whether this should be the inverse function + - + - Gets the String representation of the function + Gets the Enumerator for this Collection - + - Gets the Functor of the Expression + Disposes of the Graph Collection + Invokes the Dispose() method of all Graphs contained in the Collection - + - Transforms the Expression using the given Transformer + Thread Safe decorator around a Graph collection - Expression Transformer - + + Dependings on your platform this either provides MRSW concurrency via a or exclusive access concurrency via a + - + - Represents the Leviathan lfn:sin() or lfn:sin-1 function + Creates a new Thread Safe decorator around the default - + - Creates a new Leviathan Sine Function + Creates a new Thread Safe decorator around the supplied graph collection - Expression + Graph Collection - + - Creates a new Leviathan Sine Function + Enters the write lock - Expression - Whether this should be the inverse function - + - Gets the String representation of the function + Exits the write lock - - + - Gets the Functor of the Expression + Enters the read lock - + - Transforms the 
Expression using the given Transformer + Exits the read lock - Expression Transformer + + + + Checks whether the Graph with the given Uri exists in this Graph Collection + + Graph Uri to test - + - Represents the Leviathan lfn:tan() or lfn:tan-1 function + Adds a Graph to the Collection + Graph to add + Sets whether the Graph should be merged with an existing Graph of the same Uri if present + Throws an RDF Exception if the Graph has no Base Uri or if the Graph already exists in the Collection and the parameter was not set to true - + - Creates a new Leviathan Tangent Function + Removes a Graph from the Collection - Expression + Uri of the Graph to remove - + - Creates a new Leviathan Tangent Function + Gets the number of Graphs in the Collection - Expression - Whether this should be the inverse function - + - Gets the String representation of the function + Gets the Enumerator for the Collection - + - Gets the Functor of the Expression + Provides access to the Graph URIs of Graphs in the Collection - + - Transforms the Expression using the given Transformer + Gets a Graph from the Collection - Expression Transformer + Graph Uri - + - Represents the Leviathan lfn:cube() function + Disposes of the Graph Collection + Invokes the Dispose() method of all Graphs contained in the Collection - + - Creates a new Leviathan Cube Function + Implementation of a Graph Difference algorithm for RDF Graphs - Expression + + + This algorithm is broadly based upon the methodology fror computing differences in RDF Graphs described in the RDFSync paper by Tummarello et al. This is an implementation purely of a difference algorithm and not the synchronisation aspects described in their paper. Main difference between their algorithm and mine is that mine does not make the input Graphs lean as it is concerned with showing the raw differences between the Graphs and does not concern itself with whether the differences may be semantically irrelevant. 
+ + + To understand this consider the following Graphs: + +

Graph A

+ + _:autos1 rdfs:label "Rob" . + +

Graph B

+ + _:autos1 rdfs:label "Rob" . + _:autos2 rdfs:label "Rob" . + + + Given these Graphs computing the Graph Difference between A and B would report an Added MSG (Minimal Spanning Graph) when in fact the 2nd Graph is non-lean and could be reduced to the same as the 1st Graph + +
- + - Evaluates the expression + Calculates the Difference between the two Graphs i.e. the changes required to get from the 1st Graph to the 2nd Graph - Evaluation Context - Binding ID + First Graph + Second Graph - + - Gets the String representation of the function + Computes MSGs for a Graph - + Graph + Triples that need assigning to MSGs + MSGs list to populate - + - Gets the Type of this expression + Represents the Differences between 2 Graphs + + + The Diff represents the Difference between the 2 Graphs at the time the Difference was calculated - if the Graphs subsequently change then the Diff must be recalculated + + - + - Gets the Functor of the Expression + Gets whether the Graphs were equal at the time the Diff was calculated - + - Transforms the Expression using the given Transformer + Gets whether the Graphs are different sizes, different sized graphs are by definition non-equal - Expression Transformer - - + - Represents the Leviathan lfn:e() function + Provides the mapping from Blank Nodes in 1 Graph to Blank Nodes in another + + + In the case of Equal Graphs this will be a complete mapping, if the Graphs are different then it will be an empty/partial mapping depending on whether Blank Nodes can be mapped from one Graph to another or not + + - + - Creates a new Leviathan E Function + Gets the Ground Triples (i.e. no Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph - Expression - + - Evaluates the expression + Gets the Ground Triples (i.e. no Blank Nodes) that must be removed from the 1st Graph to get the 2nd Graph - Evaluation Context - - - + - Gets the String representation of the function + Gets the MSGs (Minimal Spanning Graphs i.e. sets of Triples sharing common Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph - - + - Gets the Functor of the Expression + Gets the MSGs (Minimal Spanning Graphs i.e. 
sets of Triples sharing common Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph - + - Gets the type of the expression + A Comparer for Graphs which compares based on number of Triples + + Used internally in computing Graph Differences but made a public Graph as it may occasionally come in useful + - + - Transforms the Expression using the given Transformer + Compares Graphs based on their number of Triples - Expression Transformer + Graph + Graph - + - Represents the Leviathan lfn:factorial() function + Abstract Base Class for Graph Literal Nodes - + - Creates a new Leviathan Factorial Function + Creates a new Graph Literal Node in the given Graph which represents the given Subgraph - Expression + Graph this node is in + Sub Graph this node represents - + - Evaluates the expression + Creates a new Graph Literal Node whose value is an empty Subgraph - Evaluation Context - Binding ID - + Graph this node is in - + - Gets the String representation of the function + Deserializer Constructor - + Serialization Information + Streaming Context - + - Gets the Functor of the Expression + Deserialization Only constructor - + - Gets the type of the expression + Gets the Subgraph that this Node represents - + - Transforms the Expression using the given Transformer + Implementation of the Equals method for Graph Literal Nodes. Graph Literals are considered Equal if their respective Subgraphs are equal - Expression Transformer + Object to compare the Node with - + - Represents the Leviathan lfn:log() function + Implementation of the Equals method for Graph Literal Nodes. 
Graph Literals are considered Equal if their respective Subgraphs are equal + Object to compare the Node with + - + - Creates a new Leviathan Log Function + Determines whether this Node is equal to a Blank Node (should always be false) - Expression + Blank Node + - + - Creates a new Leviathan Log Function + Determines whether this Node is equal to a Graph Literal Node - Expression - Log Base Expression + Graph Literal Node + - + - Evaluates the expression + Determines whether this Node is equal to a Literal Node (should always be false) - Evaluation Context - Binding ID + Literal Node - + - Gets the String representation of the function + Determines whether this Node is equal to a URI Node (should always be false) + URI Node - + - Gets the Functor of the Expression + Determines whether this Node is equal to a Variable Node (should always be false) + Variable Node + - + - Gets the type of the expression + Determines whether this Node is equal to a Graph Literal Node + Graph Literal Node + - + - Transforms the Expression using the given Transformer + Implementation of ToString for Graph Literals which produces a String representation of the Subgraph in N3 style syntax - Expression Transformer - + - Represents the Leviathan lfn:ln() function + Implementation of CompareTo for Graph Literals + Node to compare to + + + Graph Literal Nodes are greater than Blank Nodes, Uri Nodes, Literal Nodes and Nulls + - + - Creates a new Leviathan Natural Logarithm Function + Returns an Integer indicating the Ordering of this Node compared to another Node - Expression + Node to test against + - + - Evaluates the expression + Returns an Integer indicating the Ordering of this Node compared to another Node - Evaluation Context - Binding ID + Node to test against - + - Gets the String representation of the function + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against - + - Gets the Type of this expression + Returns an Integer 
indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the Functor of the Expression + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Transforms the Expression using the given Transformer + Returns an Integer indicating the Ordering of this Node compared to another Node - Expression Transformer + Node to test against - + - Represents the Leviathan lfn:pow() function + Gets the Serialization Information + + Serialization Information + Streaming Context + + + + Reads the data for XML deserialization + XML Reader - + - Creates a new Leviathan Power Function + Writes the data for XML serialization - First Argument - Second Argument + XML Writer - + - Evaluates the expression + Throws an error as Graph Literals cannot be cast to a string - Evaluation Context - Binding ID - + - Gets the String representation of the function + Throws an error as Graph Literals cannot be cast to an integer - + - Gets the Functor of the Expression + Throws an error as Graph Literals cannot be cast to a decimal + - + - Gets the type of the expression + Throws an error as Graph Literals cannot be cast to a float + - + - Transforms the Expression using the given Transformer + Throws an error as Graph Literals cannot be cast to a double - Expression Transformer - + - Represents the Leviathan lfn:pythagoras() function + Throws an error as Graph Literals cannot be cast to a boolean + - + - Creates a new Leviathan Pythagorean Distance Function + Throws an error as Graph Literals cannot be cast to a date time - First Argument - Second Argument + - + - Evaluates the expression + Throws an error as Graph Literals cannot be cast to a date time - Evaluation Context - Binding ID - + - Gets the String representation of the function + Throws an error as Graph Literals cannot be cast to a time span - + - Gets the Functor of the Expression + Gets the URI of the datatype this valued node represents as 
a String - + - Gets the type of the expression + Gets the numeric type of the node - + - Transforms the Expression using the given Transformer + Class for representing Graph Literal Nodes which are supported in highly expressive RDF syntaxes like Notation 3 - Expression Transformer - - + - Represents the Leviathan lfn:rnd() function + Creates a new Graph Literal Node in the given Graph which represents the given Subgraph + Graph this node is in - + - Creates a new Leviathan Random Function + Creates a new Graph Literal Node whose value is an empty Subgraph + Graph this node is in + Sub-graph this node represents - + - Creates a new Leviathan Random Function + Deserialization Constructor - Maximum + Serialization Information + Streaming Context - + - Creates a new Leviathan Random Function + Deserialization Only Constructor - Minumum - Maximum - + - Evaluates the expression + Implementation of Compare To for Graph Literal Nodes - Evaluation Context - Binding ID + Graph Literal Node to Compare To + + Simply invokes the more general implementation of this method + - + - Gets the String representation of the function + Determines whether this Node is equal to a Graph Literal Node + Graph Literal Node - + - Gets the Functor of the Expression + Implements a Graph Isomorphism Algorithm + + + The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs but has been expanded upon significantly to use a variety of techniques. + + + Graph Equality is determined according to the following algorithm, we refer to the first graph as the Source Graph and the second graph as the Target Graph: + +
    +
  1. If both graphs are null they are considered equal
  2. +
  3. If only one of the given graphs is null then they are not equal
  4. +
  5. If the given graphs are reference equal then they are equal
  6. +
  7. If the given graphs have a different number of Triples they are not equal
  8. +
  9. Declare a list of triples which are the triples of the second graph called TargetTriples
  10. +
  11. Declare two dictionaries of Nodes to Integers which are called SourceClassification and TargetClassification
  12. +
  13. For Each Triple in the Source Graph +
      +
    1. If it is a ground triple and cannot be found and removed from TargetTriples then graphs are not equal since the triple does not exist in both graphs
    2. +
    3. If it contains blank nodes track the number of usages of this blank node in SourceClassification
    4. +
    +
  14. +
  15. If there are any triples remaining in TargetTriples which are ground triples then graphs are not equal since the Source Graph does not contain them
  16. +
  17. If all the triples from both graphs were ground triples (i.e. there were no blank nodes) then the graphs are equal
  18. +
  19. Iterate over the remaining triples in TargetTriples and populate the TargetClassification
  20. +
  21. If the count of the two classifications is different the graphs are not equal since there are differing numbers of blank nodes in the Graph
  22. +
  23. Now build two additional dictionaries of Integers to Integers which are called SourceDegreeClassification and TargetDegreeClassification. Iterate over SourceClassification and TargetClassification such that the corresponding degree classifications contain a mapping of the number of blank nodes with a given degree
  24. +
  25. If the count of the two degree classifications is different the graphs are not equal since there are not the same range of blank node degrees in both graphs
  26. +
  27. For All classifications in SourceDegreeClassification there must be a matching classification in TargetDegreeClassification else the graphs are not equal
  28. +
  29. Then build a possible mapping using the following rules: +
      +
    1. Any blank node used only once (single-use) in the Source Graph should be mapped to an equivalent blank node in the Target Graph. If this is not possible then the graphs are not equal
    2. +
    3. Any blank node with a unique degree in the Source Graph should be mapped to an equivalent blank node in the Target Graph. If this is not possible then the graphs are not equal
    4. +
    5. Any blank node used with unique constants (two other ground terms in a triple) in the Source Graph should be mapped to an equivalent blank node in the Target Graph. If this is not possible then the graphs are not equal.
    6. +
    7. Build up lists of dependent pairs of blank Nodes for both graphs
    8. +
    9. Use these lists to determine if there are any independent nodes not yet mapped in the Source Graph. These should be mapped to equivalent blank nodes in the Target Graph, if this is not possible the graphs are not equal
    10. +
    11. Important: Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    12. +
    13. Use the dependency information and existing mappings to generate a possible mapping
    14. +
    15. If a complete possible mapping (there is a mapping for each blank node from the Source Graph to the Target Graph) then test this mapping. If it succeeds then the graphs are equal
    16. +
    +
  30. +
  31. If we don't yet have a mapping take a divide and conquer approach: +
      +
    1. Take the not yet mapped blank nodes for each graph and sub-divide them into their isolated sub-graphs
    2. +
    3. If there are at least 2 isolated sub-graphs proceed to divide and conquer
    4. +
    5. For Each Isolated Sub-Graph from the Source Graph +
        +
      1. Consider each possible isolated sub-graph of the same size from the target graph, if there are none then graphs are not equal. If there is a single possible equal isolated sub-graph add the mappings for all involved blank nodes.
      2. +
      +
    6. +
    7. If we now have a complete possible mapping (there is a mapping for each blank node from the Source Graph to the Target Graph) then test the mapping. Return success/failure depending on whether the mapping is valid.
    8. +
    9. Important: Keep a copy of the mapping up to this point as a Base Mapping for use as a base for the brute force step
    10. +
    +
  32. +
  33. If we still don't have a complete mapping we now fallback to the Base Mapping and use it as a basis for brute forcing the possible solution space and testing every possibility until either a mapping works or we find the graphs to be non-equal
  34. +
+
- + - Gets the type of the expression + Compares two Graphs for equality + Graph + Graph + - + - Transforms the Expression using the given Transformer + Uses a series of Rules to attempt to generate a mapping without the need for brute force guessing - Expression Transformer + 1st Graph + 2nd Graph + 1st Graph Node classification + 2nd Graph Node classification + 1st Graph Degree classification + 2nd Graph Degree classification - + - Represents the Leviathan lfn:reciprocal() function + Uses a divide and conquer based approach to generate a mapping without the need for brute force guessing + 1st Graph + 2nd Graph + 1st Graph Node classification + 2nd Graph Node classification + Dependencies in the 1st Graph + Dependencies in the 2nd Graph + - + - Creates a new Leviathan Reciprocal Function + Generates and Tests all possibilities in a brute force manner - Expression + 1st Graph + 2nd Graph + 1st Graph Node classification + 2nd Graph Node classification + Dependencies in the 1st Graph + Dependencies in the 2nd Graph + - + - Evaluates the expression + Helper method for brute forcing the possible mappings - Evaluation Context - Binding ID + Base Mapping + Possible Mappings + + The base mapping at the time of the initial call shoudl contain known good mappings + - + - Gets the String representation of the function + Helper method for brute forcing the possible mappings + Base Mapping + Possible Mappings + Node to consider for mapping + + The base mapping contains known good mappings + - + - Gets the Functor of the Expression + Gets the Blank Node Mapping found between the Graphs (if one was found) - + - Gets the type of the expression + Represents a Pair of Nodes that occur in the same Triple - + - Transforms the Expression using the given Transformer + The Graph Persistence Wrapper is a wrapper around another Graph that can be used to batch persistence actions with the ability to Flush/Discard changes as desired. 
- Expression Transformer - + + + When disposed any outstanding changes are always flushed so if you make changes which you don't want to persist be sure to call the Discard() method before disposing of the Graph + + + Implementors who wish to make persistent graphs should extend this class and override the SupportsTriplePersistence property and the PersistInsertedTriples(), PersistDeletedTriples() and PersistGraph() methods. If you return true for the property then the PersistInsertedTriples() and PersistDeletedTriples() methods will be invoked to do persistence on batches of Triples. If your persistence mechanism requires persisting the entire graph at once return false for the property and override the PersistGraph() method appropriately. + +

Warning

+ + Note that the wrapper does not automatically dispose of the wrapped graph when the wrapper is Dispose, this is by design since disposing of the wrapped Graph can have unintended consequences + +
- + - Represents the Leviathan lfn:root() function + Underlying Graph this is a wrapper around - + - Creates a new Leviathan Root Function + Creates a new Graph Persistence Wrapper around a new Graph - First Argument - Second Argument - + - Evaluates the expression + Creates a new Graph Persistence Wrapper around a new Graph with the given always queue setting - Evaluation Context - Binding ID - + Whether to always queue actions + + The alwaysQueueActions setting when enabled will cause the wrapper to queue Asserts and Retracts for persistence regardless of whether the relevant Triples already exist (i.e. normally if a Triple exists is cannot be asserted again and if it doesn't exist it cannot be retracted). This is useful for creating derived wrappers which operate in write-only mode i.e. append mode for an existing graph that may be too large to reasonably load into memory + - + - Gets the String representation of the function + Creates a new Graph Persistence Wrapper around the given Graph - + Graph - + - Gets the Type of this expression + Creates a new Graph Persistence Wrapper around the given Graph with the given always queue setting + Graph + Whether to always queue actions + + The alwaysQueueActions setting when enabled will cause the wrapper to queue Asserts and Retracts for persistence regardless of whether the relevant Triples already exist (i.e. normally if a Triple exists is cannot be asserted again and if it doesn't exist it cannot be retracted). This is useful for creating derived wrappers which operate in write-only mode i.e. 
append mode for an existing graph that may be too large to reasonably load into memory + - + - Gets the Functor of the Expression + Deserialization Constructor + Serialization Information + Streaming Context - + - Transforms the Expression using the given Transformer + Destructor for the wrapper to ensure that Dispose() is called and thus that persistence happens - Expression Transformer - - + - Represents the Leviathan lfn:sq() function + Gets/Sets the Base URI of the Graph - + - Creates a new Leviathan Square Function + Gets whether the Graph is empty - Expression - + - Evaluates this expression + Gets the Namespace Map for the Graph - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Gets the Nodes of the Graph - - + - Gets the Type of this expression + Gets the Triple Collection for the Graph - + - Gets the Functor of the Expression + Asserts a Triple in the Graph + Triple - + - Transforms the Expression using the given Transformer + Asserts Triples in the Graph - Expression Transformer - + Triples - + - Represents the Leviathan lfn:sqrt() function + Retracts a Triple from the Graph + Triple - + - Creates a new Leviathan Square Root Function + Retracts Triples from the Graph - Expression + Triples - + - Evaluates the expression + Clears the Graph - Evaluation Context - Binding ID + + + + Creates a new Blank Node with the given Node ID + + Node ID - + - Gets the String representation of the function + Creates a new Blank Node - + - Gets the Functor of the Expression + Gets the next available Blank Node ID + - + - Gets the type of the expression + Creates a new Graph Literal Node with the given sub-graph + Sub-graph + - + - Transforms the Expression using the given Transformer + Creates a new Graph Literal Node - Expression Transformer - + - Represents the Leviathan lfn:ten() function + Creates a new Literal Node + Value + - + - Creates a new Leviathan Ten Function + Creates a new Literal Node with the given Datatype - 
Expression + Value + Datatype URI + - + - Evaluates the expression + Creates a new Literal Node with the given Language - Evaluation Context - Binding ID + Value + Language - + - Gets the String representation of the function + Creates a new URI Node that references the Graphs Base URI - + - Gets the Functor of the Expression + Creates a new URI Node from a QName + QName + - + - Gets the type of the expression + Creates a new URI Node + URI + - + - Transforms the Expression using the given Transformer + Creates a new Variable Node - Expression Transformer + Variable Name - + - Represents the Leviathan lfn:cartesian() function + Attempts to get the Blank Node with the given ID + Node ID + The Node if it exists or null - + - Creates a new 2D Cartesian Function + Attempts to get the Literal Node with the given Value and Language - Expression for X Coordinate of 1st point - Expression for Y Coordinate of 1st point - Expression for X Coordinate of 2nd point - Expression for Y Coordinate of 2nd point + Value + Language + The Node if it exists or null - + - Creates a new 3D Cartesian Function + Attempts to get the Literal Node with the given Value - Expression for X Coordinate of 1st point - Expression for Y Coordinate of 1st point - Expression for Z Coordiante of 1st point - Expression for X Coordinate of 2nd point - Expression for Y Coordinate of 2nd point - Expression for Z Coordinate of 2nd point + Value + The Node if it exists or null - + - Evaluates the expression + Attempts to get the Literal Node with the given Value and Datatype - Evaluation Context - Binding ID - + Value + Datatype URI + The Node if it exists or null otherwise - + - Internal helper for calculating 2D Cartesian Distance + Gets all the Triples involving the given URI - Evaluation Context - Binding ID - + The URI to find Triples involving + Zero/More Triples - + - Internal helper for calculating 3D Cartesian Distance + Gets all the Triples involving the given Node - Evaluation Context - Binding ID 
- + The Node to find Triples involving + Zero/More Triples - + - Gets the Variables used in the function + Gets all the Triples with the given URI as the Object + The URI to find Triples with it as the Object + Zero/More Triples - + - Gets the String representation of the function + Gets all the Triples with the given Node as the Object + + The Node to find Triples with it as the Object + + + + + Gets all the Triples with the given Node as the Predicate + The Node to find Triples with it as the Predicate - + - Gets the Type of the Expression + Gets all the Triples with the given Uri as the Predicate + The Uri to find Triples with it as the Predicate + Zero/More Triples - + - Gets the Functor of the Expression + Gets all the Triples with the given Node as the Subject + The Node to find Triples with it as the Subject + Zero/More Triples - + - Gets the Arguments of the Expression + Gets all the Triples with the given Uri as the Subject + The Uri to find Triples with it as the Subject + Zero/More Triples - + - Gets whether an expression can safely be evaluated in parallel + Selects all Triples with the given Subject and Predicate + Subject + Predicate + - + - Transforms the Expression using the given Transformer + Selects all Triples with the given Subject and Object - Expression Transformer + Subject + Object - + - Namespace containing expression classes which provide functions from the XPath function library + Selects all Triples with the given Predicate and Object + Predicate + Object + - + - Namespace containing expression classes which provide cast functions from the XPath function library + Returns the UriNode with the given QName if it exists + The QName of the Node to select + - + - Abstract Expression class used as the base class for implementation of XPath Casting Function expressions + Returns the UriNode with the given Uri if it exists + The Uri of the Node to select + Either the UriNode Or null if no Node with the given Uri exists - + - Expression to be 
Cast by the Cast Function + Gets whether a given Triple exists in this Graph + Triple to test + - + - Creates a new Base XPath Cast Expression + Merges another Graph into the current Graph - Expression to be Cast + Graph to Merge into this Graph + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - + - Gets the value of casting the result of the inner expression + Merges another Graph into the current Graph - Evaluation Context - Binding ID - + Graph to Merge into this Graph + Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in + + + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. + + + The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. When the Merge operation is completed the Merged event is raised + + - + - Gets the String representation of this Expression + Determines whether a Graph is equal to another Object + Object to test + + + A Graph can only be equal to another Object which is an IGraph + + + Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals + + - + - Gets the enumeration of Variables involved in this expression + Determines whether this Graph is equal to the given Graph + Graph to test for equality + Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes + + + + The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs + + + Graph Equality is determined according to the following algorithm: + +
    +
  1. If the given Graph is null Graphs are not equal
  2. +
  3. If the given Graph is this Graph (as determined by Reference Equality) then Graphs are equal
  4. +
  5. If the Graphs have a different number of Triples they are not equal
  6. +
  7. Declare a list of Triples which are the Triples of the given Graph called OtherTriples
  8. +
  9. Declare two dictionaries of Nodes to Integers which are called LocalClassification and OtherClassification
  10. +
  11. For Each Triple in this Graph +
      +
    1. If it is a Ground Triple and cannot be found and removed from OtherTriples then Graphs are not equal since the Triple does not exist in both Graphs
    2. +
    3. If it contains Blank Nodes track the number of usages of this Blank Node in LocalClassification
    4. +
    +
  12. +
  13. If there are any Triples remaining in OtherTriples which are Ground Triples then Graphs are not equal since this Graph does not contain them
  14. +
  15. If all the Triples from both Graphs were Ground Triples and there were no Blank Nodes then the Graphs are equal
  16. +
  17. Iterate over the remaining Triples in OtherTriples and populate the OtherClassification
  18. +
  19. If the count of the two classifications is different the Graphs are not equal since there are differing numbers of Blank Nodes in the Graph
  20. +
  21. Now build two additional dictionaries of Integers to Integers which are called LocalDegreeClassification and OtherDegreeClassification. Iterate over LocalClassification and OtherClassification such that the corresponding degree classifications contain a mapping of the number of Blank Nodes with a given degree
  22. +
  23. If the count of the two degree classifications is different the Graphs are not equal since there are not the same range of Blank Node degrees in both Graphs
  24. +
  25. For All classifications in LocalDegreeClassification there must be a matching classification in OtherDegreeClassification else the Graphs are not equal
  26. +
  27. Then build a possible mapping using the following rules: +
      +
    1. Any Blank Node used only once should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    2. +
    3. Any Blank Node with a unique degree should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    4. +
    5. Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    6. +
    7. Build up lists of dependent pairs of Blank Nodes for both Graphs
    8. +
    9. Use these lists to determine if there are any independent nodes not yet mapped. These should be mapped to equivalent Blank Nodes in the other Graph, if this is not possible the Graphs are not equal
    10. +
    11. Use the Dependencies and existing mappings to generate a possible mapping
    12. +
    13. If a Complete Possible Mapping (there is a Mapping for each Blank Node from this Graph to the Other Graph) then test this mapping. If it succeeds then the Graphs are equal
    14. +
    15. Otherwise we now fallback to the Base Mapping and use it as a basis for Brute Forcing the possible solution space and testing every possibility until either a mapping works or we find the Graphs to be non-equal
    16. +
    +
  28. +
+
- + - Gets the Type of the Expression + Checks whether this Graph is a sub-graph of the given Graph + Graph + - + - Gets the Functor of the Expression + Checks whether this Graph is a sub-graph of the given Graph + Graph + Mapping of Blank Nodes + - + - Gets the Arguments of the Expression + Checks whether this Graph has the given Graph as a sub-graph + Graph + - + - Gets whether an expression can safely be evaluated in parallel + Checks whether this Graph has the given Graph as a sub-graph + Graph + Mapping of Blank Nodes + - + - Transforms the Expression using the given Transformer + Computes the Difference between this Graph the given Graph - Expression Transformer + Graph + + + Produces a report which shows the changes that must be made to this Graph to produce the given Graph + + - + - Class representing an XPath Boolean Cast Function + Helper function for Resolving QNames to URIs + QName to resolve to a Uri + - + - Creates a new XPath Boolean Cast Function Expression + Event which is raised when a Triple is asserted in the Graph - Expression to be cast - + - Casts the value of the inner Expression to a Boolean + Event which is raised when a Triple is retracted from the Graph - Evaluation Context - Binding ID - - + - Gets the String representation of the Expression + Event which is raised when the Graph contents change - - + - Gets the Functor of the Expression + Event which is raised just before the Graph is cleared of its contents - + - Transforms the Expression using the given Transformer + Event which is raised after the Graph is cleared of its contents - Expression Transformer - - + - Class representing an XPath Date Time Cast Function + Event which is raised when a Merge operation is requested on the Graph - + - Creates a new XPath Date Time Cast Function Expression + Event which is raised when a Merge operation is completed on the Graph - Expression to be cast - + - Casts the value of the inner Expression to a Date Time + Event Handler which handles the 
Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event - Evaluation Context - Binding ID - + Sender + Triple Event Arguments - + - Gets the String representation of the Expression + Helper method for raising the Triple Asserted event manually - + Triple Event Arguments - + - Gets the Functor of the Expression + Helper method for raising the Triple Asserted event manually + Triple - + - Transforms the Expression using the given Transformer + Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event - Expression Transformer - + Sender + Triple Event Arguments - + - Class representing an XPath Decimal Cast Function + Helper method for raising the Triple Retracted event manually + - + - Creates a new XPath Decimal Cast Function Expression + Helper method for raising the Triple Retracted event manually - Expression to be cast + Triple - + - Casts the Value of the inner Expression to a Decimal + Helper method for raising the Changed event - Evaluation Context - Binding ID - + Triple Event Arguments - + - Gets the String representation of the Expression + Helper method for raising the Changed event - - + - Gets the Functor of the Expression + Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Transforms the Expression using the given Transformer + Helper method for raising the Cleared event - Expression Transformer - - + - Class representing an XPath Double Cast Function + Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Creates a new XPath Double Cast Function Expression + Helper method for raising the Merged event - Expression to be cast - + - Casts the 
value of the inner Expression to a Double + Helper method for attaching the necessary event Handlers to a Triple Collection - Evaluation Context - Binding ID - + Triple Collection + + May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's + - + - Gets the String representation of the Expression + Helper method for detaching the necessary event Handlers from a Triple Collection - + Triple Collection + + May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's + - + - Gets the Functor of the Expression + Flushes all changes which have yet to be persisted to the underlying storage - + - Transforms the Expression using the given Transformer + Discards all changes which have yet to be persisted so that they are not persisted to the underlying storage - Expression Transformer - - + - Class representing an XPath Float Cast Function + Used to indicate whether the persistence mechansim can persist batches of Triples + + + If true then the PersistInsertedTriples() and PersistDeletedTriples() methods are used to persist changes when the Flush() method is called. If false then the PersistGraph() method will be invoked instead. 
+ + - + - Creates a new XPath Float Cast Function Expression + Persists inserted Triples to the underlying Storage - Expression to be cast + Triples - + - Casts the value of the inner Expression to a Float + Persists deleted Triples to the underlying Storage - Evaluation Context - Vinding ID - + - + - Gets the String representation of the Expression + Persists the entire Graph to the underlying Storage - - + - Gets the Functor of the Expression + Disposes of the persistence wrapper and in doing so persists any changes to the underlying storage - + - Transforms the Expression using the given Transformer + Disposes of the persistence wrapper and in doing so persists any changes to the underlying storage - Expression Transformer - + Whether the method was called from Dispose() or the destructor - + - Class representing an XPath Integer Cast Function + Gets the Serialization Information + Serialization Information + Streaming Context - + - Creates a new XPath Integer Cast Function Expression + Gets the Schema for XML serialization - Expression to be cast + - + - Casts the value of the inner Expression to an Integer + Reads the data for XML deserialization - Evaluation Context - Binding ID - + XML Reader - + - Gets the String representation of the Expression + Writes the data for XML serialization - + XML Writer - + - Gets the Functor of the Expression + The Store Graph Persistence Wrapper is a wrapper around another Graph that will be persisted to an underlying store via a provided IStorageProvider implementation - + - Transforms the Expression using the given Transformer + Creates a new Store Graph Persistence Wrapper - Expression Transformer - + Generic IO Manager + Graph to wrap + Graph URI (the URI the Graph will be persisted as) + Whether to operate in write-only mode + + + Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped 
must be an empty Graph + + - + - Class representing an XPath String Cast Function + Creates a new Store Graph Persistence Wrapper + Generic IO Manager + Graph to wrap + Whether to operate in write-only mode + + + Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped must be an empty Graph + + - + - Creates a new XPath String Cast Function Expression + Creates a new Store Graph Persistence Wrapper - Expression to be cast + Generic IO Manager + Graph to wrap - + - Casts the results of the inner expression to a Literal Node typed xsd:string + Creates a new Store Graph Persistence Wrapper around a new empty Graph - Evaluation Context - Binding ID - + Generic IO Manager + Graph URI (the URI the Graph will be persisted as) + Whether to operate in write-only mode + + + Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property + + + When not operating in write-only mode the existing Graph will be loaded from the underlying store + + - + - Gets the String representation of the Expression + Creates a new Store Graph Persistence Wrapper around a new empty Graph - + Generic IO Manager + Graph URI (the URI the Graph will be persisted as) - + - Gets the Functor of the Expression + Gets whether the in-use IStorageProvider supports triple level updates - + - Transforms the Expression using the given Transformer + Persists the deleted Triples to the in-use IStorageProvider - Expression Transformer - + Triples - + - Namespace containing expression classes which provide date time functions from the XPath function library + Persists the inserted Triples to the in-use IStorageProvider + Triples - + - Abstract Base Class for functions which are Unary functions applied to Date Time objects in the XPath function library + Persists the entire Graph to 
the in-use IStorageProvider - + - Creates a new Unary XPath Date Time function + The File Graph Persistence Wrapper is a wrapper around antoher Graph that will be persisted to a file - - + - Gets the numeric value of the function in the given Evaluation Context for the given Binding ID + Creates a new File Graph Persistence Wrapper around the given Graph - Evaluation Context - Binding ID - + Graph + File to persist to - + - Abstract method which derived classes must implement to generate the actual numeric value for the function + Creates a new File Graph Persistence Wrapper around a new emtpy Graph - Date Time - + File to persist to + + If the given file already exists then the Graph will be loaded from that file + - + - Gets the String representation of the Function + Returns that Triple persistence is not supported - - + - Gets the Type of the Expression + Persists the entire Graph to a File - + - Represents the XPath day-from-dateTime() function + Interface for RDF Graphs + + + Most implementations will probably want to inherit from the abstract class BaseGraph since it contains reference implementations of various algorithms (Graph Equality/Graph Difference/Sub-Graph testing etc) which will save considerable work in implementation and ensure consistent behaviour of some methods across implementations. 
+ + - + - Creates a new XPath Day from Date Time function + Gets/Sets the Base Uri for the Graph - Expression - + - Calculates the numeric value of the function from the given Date Time + Gets whether a Graph is Empty - Date Time - - + - Gets the String representation of the function + Gets the Namespace Map for the Graph - - + - Gets the Functor of the Expression + Gets the Nodes of the Graph - + - Transforms the Expression using the given Transformer + Gets the Triple Collection for the Graph - Expression Transformer - - + - Represents the XPath hours-from-dateTime() function + Asserts a Triple in the Graph + A Triple - + - Creates a new XPath Hours from Date Time function + Asserts an Enumerable of Triples in the Graph - Expression + An Enumerable of Triples - + - Calculates the numeric value of the function from the given Date Time + Retracts a Triple from the Graph - Date Time - + A Triple - + - Gets the String representation of the function + Retracts an Enumerable of Triples from the Graph - + Enumerable of Triples - + - Gets the Functor of the Expression + Retracts all Triples from the Graph + + + The Graph should raise the ClearRequested event at the start of the Clear operation and abort the operation if the operation is cancelled by an event handler. On completing the Clear the Cleared event should be raised. 
+ + - + - Transforms the Expression using the given Transformer + Creates a URI Node that corresponds to the Base URI of the Graph - Expression Transformer - + - Represents the XPath minutes-from-dateTime() function + Creates a URI Node for the given QName using the Graphs NamespaceMap to resolve the QName + QName + - + - Creates a new XPath Minutes from Date Time function + Selects the Blank Node with the given ID if it exists in the Graph, returns null otherwise - Expression + Node ID + The Node if it exists in the Graph or null - + - Calculates the numeric value of the function from the given Date Time + Selects the Literal Node with the given Value and Language if it exists in the Graph, returns null otherwise - Date Time - + Value of the Literal + Language Specifier of the Literal + The Node if it exists in the Graph or null - + - Gets the String representation of the function + Selects the Literal Node with the given Value if it exists in the Graph, returns null otherwise - + Value of the Literal + The Node if it exists in the Graph or null - + - Gets the Functor of the Expression + Selects the Literal Node with the given Value and DataType if it exists in the Graph, returns otherwise + Value of the Literal + Data Type of the Literal + The Node if it exists in the Graph or null - + - Transforms the Expression using the given Transformer + Selects all Triples which have a Uri Node with the given Uri - Expression Transformer + Uri - + - Represents the XPath month-from-dateTime() function + Selects all Triples which contain the given Node + Node + - + - Creates a new XPath Month from Date Time function + Selects all Triples where the Object is a Uri Node with the given Uri - Expression + Uri + - + - Calculates the numeric value of the function from the given Date Time + Selects all Triples where the Object is a given Node - Date Time + Node - + - Gets the String representation of the function + Selects all Triples where the Predicate is a given Node + Node - + - 
Gets the Functor of the Expression + Selects all Triples where the Predicate is a Uri Node with the given Uri + Uri + - + - Transforms the Expression using the given Transformer + Selects all Triples where the Subject is a given Node - Expression Transformer + Node - + - Represents the XPath seconds-from-dateTime() function + Selects all Triples where the Subject is a Uri Node with the given Uri + Uri + - + - Creates a new XPath Seconds from Date Time function + Selects all Triples with the given Subject and Predicate - Expression + Subject + Predicate + - + - Calculates the numeric value of the function from the given Date Time + Selects all Triples with the given Subject and Object - Date Time + Subject + Object - + - Gets the String representation of the function + Selects all Triples with the given Predicate and Object + Predicate + Object - + - Gets the Functor of the Expression + Selects the Uri Node with the given QName if it exists in the Graph, returns null otherwise + QName + The Node if it exists in the Graph or null - + - Transforms the Expression using the given Transformer + Selects the Uri Node with the given Uri if it exists in the Graph, returns null otherwise - Expression Transformer + Uri + The Node if it exists in the Graph or null + + + + Gets whether a given Triple is in this Graph + + Triple to test - + - Represents the XPath timezone-from-dateTime() function + Merges the given Graph into this Graph + Graph to merge + + + The Graph should raise the MergeRequested event at the start of the Merge operation and abort the operation if the operation is cancelled by an event handler. On completing the Merge the Merged event should be raised. 
+ + - + - Expression that the Function applies to + Merges the given Graph into this Graph + Graph to merge + Indicates that the Merge should preserve the Graph URIs of Nodes + + + The Graph should raise the MergeRequested event at the start of the Merge operation and abort the operation if the operation is cancelled by an event handler. On completing the Merge the Merged event should be raised. + + - + - Creates a new XPath Timezone from Date Time function + Checks whether a Graph is equal to another Graph and if so returns the mapping of Blank Nodes - Expression + Graph to compare with + Mapping of Blank Nodes + - + - Calculates the value of the function in the given Evaluation Context for the given Binding ID + Checks whether this Graph is a sub-graph of the given Graph - Evaluation Context - Binding ID + Graph - + - Calculates the effective boolean value of the function in the given Evaluation Context for the given Binding ID + Checks whether this Graph is a sub-graph of the given Graph - Evaluation Context - Binding ID + Graph + Mapping of Blank Nodes - + - Gets the Variables used in the function + Checks whether this Graph has the given Graph as a sub-graph + Graph + - + - Gets the String representation of the function + Checks whether this Graph has the given Graph as a sub-graph + Graph + Mapping of Blank Nodes - + - Gets the Type of the Expression + Calculates the difference between this Graph and the given Graph + Graph + + + + Produces a report which shows the changes that must be made to this Graph to produce the given Graph + + - + - Gets the Functor of the Expression + Resolves a QName into a URI using the Namespace Map and Base URI of this Graph + QName + - + - Gets the Arguments of the Expression + Event which is raised when a Triple is asserted in the Graph + + Whenever this event is raised the Changed event should also be raised + - + - Gets whether an expression can safely be evaluated in parallel + Event which is raised when a Triple is 
retracted from the Graph + + Whenever this event is raised the Changed event should also be raised + - + - Transforms the Expression using the given Transformer + Event which is raised when the Graph contents change - Expression Transformer - - + - Represents the XPath year-from-dateTime() function + Event which is raised just before the Graph is cleared of its contents - + - Creates a new XPath Year from Date Time function + Event which is raised after the Graph is cleared of its contents - Expression - + - Calculates the numeric value of the function from the given Date Time + Event which is raised just before a Merge operation begins on the Graph - Date Time - - + - Gets the String representation of the function + Event which is raised when a Merge operation is completed on the Graph - - + - Gets the Functor of the Expression + Interface for RDF Graphs which provide Transactions i.e. changes to them can be Flushed (committed) or Discard (rolled back) as desired - + - Transforms the Expression using the given Transformer + Flushes any changes to the Graph - Expression Transformer - - + - Namespace containing expression classes which provide numeric functions from the XPath function library + Discards any changes to the Graph - + - Represents the XPath fn:round() function + Interface for Namespace Maps which provide mappings between Namespace Prefixes and Namespace URIs - + - Creates a new XPath RoundHalfToEven function + Adds a Namespace to the Namespace Map - Expression + Namespace Prefix + Namespace Uri - + - Creates a new XPath RoundHalfToEven function + Clears the Namespace Map - Expression - Precision - + - Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Returns the Namespace URI associated with the given Prefix - Evaluation Context - Binding ID - + The Prefix to lookup the Namespace URI for + URI for the Namespace - + - Gets the String representation of the function + Returns the Prefix associated with the 
given Namespace URI - + The Namespace URI to lookup the Prefix for + String prefix for the Namespace - + - Gets the Type of the Expression + Method which checks whether a given Namespace Prefix is defined + Prefix to test + - + - Gets the Functor of the Expression + Imports the contents of another Namespace Map into this Namespace Map + Namespace Map to import + + Prefixes in the imported Map which are already defined in this Map are ignored, this may change in future releases. + - + - Transforms the Expression using the given Transformer + Event which is raised when a Namespace is Added - Expression Transformer - - + - Represents the XPath fn:abs() function + Event which is raised when a Namespace is Modified - + - Creates a new XPath Absolute function + Event which is raised when a Namespace is Removed - Expression - + - Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Gets a Enumeratorion of all the Prefixes - Evaluation Context - Binding ID - - + - Gets the String representation of the function + A Function which attempts to reduce a Uri to a QName + The Uri to attempt to reduce + The value to output the QName to if possible + + This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. 
+ - + - Gets the Type of the Expression + Removes a Namespace from the Namespace Map + Namespace Prefix of the Namespace to remove - + - Gets the Functor of the Expression + A Namespace Mapper which has an explicit notion of Nesting - + - Transforms the Expression using the given Transformer + Gets the Nesting Level at which the given Namespace is definition is defined - Expression Transformer + Prefix - + - Represents the XPath fn:ceiling() function + Increments the Nesting Level - + - Creates a new XPath Ceiling function + Decrements the Nesting Level - Expression + + When the Nesting Level is decremented any Namespaces defined at a greater Nesting Level are now out of scope and so are removed from the Mapper + - + - Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Gets the current Nesting Level - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Node Type Values - - + - Gets the Type of the Expression + A Blank Node - + - Gets the Functor of the Expression + A Uri Node - + - Transforms the Expression using the given Transformer + A Literal Node - Expression Transformer - - + - Represents the XPath fn:floor() function + A Graph Literal Node - + - Creates a new XPath Floor function + A Variable Node (currently only used in N3) - Expression - + - Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Interface for Nodes - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Nodes have a Type - + Primarily provided so can do quick integer comparison to see what type of Node you have without having to do actual full blown Type comparison - + - Gets the Type of the Expression + Gets the Graph the Node belongs to - + - Gets the Functor of the Expression + Gets/Sets the Graph URI associated with a Node - + - Transforms the Expression using the given Transformer + Gets the String representation of the 
Node - Expression Transformer - + - Represents the XPath fn:round() function + Gets the String representation of the Node formatted with the given Node formatter + Formatter + - + - Creates a new XPath Round function + Gets the String representation of the Node formatted with the given Node formatter - Expression + Formatter + Triple Segment + - + - Gets the Numeric Value of the function as evaluated in the given Context for the given Binding ID + Interface for URI Nodes - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Gets the URI the Node represents - - + - Gets the Type of the Expression + Interface for Blank Nodes - + - Gets the Functor of the Expression + Gets the Internal ID of the Blank Node - + - Transforms the Expression using the given Transformer + Interface for Literal Nodes - Expression Transformer - - + - Namespace containing expression classes which provide string functions from the XPath function library + Gets the Lexical Value of the Literal - + - Represents the XPath fn:compare() function + Gets the Language specifier (if any) of the Literal or the Empty String - + - Creates a new XPath Compare function + Gets the DataType URI (if any) of the Literal or null - First Comparand - Second Comparand - + - Gets the Value of the function as applied to the given String Literal and Argument + Interface for Graph Literal Nodes - Simple/String typed Literal - Argument - - + - Gets the String representation of the function + Gets the Sub-graph the Graph Literal represents - - + - Gets the Functor of the Expression + Interface for Variable Nodes - + - Transforms the Expression using the given Transformer + Gets the Variable Name - Expression Transformer - - + - Abstract Base class for XPath Binary String functions + Interface for classes which can create Nodes - + - Expression the function applies over + Creates a Blank Node with a new automatically generated ID + - + - Argument expression + Creates a Blank Node with 
the given Node ID + Node ID + - + - Whether the argument can be null + Creates a Graph Literal Node which represents the empty Subgraph + - + - Type validation function for the argument + Creates a Graph Literal Node which represents the given Subgraph + Subgraph + - + - Creates a new XPath Binary String function + Creates a Literal Node with the given Value and Data Type - Expression - Argument - Whether the argument may be null - Type validator for the argument + Value of the Literal + Data Type URI of the Literal + - + - Gets the Value of the function as evaluated in the given Context for the given Binding ID + Creates a Literal Node with the given Value - Context - Binding ID + Value of the Literal - + - Gets the Value of the function as applied to the given String Literal + Creates a Literal Node with the given Value and Language - Simple/String typed Literal + Value of the Literal + Language Specifier for the Literal - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a URI Node for the given URI - Simple/String typed Literal - Argument + URI - + - Gets the Variables used in the function + Creates a Variable Node for the given Variable Name + + - + - Gets the String representation of the function + Creates a new unused Blank Node ID and returns it - + - Gets the Type of the Expression + Interface for parsers that generate objects of some sort + Generated Object Type + + + Primarily used as a marker interface in relation to MimeTypesHelper to provide a mechanism whereby parsers for arbitrary objects can be registered and associated with MIME Types and File Extensions + + - + - Gets the Functor of the Expression + Parses an Object from an Input Stream + Input Stream + - + - Gets the Arguments of the Expression + Parses an Object from a Text Stream + Text Stream + - + - Gets whether an expression can safely be evaluated in parallel + Parses an Object from a File + Filename + - + - Transforms the Expression using the 
given Transformer + Parses an Object from a String - Expression Transformer + String - + - Abstract Base Class for XPath Unary String functions + Parses an Object from a Parameterized String + Parameterized String + - + - Expression the function applies over + Interface for Handlers which handle the RDF produced by parsers - + - Creates a new XPath Unary String function + Start the Handling of RDF - Expression + May be thrown if the Handler is already in use and the implementation is not thread-safe - + - Gets the Value of the function as evaluated in the given Context for the given Binding ID + End the Handling of RDF - Context - Binding ID - + Whether parsing finished without error - + - Gets the Value of the function as applied to the given String Literal + Handles a Namespace Definition - Simple/String typed Literal - + Namespace Prefix + Namespace URI + Should return true if parsing should continue or false if it should be aborted - + - Gets the Variables used in the function + Handles a Base URI Definition + Base URI + Should return true if parsing should continue or false if it should be aborted - + - Gets the String representation of the function + Handles a Triple - + Triple + Should return true if parsing should continue or false if it should be aborted - + - Gets the Type of the Expression + Gets whether the Handler will always handle all data (i.e. 
won't terminate parsing early) - + - Gets the Functor of the Expression + Interface for Handlers which wrap other Handlers - + - Gets the Arguments of the Expression + Gets the Inner Handlers used by this Handler - + - Gets whether an expression can safely be evaluated in parallel + Interface to be implemented by RDF Readers which parse Concrete RDF Syntax - + - Transforms the Expression using the given Transformer + Method for Loading a Graph from some Concrete RDF Syntax via some arbitrary Stream - Expression Transformer - + Graph to load RDF into + The reader to read input from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO Error while trying to access/parse the Stream - + - Represents the XPath fn:concat() function + Method for Loading a Graph from some Concrete RDF Syntax via some arbitrary Input + Graph to load RDF into + The reader to read input from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO Error while trying to access/parse the Stream - + - Creates a new XPath Concatenation function + Method for Loading a Graph from some Concrete RDF Syntax from a given File - First Expression - Second Expression + Graph to load RDF into + The Filename of the File to read from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO Error while trying to access/parse the File - + - Creates a new XPath Concatenation function + Method for Loading RDF using a RDF Handler from some Concrete RDF Syntax via some arbitrary Stream - Enumeration of expressions + RDF Handler to use + The reader to read input from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO 
Error while trying to access/parse the Stream - + - Gets the Value of the function as evaluated in the given Context for the given Binding ID + Method for Loading RDF using a RDF Handler from some Concrete RDF Syntax via some arbitrary Stream - Context - Binding ID - + RDF Handler to use + The reader to read input from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO Error while trying to access/parse the Stream - + - Gets the Arguments the function applies to + Method for Loading RDF using a RDF Handler from some Concrete RDF Syntax from a given File + RDF Handler to use + The Filename of the File to read from + Thrown if the Parser tries to output something that is invalid RDF + Thrown if the Parser cannot Parse the Input + Thrown if the Parser encounters an IO Error while trying to access/parse the Stream - + - Gets whether an expression can safely be evaluated in parallel + Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed - + - Gets the Variables used in the function + Interface for Parsers that support Tokeniser Tracing - + - Gets the String representation of the function + Gets/Sets whether Tokeniser Tracing is used - - + - Gets the Type of the Expression + Interface for Parsers that support Parser Tracing - + - Gets the Functor of the Expression + Gets/Sets whether Parser Tracing is used - + - Transforms the Expression using the given Transformer + Interface for parsers that use token based parsing - Expression Transformer - - + - Represents the XPath fn:encode-for-uri() function + Gets/Sets the token queue mode used - + - Creates a new XPath Encode for URI function + Class for representing errors in parsing RDF - Expression - + - Gets the Value of the function as applied to the given String Literal + Creates a new RDF Parse Exception with the given Message - Simple/String typed Literal - + 
Error Message - + - Gets the String representation of the function + Creates a new RDF Parse Exception with the given Message and Inner Exception - + Error Message + Inner Exception - + - Gets the Functor of the Expression + Creates a new RDF Parse Exception which contains Position Information taken from the given Token + Error Message + Token - + - Transforms the Expression using the given Transformer + Creates a new RDF Parse Exception which contains Position Information taken from the given Token - Expression Transformer - + Error Message + Token + Inner Exception - + - Represents the XPath fn:ends-with() function + Creates a new RDF Parse Exception which contains Position Information + Error Message + Line the error occurred on + Column Position the error occurred at + Exeception that caused this exception - + - Creates a new XPath Ends With function + Creates a new RDF Parse Exception which contains Position Information - Expression - Suffix Expression + Error Message + Line the error occurred on + Column Position the error occurred at - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a new RDF Parse Exception which contains Position Information - Simple/String typed Literal - Argument - + Error Message + Line the error occurred on + Column Position the error starts at + Column Position the error ends at + Error that caused this exception - + - Gets the String representation of the function + Creates a new RDF Parse Exception which contains Position Information - + Error Message + Line the error occurred on + Column Position the error starts at + Column Position the error ends at - + - Gets the Functor of the Expression + Creates a new RDF Parse Exception which contains Position Information + Error Message + Line the error starts on + Line the error ends on + Column Position the error starts at + Column Position the error ends at + Error that caused this exception - + - Transforms the Expression using the given 
Transformer + Creates a new RDF Parse Exception which contains Position Information - Expression Transformer - + Error Message + Line the error starts on + Line the error ends on + Column Position the error starts at + Column Position the error ends at - + - Represents the XPath fn:escape-html-uri() function + Creates a new RDF Parse Exception which contains Position Information + Error Message + Position Information + Error that caused this exception - + - Creates a new XPath Escape HTML for URI function + Creates a new RDF Parse Exception which contains Position Information - Expression + Error Message + Position Information - + - Gets the Value of the function as applied to the given String Literal + Gets whether the Exception has any position information - Simple/String typed Literal - - + - Gets the String representation of the function + Gets the Start Line of the Error or -1 if no position information - - + - Gets the Functor of the Expression + Gets the End Line of the Error or -1 if no position information - + - Transforms the Expression using the given Transformer + Gets the Start Column of the Error or -1 if no position information - Expression Transformer - - + - Represents the XPath fn:lower-case() function + Gets the End Column of the Error or -1 if no position information - + - Creates a new XPath Lower Case function + Class of exceptions that may occur when doing multi-threaded parsing of RDF - Expression + + + Used when a process may result in multiple errors from different threads + + - + - Gets the Value of the function as applied to the given String Literal + Creates a new Threaded RDF Parsing Exception - Simple/String typed Literal - + Error Message - + - Gets the String representation of the function + Adds an Exception to the list of Inner Exceptions - + Exception - + - Gets the Functor of the Expression + Gets the enumeration of Exceptions - + - Transforms the Expression using the given Transformer + Class for representing errors in 
selecting an appropriate parser to parse RDF with - Expression Transformer - - + - Represents the XPath fn:normalize-space() function + Creates a new RDF Parser Selection Exception with the given Message + Error Message - + - Creates a new XPath Normalize Space function + Creates a new RDF Parser Selection Exception with the given Message and Inner Exception - Expression + Error Message + Inner Exception - + - Gets the Value of the function as applied to the given String Literal + Class for representing that a parser has been terminated by a IRdfHandler - Simple/String typed Literal - + + Used internally to help force execution to jump back to the point where we can handle by safely discarding this exception and stop parsing + - + - Gets the String representation of the function + Creates a new Parsing Terminated exception - - + - Gets the Functor of the Expression + + Namespace for Parsing classes and variety of supporting Classes. + + + Classes here are primarily implementations of IRdfReader with some implementations of IStoreReader and a few other specialised classes. + + + Has child namespaces Events and Tokens for supporting Event and Token based Parsing. + - + - Transforms the Expression using the given Transformer + + Namespace for Parser Context classes, these are classes that are used internally by parsers to store their state. This allows parsers to be safely used in a multi-threaded scenario so the parsing of one Graph/Store cannot affect the parsing of another. + - Expression Transformer - - + - Represents the XPath fn:normalize-unicode() function + Base Class for Parser Contexts - + - Creates a new XPath Normalize Unicode function + RDF Handler used to handle the generated RDF - Expression - + - Creates a new XPath Normalize Unicode function + Is Parsing Traced? 
- Expression - Normalization Form - + - Gets the Value of the function as applied to the given String Literal + Creates a new Base Parser Context - Simple/String typed Literal - + Graph to parse into - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a new Base Parser Context - Simple/String typed Literal - Argument - + Graph to parse into + Whether to trace parsing - + - Gets the String representation of the function + Creates a new Base Parser Context - + RDF Handler - + - Gets the Functor of the Expression + Creates a new Base Parser Context + RDF Handler + Whether to trace parsing - + - Transforms the Expression using the given Transformer + Gets the Handler used to handle the generated RDF - Expression Transformer - - + - Represents the XPath fn:replace() function + Gets/Sets whether to trace parsing - + - Creates a new XPath Replace function + Gets the Namespace Map for the parsing context - Text Expression - Search Expression - Replace Expression - + - Creates a new XPath Replace function + Gets the Base URI for the parsing context - Text Expression - Search Expression - Replace Expression - Options Expression - + - Configures the Options for the Regular Expression + Class for Parser Contexts for Tokeniser based Parsing - Node detailing the Options - Whether errors should be thrown or suppressed - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Tokeniser - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Is Tokeniser traced? 
- - + - Gets the enumeration of Variables involved in this Expression + Local Tokens - + - Gets the Type of the Expression + Creates a new Tokenising Parser Context with default settings + Graph to parse into + Tokeniser to use - + - Gets the Functor of the Expression + Creates a new Tokenising Parser Context with custom settings + Graph to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Gets the Arguments of the Expression + Creates a new Tokenising Parser Context with custom settings + Graph to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets whether an expression can safely be evaluated in parallel + Creates a new Tokenising Parser Context with custom settings + Graph to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Transforms the Expression using the given Transformer + Creates a new Tokenising Parser Context with default settings - Expression Transformer - + RDF Handler + Tokeniser to use - + - Represents the XPath fn:starts-with() function + Creates a new Tokenising Parser Context with custom settings + RDF Handler + Tokeniser to use + Tokeniser Queue Mode - + - Creates a new XPath Starts With function + Creates a new Tokenising Parser Context with custom settings - Expression - Prefix Expression + RDF Handler + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a new Tokenising Parser Context with custom settings - Simple/String typed Literal - Argument - + RDF Handler + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the String representation of the function + Gets the Token Queue - - + - Gets the Functor of the Expression + Gets the Local Tokens stack - + - Transforms the Expression using the given Transformer + Gets/Sets whether tokeniser 
tracing is used - Expression Transformer - - + - Represents the XPath fn:string-length() function + Base class for SPARQL Results Parser Contexts - + - Creates a new XPath String Length function + Controls parser tracing behaviour - Expression - + - Gets the Value of the function as applied to the given String Literal + Creates a new Results Parser Context - Simple/String typed Literal - + Result Set + Whether to trace parsing - + - Gets the String representation of the function + Creates a new Results Parser Context - + Result Set - + - Gets the Functor of the Expression + Creates a new Parser Context + Results Handler + Whether to trace parsing - + - Transforms the Expression using the given Transformer + Creates a new Results Parser Context - Expression Transformer - + Results Handler - + - Represents the XPath fn:contains() function + Gets the Results Handler to be used - + - Creates a new XPath Contains function + Gets the Variables that have been seen - Expression - Search Expression - + - Gets the Value of the function as applied to the given String Literal and Argument + Gets/Sets whether Parser Tracing is used - Simple/String typed Literal - Argument - - + - Gets the String representation of the function + Class for Tokenising SPARQL Results Parser Contexts - - + - Gets the Functor of the Expression + Tokeniser - + - Transforms the Expression using the given Transformer + Is Tokeniser traced? 
- Expression Transformer - - + - Represents the XPath fn:substring-after() function + Local Tokens - + - Creates a new XPath Substring After function + Creates a new Tokenising Parser Context with default settings - Expression - Search Expression + Result Set to parse into + Tokeniser to use - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a new Tokenising Parser Context with custom settings - Simple/String typed Literal - Argument - + Result Set to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Gets the String representation of the function + Creates a new Tokenising Parser Context with custom settings - + Result Set to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Functor of the Expression + Creates a new Tokenising Parser Context with custom settings + Result Set to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Transforms the Expression using the given Transformer + Creates a new Tokenising Parser Context with default settings - Expression Transformer - + Results Handler + Tokeniser to use - + - Represents the XPath fn:substring-before() function + Creates a new Tokenising Parser Context with custom settings + Results Handler + Tokeniser to use + Tokeniser Queue Mode - + - Creates a new XPath Substring Before function + Creates a new Tokenising Parser Context with custom settings - Expression - Search Expression + Results Handler + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Value of the function as applied to the given String Literal and Argument + Creates a new Tokenising Parser Context with custom settings - Simple/String typed Literal - Argument - + Results Handler + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the String representation of the function + Gets 
the Token Queue - - + - Gets the Functor of the Expression + Gets the Local Tokens stack - + - Transforms the Expression using the given Transformer + Gets/Sets whether tokeniser tracing is used - Expression Transformer - - + - Represents the XPath fn:substring() function + Base Class for Store Parser Contexts - + - Creates a new XPath Substring function + Is Parsing Traced? - Expression - Start - + - Creates a new XPath Substring function + Creates a new Store Parser Context - Expression - Start - Length + RDF Handler + Whether to trace parsing - + - Returns the value of the Expression as evaluated for a given Binding as a Literal Node + Creates a new Store Parser Context - Evaluation Context - Binding ID - + RDF Handler - + - Gets the Variables used in the function + Creates a new Base Store Parser Context + Triple Store - + - Gets the String representation of the function + Creates a new Base Parser Context - + Triple Store + Whether to trace parsing - + - Gets the Type of the Expression + Gets/Sets whether to trace parsing - + - Gets the Functor of the Expression + Gets the RDF Handler that is in-use - + - Gets the Arguments of the Expression + Gets the Namespace Map for the parser context - + - Gets whether an expression can safely be evaluated in parallel + Gets the Base URI for the parser context - + - Transforms the Expression using the given Transformer + Class for Store Parser Contexts for Tokeniser based Parsing - Expression Transformer - - + - Represents the XPath fn:upper-case() function + Tokeniser - + - Creates a new XPath Upper Case function + Is Tokeniser traced? 
- Expression - + - Gets the Value of the function as applied to the given String Literal + Local Tokens - Simple/String typed Literal - - + - Gets the String representation of the function + Creates a new Tokenising Store Parser Context with default settings - + Store to parse into + Tokeniser to use - + - Gets the Functor of the Expression + Creates a new Tokenising Store Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Transforms the Expression using the given Transformer + Creates a new Tokenising Store Parser Context with custom settings - Expression Transformer - + Store to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Represents the XPath boolean() function + Creates a new Tokenising Store Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Creates a new XPath Boolean Function + Creates a new Tokenising Store Parser Context with default settings - Expression to compute the Effective Boolean Value of + Store to parse into + Tokeniser to use - + - Evaluates the expression + Creates a new Tokenising Store Parser Context with custom settings - Evaluation Context - Binding ID - + Store to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Gets the String representation of the function + Creates a new Tokenising Store Parser Context with custom settings - + Store to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Functor of the Expression + Creates a new Tokenising Store Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Type of the Expression + Gets the Token Queue - + - Transforms the Expression using the given Transformer + Gets the Local Tokens stack - Expression 
Transformer - - + - Represents an Unknown Function that is not supported by dotNetRDF + Gets/Sets whether tokeniser tracing is used - - - This exists as a placeholder class so users may choose to parse Unknown Functions and have them appear in queries even if they cannot be evaluated. This is useful when you wish to parse a query locally to check syntactic validity before passing it to an external query processor which may understand how to evaluate the function. Using this placeholder also allows queries containing Unknown Functions to still be formatted properly. - - - + - Creates a new Unknown Function that has no Arguments + Interface for Event Parser contexts - Function URI + Event Type - + - Creates a new Unknown Function that has a Single Argument + Queue of Events - Function URI - Argument Expression - + - Creates a new Unknown Function that has multiple Arguments + Represents an incomplete Triple as part of the RDFa parsing process - Function URI - Argument Expressions - + - Gives null as the Value since dotNetRDF does not know how to evaluate Unknown Functions + Creates a new Incomplete Triple - Evaluation Context - Binding ID - + Predicate + Direction - + - Gets the Variables used in the Function + Gets the Predicate of the Incomplete Triple - + - Gets the Expression Type + Gets the Direction of the Incomplete Triple - + - Gets the Function URI of the Expression + Possible Directions for Incomplete Triples - + - Gets the Arguments of the Expression + Forward - + - Gets whether an expression can safely be evaluated in parallel + Reverse - + - Gets the String representation of the Expression + Interface for Parser Contexts - - + - Transforms the Expression using the given Transformer + Gets the RDF Handler which is used to instantiate Nodes and to handle the generated RDF - Expression Transformer - - + - Interface for implementing SPARQL custom expression factories which turn URI specified functions into SPARQL Expressions + Gets/Sets whether Parser 
Tracing should be used (if the Parser supports it) - + - Tries to Create a SPARQL Expression for a function with the given URI and set of arguments + Gets the Namespace Map for the Handler - URI of the function - List of Arguments - Dictionary of Scalar Arguments which are supportable by aggregates when Syntax is set to SPARQL 1.1 Extended - Resulting Expression if able to generate - True if an expression is generated, false if not - + - Gets the Extension Function URIs that this Factory provides + Gets the Base URI for the Handler - + - Gets the Extension Aggregate URIs that this Factory provides + Interface for Parser Contexts which use Tokeniser based parsing - + - Factory Class for generating Expressions for Sparql Extension Functions + Gets/Sets whether Tokenisation is Traced - - - Allows for users of the Library to implement and register Custom Expression Factories which can generate Expressions for their own Extension functions which they wish to use in their SPARQL queries. Custom factories may be globally scoped by registering them with the AddCustomFactory() method or locally by passing them to the three argument constructor of the CreateExpression() method. 
- - - + - List of Custom Expression factories + Gets the Local Tokens Stack - - All the standard function libraries (XPath, Leviathan and ARQ) included in dotNetRDF are automatically registered - - + - Tries to create an Expression from the given function Uri and list of argument expressions + Gets the Token Queue - Function Uri - List of Argument Expressions - - - - Uses only the globally scoped custom expression factories - - - + - Tries to create an Expression from the given function Uri and list of argument expressions + Interface for SPARQL Results Parser Contexts - Function Uri - List of Argument Expressions - Enumeration of locally scoped expression factories to use - - - - Globally scoped custom expression factories are tried first and then any locally scoped expression factories are used - - - + - Tries to create an Expression from the given function Uri and list of argument expressions + Gets the SPARQL Results Handler to use - Function Uri - List of Argument Expressions - Scalar Arguments - Enumeration of locally scoped expression factories to use - - - - Globally scoped custom expression factories are tried first and then any locally scoped expression factories are used - - - + - Registers a Custom Expression Factory + Interface for Store Parser Contexts - A Custom Expression Factory - + - Gets the Global Custom Expression Factories that are in use + Gets the RDF Handler which is used to instantiate Nodes and to handle the generated RDF - + - - Namespace containing all the classes related to the execution of expressions in SPARQL queries. Any valid expression should be able to be modelled and executed using these clases. 
- + Gets/Sets whether Parser Tracing should be used (if the Parser supports it) - + - Namespace containing expression classes pertaining to arithmetic operations + Gets the Namespace Map for the Handler - + - Class representing Arithmetic Addition expressions + Gets the Base URI for the Handler - + - Creates a new Addition Expression + Parser Context for RDF/JSON Parsers - Left Hand Expression - Right Hand Expression - + - Calculates the Numeric Value of this Expression as evaluated for a given Binding + Creates a new JSON Parser Context - Evaluation Context - Binding ID - + Graph to parse into + JSON Text Reader to read from - + - Gets the String representation of this Expression + Creates a new JSON Parser Context - + RDF Handler to use + JSON Text Reader to read from - + - Gets the Type of the Expression + Gets the JSON Text Reader which input is read from - + - Gets the Functor of the Expression + Gets the Current Position of the JSON Text Reader - + - Transforms the Expression using the given Transformer + Gets the Position range from the given Start Position to the current Position - Expression Transformer + Start Position - + - Class representing Arithmetic Division expressions + Parser Context for Notation 3 Parsers - + - Creates a new Division Expression + Creates a new Notation 3 Parser Context with default settings - Left Hand Expression - Right Hand Expression + Graph to parse into + Tokeniser to use - + - Calculates the Numeric Value of this Expression as evaluated for a given Binding + Creates a new Notation 3 Parser Context with custom settings - Evaluation Context - Binding ID - + Graph to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Gets the String representation of this Expression + Creates a new Notation 3 Parser Context with custom settings - + Graph to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Type of the Expression + Creates a new Notation 3 Parser Context with custom 
settings + Graph to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Functor of the Expression + Creates a new Notation 3 Parser Context with default settings + RDF Handler to use + Tokeniser to use - + - Transforms the Expression using the given Transformer + Creates a new Notation 3 Parser Context with custom settings - Expression Transformer - + RDF Handler to use + Tokeniser to use + Tokeniser Queue Mode - + - Class representing Unary Minus expressions (sign of numeric expression is reversed) + Creates a new Notation 3 Parser Context with custom settings + RDF Handler to use + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Creates a new Unary Minus Expression + Creates a new Notation 3 Parser Context with custom settings - Expression to apply the Minus operator to + RDF Handler to use + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Calculates the Numeric Value of this Expression as evaluated for a given Binding + Gets/Sets whether Keywords Mode is in use - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets the list of in-use Keywords - - + - Gets the Type of the Expression + Gets the Variable Context for Triples - + - Gets the Functor of the Expression + Pushes the current in-scope Graph onto the Graph stack and creates a new empty Graph to be the in-scope Graph + + Used for Graph Literal parsing - Base Uri and Namespace Maps of the outermost Graph is propogated to the innermost Graph + - + - Transforms the Expression using the given Transformer + Pops a Graph from the Graph stack to become the in-scope Graph - Expression Transformer - + + Used for Graph Literal parsing + - + - Class representing Arithmetic Multiplication expressions + Gets the current sub-graph (if any) - + - Creates a new Multiplication Expression + Gets whether the Context is 
currently for a Graph Literal - Left Hand Expression - Right Hand Expression - + - Calculates the Numeric Value of this Expression as evaluated for a given Binding + Evaluation Context for RDFa Parsers - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Creates a new RDFa Evaluation Context - + Base URI - + - Gets the Type of the Expression + Creates a new RDFa Evaluation Context + Base URI + Namepace Map - + - Gets the Functor of the Expression + Gets/Sets the Base URI - + - Transforms the Expression using the given Transformer + Gets/Sets the Parent Subject - Expression Transformer - - + - Class representing Arithmetic Subtraction expressions + Gets/Sets the Parent Object - + - Creates a new Subtraction Expression + Gets the Namespace Map - Left Hand Expression - Right Hand Expression - + - Calculates the Numeric Value of this Expression as evaluated for a given Binding + Gets/Sets the Language - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets the list of incomplete Triples - - + - Gets the Type of the Expression + Gets/Sets the Local Vocabulary - + - Gets the Functor of the Expression + Parser Context for RDFa Parsers - + - Transforms the Expression using the given Transformer + Creates a new Parser Context - Expression Transformer - + Graph + XML Document - + - Namespace containing expression classes pertaining to comparison operations + Creates a new Parser Context + Graph + HTML Document + Whether to Trace Parsing - + - Class representing Relational Equality expressions + Creates a new Parser Context + RDF Handler to use + HTML Document + Whether to Trace Parsing - + - Creates a new Equality Expression + Creates a new Parser Context - Left Hand Expression - Right Hand Expression + RDF Handler to use + HTML Document - + - Evaluates the expression + Gets the HTML Document - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + 
Gets/Sets whether xml:base is allowed in the embedded RDF - - + - Gets the Type of the Expression + Gets/Sets the Default Vocabularly - + - Gets the Functor of the Expression + Gets/Sets the RDFa syntax in use - + - Transforms the Expression using the given Transformer + Parser Context for RDF/XML Parser - Expression Transformer - - + - Class representing Relational Greater Than Expressions + Creates a new Parser Context + Graph + XML Document - + - Creates a new Greater Than Relational Expression + Creates a new Parser Context - Left Hand Expression - Right Hand Expression + Graph + XML Document + Whether to Trace Parsing - + - Evaluates the expression + Creates a new Parser Context - Evaluation Context - Binding ID - + RDF Handler + XML Document - + - Gets the String representation of this Expression + Creates a new Parser Context - + RDF Handler + XML Document + Whether to Trace Parsing - + - Gets the Type of the Expression + Creates a new Parser Context which uses Streaming parsing + Graph + Stream - + - Gets the Functor of the Expression + Creates a new Parser Context which uses Streaming parsing + RDF Handler + Stream - + - Transforms the Expression using the given Transformer + Creates a new Parser Context which uses Streaming parsing - Expression Transformer - + Graph + Input - + - Class representing Relational Greater Than or Equal To Expressions + Creates a new Parser Context which uses Streaming parsing + RDF Handler + Input - + - Creates a new Greater Than or Equal To Relational Expression + Gets the Event Queue - Left Hand Expression - Right Hand Expression - + - Evaluates the expression + Gets the Mapping of in-use IDs - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + SPARQL JSON Parser Context - - + - Gets the Type of the Expression + Creates a new Parser Context + JSON Text Reader + Results Handler - + - Gets the Functor of the Expression + Creates a new Parser Context + JSON Text Reader + SPARQL Result 
Set - + - Transforms the Expression using the given Transformer + Gets the JSON Text Reader - Expression Transformer - - + - Class representing Relational Less Than Expressions + Parser Context for SPARQL Query parser - + - Creates a new Less Than Relational Expression + Creates a new SPARQL Query Parser Context with default settings - Left Hand Expression - Right Hand Expression + Tokeniser to use - + - Evaluates the expression + Creates a new SPARQL Query Parser Context with custom settings - Evaluation Context - Binding ID - + Tokeniser to use + Tokeniser Queue Mode - + - Gets the String representation of this Expression + Creates a new SPARQL Query Parser Context with custom settings - + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Type of the Expression + Creates a new SPARQL Query Parser Context with custom settings + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Functor of the Expression + Creates a new SPARQL Query Parser Context for parsing sub-queries + Parent Query Parser Context + Tokens that need parsing to form a subquery - + - Transforms the Expression using the given Transformer + Creates a new Query Parser Context from the given Token Queue - Expression Transformer - + Token Queue - + - Class representing Relational Less Than or Equal To Expressions + Gets the Query that this Parser Context is populating - + - Creates a new Less Than or Equal To Relational Expression + Gets/Sets whether the Query Verb has been seen - Left Hand Expression - Right Hand Expression - + - Evaluates the expression + Returns whether this Parser Context is for a sub-query - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets/Sets the Syntax that should be supported - - + - Gets the Type of the Expression + Gets/Sets the default Base Uri to resolve relative URIs against - + - Gets the Functor of the Expression + Gets 
the Expression Parser - + - Transforms the Expression using the given Transformer + Gets the Property Path Parser - Expression Transformer - - + - Class representing Relational Non-Equality expressions + Gets/Sets the current Graph Pattern ID - + - Creates a new Non-Equality Expression + Gets a new Blank Node ID - Left Hand Expression - Right Hand Expression + - + - Evaluates the expression + Gets the mapping of in use Blank Nodes IDs - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets the last Blank Node ID that was issued - - + - Gets the Type of the Expression + Gets/Sets whether Blank Node scoping must be checked + + If false then only name tracking will be done to prevent auto-generated IDs colliding with user allocated IDs + - + - Gets the Functor of the Expression + Gets the Next Available Alias ID for aliasing Project Expressions and Aggregates which don't have an Aggregate Specified - + - Transforms the Expression using the given Transformer + Gets the Custom Expression Factories valid for this Parser - Expression Transformer - - + - Namespace containing expression classes pertaining to conditional operations + Parser Context for SPARQL RDF Parser - + - Class representing Conditional And expressions + Creates a new Parser Context + Graph to parse from + Results Handler - + - Creates a new Conditional And Expression + Creates a new Parser Context - Left Hand Expression - Right Hand Expression + Graph to parse from + Results Handler - + - Evaluates the expression + Gets the Graph being parsed from - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Parser Context for SPARQL Update Parser - - + - Gets the Type of the Expression + Creates a new SPARQL Update Parser Context + Tokeniser - + - Gets the Functor of the Expression + Creates a new SPARQL Update Parser Context with custom settings + Tokeniser to use + Tokeniser Queue Mode - + - Transforms the Expression using the 
given Transformer + Creates a new SPARQL Update Parser Context with custom settings - Expression Transformer - + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Class representing logical Not Expressions + Creates a new SPARQL Update Parser Context with custom settings + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Creates a new Negation Expression + Gets the Update Command Set that is being populated - Expression to Negate - + - Evaluates the expression + Gets the Expression Parser - Evaluation Context - Binding ID - - + - Gets the String representation of this Expression + Gets the Path Parser - - + - Gets the Type of the Expression + Gets the Query Parser - + - Gets the Functor of the Expression + Gets the Namespace Map - + - Transforms the Expression using the given Transformer + Gets/Sets the locally scoped custom expression factories - Expression Transformer - - + - Class representing Conditional Or expressions + Gets the set of BNodes used in INSERT DATA commands so far - + - Creates a new Conditional Or Expression + Parser Context for SPARQL XML Results parsers - Left Hand Expression - Right Hand Expression - + - Evaluates the expression + Creates a new Parser Context - Evaluation Context - Binding ID - + XML Reader + Results Handler - + - Gets the String representation of this Expression + Creates a new Parser Context - + XML Reader + Results Set to load into - + - Gets the Type of the Expression + Gets the XML Reader - + - Gets the Functor of the Expression + Parser Context class for TriG Parsers - + - Transforms the Expression using the given Transformer + Creates a new TriG Parser Context with default settings - Expression Transformer - + Store to parse into + Tokeniser to use - + - Namespace containing expression classes representing primary constructs in SPARQL expression trees i.e. 
constants, modifiers and variables + Creates a new TrigG Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Class for representing Aggregate Expressions which have Numeric Results + Creates a new TriG Parser Context with custom settings + Store to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Creates a new Aggregate Expression Term that uses the given Aggregate + Creates a new TriG Parser Context with custom settings - Aggregate + Store to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Evaluates the aggregate expression + Creates a new TriG Parser Context with default settings - Evaluation Context - Binding ID - + Store to parse into + Tokeniser to use - + - Gets the Aggregate this Expression represents + Creates a new TrigG Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode - + - Gets the String representation of the Aggregate Expression + Creates a new TriG Parser Context with custom settings - + Store to parse into + Tokeniser to use + Whether to trace parsing + Whether to trace tokenisation - + - Gets the enumeration of variables that are used in the the aggregate expression + Creates a new TriG Parser Context with custom settings + Store to parse into + Tokeniser to use + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Type of the Expression + Gets/Sets whether the Default Graph exists - + - Gets the Functor of the Expression + Gets/Sets the Syntax to be used - + - Gets the Arguments of the Expression + Parser Context for Turtle parsing - + - Gets whether the expression can be parallelised + Creates a new Turtle Parser Context with default settings + Graph to parse into + Tokeniser to use + Turtle Syntax - + - Transforms the Expression using the given Transformer + Creates a new Turtle Parser 
Context with custom settings - Expression Transformer - + Graph to parse into + Tokeniser to use + Turtle Syntax + Tokeniser Queue Mode - + - Class for representing the All Modifier + Creates a new Turtle Parser Context with custom settings + Graph to parse into + Tokeniser to use + Turtle Syntax + Whether to trace parsing + Whether to trace tokenisation - + - Throws a NotImplementedException since this class is a placeholder and only used in parsing + Creates a new Turtle Parser Context with custom settings - SPARQL Evaluation Context - Binding ID - + Graph to parse into + Tokeniser to use + Turtle Syntax + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Returns an empty enumerable + Creates a new Turtle Parser Context with default settings + RDF Handler + Tokeniser to use + Turtle Syntax - + - Gets the Type of the Expression + Creates a new Turtle Parser Context with custom settings + RDF Handler + Tokeniser to use + Turtle Syntax + Tokeniser Queue Mode - + - Gets the Functor of the Expression + Creates a new Turtle Parser Context with custom settings + RDF Handler + Tokeniser to use + Turtle Syntax + Whether to trace parsing + Whether to trace tokenisation - + - Gets the Arguments of the Expression + Creates a new Turtle Parser Context with custom settings + RDF Handler + Tokeniser to use + Turtle Syntax + Tokeniser Queue Mode + Whether to trace parsing + Whether to trace tokenisation - + - Gets the String representation of the Expression + Gets the Turtle Syntax being used - - + - Transforms the Expression using the given Transformer + Function for unescaping QNames - Expression Transformer - - + - Gets whether an expression can safely be evaluated in parallel + Namespace for Event classes which are used to support Event Based parsing of RDF syntaxes - + - Class for representing constant terms + Abstract Base Class for IEvent implementations - + - Node this Term represents + Creates a new Event + Event Type + Position 
Information - + - Creates a new Constant + Creates a new Event - Valued Node + Event Type - + - Creates a new Constant + Gets the Type for this Event - Node - + - Evaluates the expression + Gets the Position Information (if any) - Evaluation Context - Binding ID - + + Availability of Position Information depends on the how the source document was parsed + - + - Gets the String representation of this Expression + Abstract Base Class for IRdfXmlEvent implementations - - + - Gets an Empty Enumerable since a Node Term does not use variables + Creates an Event and fills in its Values + Type of the Event + Source XML that generated the Event + Position of the XML Event - + - Gets the Type of the Expression + Creates an Event and fills in its Values + Type of the Event + Source XML that generated the Event - + - Gets the Functor of the Expression + Gets the XML that this Event was generated from - + - Gets the Arguments of the Expression + Abstract Base Class for IRdfAEvent implementations - + - Gets whether an expression can safely be evaluated in parallel + Creates a new RDFa Event + Event Type + Position Info + Attributes - + - Node this Term represents + Gets the attributes of the event i.e. 
the attributes of the source element - + - Transforms the Expression using the given Transformer + Gets whether the Event has a given attribute - Expression Transformer + Attribute Name - + - Class for representing the Distinct Modifier + Gets the value of a specific attribute + Attribute Name + - + - Throws a NotImplementedException since this class is a placeholder and only used in parsing + Represents a Queue of events for use by event based parsers - SPARQL Evaluation Context - Binding ID - - + - Returns an empty enumerable + Queue of Events - + - Gets the Type of the Expression + Creates a new Event Queue - + - Gets the Functor of the Expression + Creates a new Event Queue with the given Event Generator + Event Generator - + - Gets the Arguments of the Expression + Dequeues and returns the next event in the Queue + - + - Gets whether an expression can safely be evaluated in parallel + Adds an event to the end of the Queue + Event - + - Gets the String representation of the Expression + Peeks and returns the next event in the Queue - + - Transforms the Expression using the given Transformer + Clears the Queue - Expression Transformer - - + - Class for representing Graph Pattern Terms (as used in EXISTS/NOT EXISTS) + Gets the number of events currently in the Queue - + - Creates a new Graph Pattern Term + Represents a Queue of events which are streamed from an instance of a IJitEventGenerator for use by an event based parser - Graph Pattern - + - Gets the value of this Term as evaluated for the given Bindings in the given Context + Creates a new Streaming Event Queue - - - + Event Generator - + - Gets the Graph Pattern this term represents + Gets the Count of events in the queue - + - Gets the Variables used in the Expression + Adds an event to the Queue + Event - + - Gets the Type of the Expression + Gets the next event from the Queue and removes it from the Queue + - + - Gets the Functor of the Expression + Gets the next event from the Queue while leaving the 
Queue unchanged + - + - Gets the Arguments of the Expression + An wrapper which exposes a subset of an event queue + The type of event queued - + - Gets whether an expression can safely be evaluated in parallel + Create a new wrapper that exposes a subset of specific event queue + The event queue to be wrapper + The number of events to leave in the wrapped queue. When the wrapped event + queue contains this number of events or fewer, this wrapper will treat it as an empty queue - + + + + + + + + + + + + + + + + - Transforms the Expression using the given Transformer + Interface for parser events - Expression Transformer - - + - Class representing Variable value expressions + Gives some Integer representing the Event Type as understood by a specific Parser implementation - + - Creates a new Variable Expression + Gets the Position Information (if any) - Variable Name + + Availability of Position Information depends on the how the source document was parsed + - + - Evaluates the expression + Interface for Event Generators - Evaluation Context - Binding ID - + + + An Event Generator is a class which takes an input stream which contains XML and generates a series of events from it + + + This interface is a marker interface which indicates that the class is an event generator, implementations should implement one of the concrete derived interfaces as appropriate to their mode of operation. 
+ + - + - Gets the String representation of this Expression + Interface for pre-processing event generators - + Event Type + Event Parser Context Type - + - Gets the enumeration containing the single variable that this expression term represents + Gets all available events + Context - + - Gets the Type of the Expression + Interface for event generators which generate all RDF/XML events in one go prior to parsing taking place - + - Gets the Functor of the Expression + Interface for Just-in-time event generators + Event Type - + - Gets the Arguments of the Expression + Gets the next available event + - + - Gets whether an expression can safely be evaluated in parallel + Gets whether the Event Generator has finished reading events i.e. there are no further events available - + - Transforms the Expression using the given Transformer + Interface for RDF/XML event generators which generate events as required during the parsing process - Expression Transformer - - + - An Expression Transformer is a class that can traverse a SPARQL Expression tree and apply transformations to it + Interface for implementing Event Queues which provide Bufferable wrappers to Event Generators - + - Transforms the expression using this transformer + Removes the first Event from the Queue - Expression - + - Abstract implementation of an Expression Transformer which substitutes primary expressions + Adds an Event to the end of the Queue + Event to add - + - Transforms an expression into a form where primary expressions may be substituted + Gets the first Event from the Queue without removing it - Expression - + - Returns the substitution for a given primary expression + Gets the Event Generator that this Queue uses - Expression - - + - Expression Factory which generates ARQ Function expressions + Clears the Event Queue - - - Designed to help provide feature parity with the ARQ query engine contained in Jena - - - + - ARQ Function Namespace + Gets the number of Events in the Queue - + - Constants 
for ARQ Numeric functions + Gets/Sets whether Generator Tracing should be used - + - Constants for ARQ Numeric functions + Gets the Event Type of the last Event dequeued - + - Constants for ARQ Numeric functions + Abstract base implementation of an Event Queue - + - Constants for ARQ Numeric functions + Generator used to fill the Event Queue - + - Constants for ARQ Graph functions + Variable indicating whether Generator Tracing is enabled - + - Constants for ARQ Graph functions + Type of Last Event dequeued - + - Constants for ARQ Graph functions + Dequeues an Event from the Queue + - + - Constants for ARQ String functions + Adds an Event to the Queue + Event - + - Constants for ARQ String functions + Gets the next Event from the Queue without removing it from the queue + - + - Constants for ARQ String functions + Gets the Event Generator used by the Queue - + - Constants for ARQ Miscellaneous functions + Clears the Event Queue - + - Constants for ARQ Miscellaneous functions + Gets the number of Events in the Queue - + - Array of Extension Function URIs + Gets/Sets Tracing for the Event Queue - + - Tries to create an ARQ Function expression if the function Uri correseponds to a supported ARQ Function + Gets the Event Type of the last Event dequeued - Function Uri - Function Arguments - Scalar Arguments - Generated Expression - Whether an expression was successfully generated - + - Gets the Extension Function URIs supported by this Factory + Interface for RDFa events - + - Gets the Extension Aggregate URIs supported by this Factory + Gets the attributes of the event i.e. 
the attributes of the source element - + - Abstract base class for Unary Expressions + Gets whether the Event has a given attribute + Attribute Name + - + - The sub-expression of this Expression + Gets the value of a specific attribute + Attribute Name + - + - Creates a new Base Unary Expression + Interface for RDF/XML events as defined by the RDF/XML Specification - Expression + Used to provide polymorphism for the XML/RDF Parser - + - Evaluates the expression + Gets the XML that produced the given event (if any) - Evaluuation Context - Binding ID - - + - Gets the String representation of the Expression + A DOM Based event generator for RDF/XML parser that uses System.Xml DOM to parse events - - + - Gets an enumeration of all the Variables used in this expression + Creates a new DOM Based event generator + XML Document - + - Gets the Type of the Expression + Creates a new DOM Based event generator + Input Stream - + - Gets the Functor of the Expression + Creates a new DOM Based event generator + Input File - + - Gets the Arguments of the Expression + Gets all events from the XML DOM + Parser Context - + - Gets whether an expression can safely be evaluated in parallel + Given an XML Node that is the Root of the RDF/XML section of the Document Tree creates the RootEvent and generates the rest of the Event Tree by recursive calls to the GenerateEvents method + Parser Context + XML Node that is the Root of the RDF/XML section of the Document Tree + - + - Transforms the arguments of the expression using the given transformer + Given an XML Node creates the relevant RDF/XML Events for it and recurses as necessary - Expression Transformer + Parser Context + The Node to create Event(s) from + The Parent Node of the given Node - + - Abstract base class for Binary Expressions + Checks whether a given XML Node can be discarded as it does not have any equivalent Event in the RDF/XML Syntax model + XML Node to test + True if the Node can be ignored + Comment and Text Nodes are 
ignored. Text Nodes will actually become Text Events but we'll access the Text using the InnerText property of the Element Nodes instead - + - The sub-expressions of this Expression + Takes the Event Tree and Flattens it into a Queue as per the rules laid out in the RDF/XML Specification + Parser Context + Event which is the Root of the Tree (not necessarily a RootEvent) + A numeric value used for Parser Tracing to indicate nesting levels of the Event Tree - + - The sub-expressions of this Expression + A JIT event generator for RDF/XML parsing that uses Streaming parsing to parse the events + + Currently unimplemented stub class + - + - Creates a new Base Binary Expression + Creates a new Streaming Event Generator - Left Expression - Right Expression + Stream - + - Evaluates the expression + Creates a new Streaming Event Generator - Evaluation Context - Binding ID - + Stream + Base URI - + - Gets the String representation of the Expression + Creates a new Streaming Event Generator - + Text Reader - + - Gets an enumeration of all the Variables used in this expression + Creates a new Streaming Event Generator + Text Reader + Base URI - + - Gets the Type of the Expression + Creates a new Streaming Event Generator + Filename - + - Gets the Functor of the Expression + Creates a new Streaming Event Generator + Filename + Base URI - + - Gets the Arguments of the Expression + Initialises the XML Reader settings + - + - Gets whether an expression can safely be evaluated in parallel + Gets the next event from the XML stream + - + - Transforms the arguments of the expression using the given transformer + Gets whether the event generator has finished generating events - Expression Transformer - - + - Expression Factory which generates Leviathan Function expressions + Static Class which defines the Event Types for RDF/XML Events - + - Leviathan Function Namespace + Constants for Event Types - + - Constants for Leviathan String Functions + Constants for Event Types - + - 
Constants for Leviathan String Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Constants for Event Types - + - Constants for Leviathan Numeric Functions + Event representing the Root Node of the Document - + - Constants for Leviathan Numeric Functions + Creates a new Root Event + Base Uri of the Document + Source XML of the Document + Position Info - + - Constants for Leviathan Numeric Functions + Creates a new Root Event + Base Uri of the Document + Source XML of the Document - + - Constants for Leviathan Numeric Functions + Gets/Sets the ElementEvent that represents the actual DocumentElement - + - Constants for Leviathan Numeric Functions + Gets all the Child ElementEvents of the Document Root - + - Constants for Leviathan Numeric Functions + Gets the Base Uri of the Node - + - Constants for Leviathan Numeric Functions + Gets the Language of the Node - + - Constants for Leviathan Numeric Functions + Event representing a Node from the XML Document - + - Constants for Leviathan Numeric Functions + Creates a new Element Event + QName of the XML Node + Base Uri of the XML Node + Source XML of the XML Node + 
Position Info - + - Constants for Leviathan Numeric Functions + Creates a new Element Event + QName of the XML Node + Base Uri of the XML Node + Source XML of the XML Node - + - Constants for Leviathan Numeric Functions + Creates new Element Event + Local Name of the XML Node + Namespace Prefix of the XML Node + Base Uri of the XML Node + Source XML of the XML Node + Position Info - + - Constants for Leviathan Numeric Functions + Creates new Element Event + Local Name of the XML Node + Namespace Prefix of the XML Node + Base Uri of the XML Node + Source XML of the XML Node - + - Constants for Leviathan Numeric Functions + Gets the Local Name of this Element Event - + - Constants for Leviathan Numeric Functions + Gets the Namespace of this Element Event - + - Constants for Leviathan Numeric Functions + Gets the QName of this Element Event - + + + Gets the Child Element Events + + These correspond to the Child Nodes of the XML Node + + - Constants for Leviathan Numeric Functions + Gets/Sets the Base Uri of the XML Node - + - Constants for Leviathan Boolean Aggregates + Gets the Attribute Events + These correspond to the Attributes of the XML Node (with some exceptions as defined in the RDF/XML specification) - + - Constants for Leviathan Boolean Aggregates + Gets the Namespace Attribute Events + + These correspond to all the Namespace Attributes of the XML Node + - + - Constants for Leviathan Boolean Aggregates + Gets/Sets the List Counter - + - Constants for Leviathan Numeric Aggregates + Gets/Sets the Language of this Event - + - Constants for Leviathan Numeric Aggregates + Gets/Sets the Subject Event of this Event + Will be assigned according to the Parsing rules during the Parsing process and later used to generate a Subject Node - + - Constants for other Leviathan Aggregate + Gets/Sets the Subject Node of this Event + Will be created from the Subject at some point during the Parsing process - + - Constants for other Leviathan Aggregate + Gets/Sets the Parse Type 
for this Event - + - Array of Extension Function URIs + Method which sets the Uri for this Element Event + Uri Reference to set Uri from + This can only be used on Elements which are rdf:li and thus need expanding into actual list elements according to List Expansion rules. Attempting to set the Uri on any other Element Event will cause an Error message. - + - Array of Extension Aggregate URIs + Gets the String representation of the Event + - + - Tries to create an Leviathan Function expression if the function Uri correseponds to a supported Leviathan Function + An Event for representing the End of Elements - Function Uri - Function Arguments - Scalar Arguments - Generated Expression - Whether an expression was successfully generated - + - Gets the Extension Function URIs supported by this Factory + Creates a new EndElementEvent - + - Gets the Extension Aggregate URIs supported by this Factory + Creates a new EndElementEvent - + - Expression Factory which generates SPARQL Function expressions + An Event for representing Attributes of XML Node - - This supports the requirement of SPARQL 1.1 that all functions can be accessed via URI as well as by keyword. 
This also means that SPARQL 1.1 functions can be used in SPARQL 1.0 mode by using their URIs instead of their keywords and they are then treated simply as extension functions - - + - Namespace Uri for SPARQL Built In Functions Namespace + Creates a new Attribute Event from an XML Attribute + QName of the Attribute + Value of the Attribute + Source XML of the Attribute + Position Info - + - Tries to create a SPARQL Function expression if the function Uri correseponds to a supported SPARQL Function + Creates a new Attribute Event from an XML Attribute - Function Uri - Function Arguments - Scalar Arguments - Generated Expression - Whether an expression was successfully generated + QName of the Attribute + Value of the Attribute + Source XML of the Attribute - + - Gets the URIs of available SPARQL Functions + Creates a new Attribute Event from an XML Attribute + Local Name of the Attribute + Namespace Prefix of the Attribute + Value of the Attribute + Source XML of the Attribute + Position Info - + - Gets the URIs of available SPARQL Aggregates + Creates a new Attribute Event from an XML Attribute + Local Name of the Attribute + Namespace Prefix of the Attribute + Value of the Attribute + Source XML of the Attribute - + - Expression Factory which generates XPath Function expressions + Gets the Local Name of the Attribute - + - Namespace Uri for XPath Functions Namespace + Gets the Namespace Prefix of the Attribute - + - Constant representing the XPath boolean functions + Gets the QName of the Attribute - + - Constant representing the XPath boolean functions + Gets the Value of the Attribute - + - Constants representing the names of XPath String functions + An Event for representing Namespace Attributes of an XML Node - + - Constants representing the names of XPath String functions + Creates a new Namespace Attribute Event + Namespace Prefix + Namespace Uri + Source XML + Position Info - + - Constants representing the names of XPath String functions + Creates a new 
Namespace Attribute Event + Namespace Prefix + Namespace Uri + Source XML - + - Constants representing the names of XPath String functions + Gets the Namespace Prefix - + - Constants representing the names of XPath String functions + Gets the Namespace Uri - + - Constants representing the names of XPath String functions + An Event for representing Language Attributes of an XML Node - + - Constants representing the names of XPath String functions + Creates a new Language Attribute Event + Language + Source XML + Position Info - + - Constants representing the names of XPath String functions + Creates a new Language Attribute Event + Language + Source XML - + - Constants representing the names of XPath String functions + Gets the Language - + - Constants representing the names of XPath String functions + An Event for representing rdf:parseType Attributes of an XML Node - + - Constants representing the names of XPath String functions + Creates a new Parse Type Attribute Event + Parse Type + Source XML + Position Info - + - Constants representing the names of XPath String functions + Creates a new Parse Type Attribute Event + Parse Type + Source XML - + - Constants representing the names of XPath String functions + Gets the Parse Type - + - Constants representing the names of XPath String functions + An Event for representing xml:base attributes of XML Nodes - + - Constants representing the names of XPath String functions + Creates a new XML Base Attribute + Base URI + Source XML + Position Info - + - Constants representing the names of XPath String functions + Creates a new XML Base Attribute + Base URI + Source XML - + - Constants representing the names of XPath String functions + Gets the Base URI - + - Constants representing the names of XPath String functions + Event for representing plain text content (XML Text Nodes) - + - Constants representing the names of XPath String functions + Creates a new Text Node + Textual Content of the XML Text Node + Source XML of 
the Node + Position Info - + - Constants representing the XPath Boolean constructor functions + Creates a new Text Node + Textual Content of the XML Text Node + Source XML of the Node - + - Constants representing the XPath Boolean constructor functions + Gets the Textual Content of the Event - + - Constants representing the XPath Numeric Functions + Gets the String representation of the Event + - + - Constants representing the XPath Numeric Functions + Event for representing URIRefs - + - Constants representing the XPath Numeric Functions + Creates a new URIRef Event from a URIRef in an XML Attribute value or similar + URIRef + Source XML of the URIRef + Position Info - + - Constants representing the XPath Numeric Functions + Creates a new URIRef Event from a URIRef in an XML Attribute value or similar + URIRef + Source XML of the URIRef - + - Constants representing the XPath Numeric Functions + Gets the URIRef - + - Constants representing the XPath Date Time functions + Event for representing QNames - + - Constants representing the XPath Date Time functions + Creates a new QName Event + QName + Source XML of the QName + Position Info - + - Constants representing the XPath Date Time functions + Creates a new QName Event + QName + Source XML of the QName - + - Constants representing the XPath Date Time functions + Gets the QName - + - Constants representing the XPath Date Time functions + Event for representing the need for a Blank Node - + - Constants representing the XPath Date Time functions + Creates a new Blank Node ID Event for a named Blank Node + Node ID for the Blank Node + Source XML + Position Info - + - Constants representing the XPath Date Time functions + Creates a new Blank Node ID Event for a named Blank Node + Node ID for the Blank Node + Source XML - + - Constants representing the XPath Date Time functions + Creates a new Blank Node ID Event for an anonymous Blank Node + Source XML + Position Info - + - Constants representing the Normalization Form 
values supported by the XPath normalize-unicode() function + Creates a new Blank Node ID Event for an anonymous Blank Node + Source XML - + - Constants representing the Normalization Form values supported by the XPath normalize-unicode() function + Gets the Blank Node ID (if any) - + - Constants representing the Normalization Form values supported by the XPath normalize-unicode() function + An Event for representing Plain Literals - + - Constants representing the Normalization Form values supported by the XPath normalize-unicode() function + Creates a new Plain Literal Event + Value of the Literal + Language Specifier of the Literal + Source XML of the Event + Position Info - + - Constants representing the Normalization Form values supported by the XPath normalize-unicode() function + Creates a new Plain Literal Event + Value of the Literal + Language Specifier of the Literal + Source XML of the Event - + - Argument Type Validator for validating that a Literal either has no datatype or is a String + Gets the Value of the Plain Literal - + - Argument Type Validator for validating that a Literal has an Integer datatype + Gets the Langugage Specifier of the Plain Literal - + - Argument Type Validator for validating that a Literal has a Numeric datatype + An Event for representing Typed Literals - + - Tries to create an XPath Function expression if the function Uri correseponds to a supported XPath Function + Creates a new Typed Literal Event - Function Uri - Function Arguments - Scalar Arguments - Generated Expression - Whether an expression was successfully generated + Value of the Literal + DataType Uri of the Literal + Source XML of the Event + Position Info - + - Gets the Extension Function URIs supported by this Factory + Creates a new Typed Literal Event + Value of the Literal + DataType Uri of the Literal + Source XML of the Event - + - Gets the Extension Aggregate URIs supported by this Factory + Gets the Value of the Typed Literal - + - Numeric Types for 
Sparql Numeric Expressions + Gets the DataType of the Typed Literal - All Numeric expressions in Sparql are typed as Integer/Decimal/Double - + - Not a Number + Gets the String representation of the Event + - + - An Integer + An Event for representing that the Event Queue should be cleared of previously queued events - + - A Decimal + Creates a new Clear Queue Event - + - A Single precision Floating Point + + Namespace for RDF and SPARQL Results Handlers + + + Handlers are a powerful low level part of the parsers API, they allow you to parse RDF, RDF Datasets and SPARQL Results in such a way that you can take arbitrary actions with the data and choose to end parsing as soon as desired. + - + - A Double precision Floating Point + A RDF Handler which just determines whether any Triples are present terminating parsing as soon as the first triple is received - + - SPARQL Expression Types + Creates a new Any Handler - + - The Expression is a Primary Expression which is a leaf in the expression tree + Gets whether any Triples have been parsed - + - The Expression is a Unary Operator which has a single argument + Starts handling RDF by resetting the Any flag to false - + - The Expression is a Binary Operator which has two arguments + Handles Base URIs by ignoring them + Base URI + - + - The Expression is a Function which has zero/more arguments + Handles Namespaces by ignoring them + Prefix + Namespace URI + - + - The Expression is an Aggregate Function which has one/more arguments + Handles Triples by setting the Any flag and terminating parsing + Triple + - + - The Expression is a Set Operator where the first argument forms the LHS and all remaining arguments form a set on the RHS + Gets that this handler does not accept all triples since it stops as soon as it sees the first triple - + - The Expression is a Unary Operator that applies to a Graph Pattern + Abstract Base Class for Handlers - + - Gets the SPARQL Expression Type + Creates a new Handler - + - Gets the 
Function Name or Operator Symbol - function names may be URIs of Keywords or the empty string in the case of primary expressions + Creates a new Handler using the given Node Factory + Node Factory - + - Gets the Arguments of this Expression + Gets/Sets the in-use Node Factory - + - Interface for SPARQL Expression Terms that can be used in Expression Trees while evaluating Sparql Queries + Creates a Blank Node + - + - Evalutes a SPARQL Expression for the given binding in a given context + Creates a Blank Node with the given ID - Evaluation Context - Binding ID + Node ID - - - Newly introduced in Version 0.6.0 to replace the variety of functions that were used previously for numeric vs non-numeric versions to allow our code to be simplified and improve performance - - - + - Gets an enumeration of all the Variables used in an expression + Creates a Graph Literal Node + - + - Transforms the arguments of the expression using the given transformer + Creates a Graph Literal Node with the given sub-graph - Expression Transformer + Sub-graph - + - Gets whether an expression can safely be evaluated in parallel + Creates a Literal Node with the given Datatype + Value + Datatype URI + - + - Abstract base class for SPARQL Operators which also makes their configuration serializable + Creates a Literal Node + Value + - + - Gets the operator this implementation represents + Creates a Literal Node with the given Language + Value + Language + - + - Gets whether the operator can be applied to the given inputs + Creates a URI Node - Inputs - True if applicable to the given inputs + URI + - + - Applies the operator + Creates a Variable Node - Inputs + Variable Name - + - Serializes the configuration of the operator + Gets the next available Blank Node ID - Serialization Context + - + - Abstract base operator for date time operations + Abstract Base Class for RDF Handlers - + - Gets whether the arguments are applicable for this operator + Creates a new RDF Handler - Arguments - - + - 
Abstract base operator for time span operations + Creates a new RDF Handler using the given Node Factory + Node Factory - + - Gets whether the operator is applicable for the arguments + Starts the Handling of RDF - Arguments - - + - Represents the time span subtraction operator + Optionally used by derived Handlers to do additional actions on starting RDF handling - - Allows queries to subtract time spans from each other - - + - Gets the operator type + Ends the Handling of RDF + Whether the parsing completed without error - + - Applies the operator + Optionally used by derived Handlers to do additional actions on ending RDF handling - Arguments - + Whether the parsing completed without error - + - Represents the date time subtraction operation + Handles Namespace declarations - - Allows queries to subtract a duration from a date time - + Prefix + Namespace URI + - + - Gets the operator type + Optionally used by derived Handlers to do additional actions on handling namespace declarations + Prefix + Namespace URI + - + - Applies the operator + Handles Base URI declarations - Arguments + Base URI - + - Represents the date time addition operator + Optionally used by derived Handlers to do additional actions on handling Base URI declarations - - - Allows for queries to add durations to date times - - + Base URI + - + - Gets the operator type + Handles Triples + Triple + - + - Applies the operator + Must be overridden by derived handlers to take appropriate Triple handling action - Arguments + Triple - + - Represents the time span addition operation + Gets whether the Handler will accept all Triples i.e. 
it will never abort handling early - - Allows queries to add time spans together - - + - Gets the operator type + Abstract Base Class for SPARQL Results Handlers - + - Applies the operator + Creates a new SPARQL Results Handler - Arguments - + Node Factory - + - - Namespace which provides implementations of which allow for embedding date time arithmetic into SPARQL queries - + Creates a new SPARQL Results Handler - + - Interface which represents an operator in SPARQL e.g. + + Starts Results Handling - + - Gets the Operator this is an implementation of + Optionally used by derived classes to take additional actions on starting Results Handling - + - Gets whether the operator can be applied to the given inputs + Ends Results Handling - Inputs - True if applicable to the given inputs + Whether parsing completed without error - + - Applies the operator to the given inputs + Optionally used by derived classes to take additional actions on ending Results Handling - Inputs - - Thrown if an error occurs in applying the operator + Whether parsing completed without error - + - Represents the numeric addition operator + Handles a Boolean Results + Result - + - Gets the operator type + Must be overridden by derived handlers to appropriately handle boolean results + Result - + - Applies the operator + Handles a Variable declaration - Arguments + Variable Name - + - Abstract base class for numeric operators + Must be overridden by derived handlers to appropriately handle variable declarations + Variable Name + - + - Operator is applicable if at least one input and all inputs are numeric + Handlers SPARQL Results - Inputs + Result - + - Represents the numeric multiplication operator + Must be overridden by derived handlers to appropriately handler SPARQL Results + Result + - + - Gets the operator type + A RDF Handler which wraps another Handler allowing handling to be cancelled - + - Applies the operator + Creates a new Cancellable Handler - Arguments - + - + - Represents the 
numeric division operator + Gets the Inner Handler wrapped by this Handler - + - Gets the operator type + Starts RDF Handling on the inner Handler - + - Applies the operator + Ends RDF Handling on the inner Handler - Arguments - + Indicates whether parsing completed without error - + - Represents the numeric subtraction operator + Handles Base URIs by passing them to the inner handler and cancelling handling if it has been requested + Base URI + - + - Gets the operator type + Handles Namespace Declarations by passing them to the inner handler and cancelling handling if it has been requested + Namespace Prefix + Namespace URI + - + - Applies the operator + Handles Triples by passing them to the inner handler and cancelling handling if it has been requested - Arguments + Triple - + - - Namespace which provides implementations of which provide the default numeric implementations of operators as required by the SPARQL specification - + Gets that this Handler does not accept all Triples - + - Registry of SPARQL Operators + Informs the Handler that it should cancel handling at the next point possible assuming handling has not already completed - + - Initializes the Operators registry + A Handler which passes the RDF to be handled through a sequence of Handlers where Handling is terminated as soon as any Handler returns false + + + This differs from the MultiHandler in that as soon as any Handler indicates that handling should stop by returning false handling is immediately terminated. 
All Handlers will always have their StartRdf and EndRdf methods called + + - + - Registers a new operator + Creates a new Chained Handler - Operator + Inner Handlers to use - + - Removes the registration of an operator by instance reference + Gets the Inner Handlers used by this Handler - Operator Reference - + - Removes the registration of an operator by instance type of the operator + Starts the Handling of RDF for each inner handler - Operator - + - Resets Operator registry to default state + Ends the Handling of RDF for each inner handler + Whether parsing completed without errors - + - Returns whether the given operator is registered + Handles Base URIs by getting each inner handler to attempt to handle it - Operator + Base URI - Checking is done both by reference and instance type so you can check if an operator is registered even if you don't have the actual reference to the instance that registered + Handling terminates at the first Handler which indicates handling should stop - + - Gets all registered Operators + Handles Namespaces by getting each inner handler to attempt to handle it + Namespace Prefix + Namespace URI + + Handling terminates at the first Handler which indicates handling should stop + - + - Gets all registered operators for the given Operator Type + Handles Triples by getting each inner handler to attempt to handle it - Operator Type + Triple + + Handling terminates at the first Handler which indicates handling should stop + - + - Tries to return the operator which applies for the given inputs + Gets that this Handler accepts all Triples if all inner handlers do so - Operator Type - Operator - Inputs - - + - Possible SPARQL operand types + A RDF Handler which simply counts the Triples - + - Addition + Creates a Handler which counts Triples - + - Subtraction + Resets the current count to zero - + - Multiplication + Handles the Triple by incrementing the Triple count + Triple + - + - Division + Gets the Count of Triples handled in the most 
recent parsing operation + + Note that each time you reuse the handler the count is reset to 0 + - + - - Namespace which provides classes which represent the implementation of various operators in SPARQL. This allows for some of the basic operators like + and - to be extended to allow functionality beyond the SPARQL specification such as date time arithmetic. - + Gets that the Handler accepts all Triples - + - An Algebra Optimiser which implements the Filtered Product optimisation + A RDF Handler which asserts Triples into a Graph - - - A filtered product is implied by any query where there is a product over a join or within a BGP around which there is a Filter which contains variables from both sides of the product. So rather than computing the entire product and then applying the filter we want to push filter application into the product computation. - - - + - Optimises the Algebra to use implict joins where applicable + Creates a new Graph Handler - Algebra - + Graph - + - Returns that this optimiser is applicable to all queries + Gets the Base URI of the Graph currently being parsed into - Query - - + - Returns that this optimiser is applicable to all updates + Gets the Graph that this handler wraps - Updates - - + - An Algebra Optimiser which implements the Implicit Join optimisation + Starts Handling RDF ensuring that if the target Graph is non-empty RDF is handling into a temporary Graph until parsing completes successfully - - - An implict join is implied by a query like the following: - -
-            SELECT *
-            WHERE
-            {
-              ?x a ?type .
-              ?y a ?type .
-              FILTER (?x = ?y) .
-            }
-            
- - Such queries can be very expensive to calculate, the implict join optimisation attempts to substitute one variable for the other and use a BIND to ensure both variables are visible outside of the graph pattern affected i.e. the resulting query looks like the following: - -
-            SELECT *
-            WHERE
-            {
-              ?x a ?type .
-              ?x a ?type .
-              BIND (?x AS ?y)
-            }
-            
- - Under normal circumstances this optimisation is only used when the implict join is denoted by a SAMETERM expression or the optimiser is sure the variables don't represent literals (they never occur in the Object position) since when value equality is involved substituing one variable for another changes the semantics of the query and may lead to unexpected results. Since this optimisation may offer big performance benefits for some queries (at the cost of potentially incorrect results) this form of the optimisation is allowed when you set to true. - - - This optimiser is also capable of generating special algebra to deal with the case where there is an implicit join but the substitution based optimisation does not apply because variables cannot be substituted into the inner algebra, in this case a is generated instead. - -
- + - Optimises the Algebra to use implict joins where applicable + Ends Handling RDF discarding the handled Triples if parsing failed (indicated by false for the ok parameter) and otherwise merging the handled triples from the temporary graph into the target graph if necessary - Algebra - + Indicates whether parsing completed OK - + - Determines whether an expression is an Implicit Join Expression + Handles Namespace Declarations by adding them to the Graphs Namespace Map - Expression - LHS Variable - RHS Variable - Whether the expression is an equals (true) or a same term (false) + Namespace Prefix + Namespace URI - + - Returns that this optimiser is applicable to all queries + Handles Base URI Declarations by setting the Graphs Base URI - Query + Base URI - + - Returns that this optimiser is applicable to all updates + Handles Triples by asserting them in the Graph - Updates + - + - An optimizer that handles a special case for ORDER BY + DISTINCT combinations which can significantly improve performance by eliminating duplicates prior to sorting when the default SPARQL behaviour is to do a potentially costly sort over many duplicates and then eliminate distincts. + Gets that this Handler accepts all Triples - - Only applies to queries which meet the following criteria: -
    -
  • Has an ORDER BY and a DISTNCT on the same level of the query
  • -
  • Selects a fixed list of variables i.e. not a SELECT DISTINCT *
  • -
  • All variables used in the ORDER BY expressions also occur in the project list
  • -
-
- + - Optimizes the given algebra + A RDF Handler that rewrites the Graph URIs of Triples before passing them to an inner handler - Algebra - Optimized algebra - + - Returns true if the query is a SELECT DISTINCT or SELECT REDUCED and has an ORDER BY + Creates a new Graph URI rewriting handler - Query - + Handler to wrap + Graph URI to rewrite to - + - Returns that this is not applicable to updates + Gets the Inner Handler - Update commands - - + - An algebra optimiser that looks for property functions specified by simple triple patterns in BGPs and replaces them with actual property function patterns + Starts handling of RDF - + - Optimises the algebra to include property functions + Ends handling of RDF - Algebra - + Whether parsing completed OK - + - Returns that the optimiser is applicable + Handles a Base URI declaration - Query + Base URI - + - Returns that the optimiser is applicable + Handles a Namespace declaration - Update Commands + Namespace Prefix + Namespace URI - - - An optimiser for walking algebra and expression trees and replacing a Variable with another Variable or a Constant - - - - - Create a transform that replaces one variable with another - - Find Variable - Replace Variable - - + - Create a transform that replaces a variable with a constant + Handles a Triple by rewriting the Graph URI and passing it to the inner handler - Find Variable - Replace Constant + Triple + - + - Gets/Sets whethe the Transformer is allowed to replace objects + Returns true since this handler accepts all triples - - - The transformer will intelligently select this depending on whether it is replacing with a constant (defaults to true) or a variable (defaults to false), when replacing a variable the behaviour changes automatically. If you set it explicitly the transformer will respect your setting regardless. 
- - - + - Attempts to do variable substitution within the given algebra + Static Class of extension methods for use with Handler classes - Algebra - - + - Returns false because this optimiser is never globally applicable + Gets the Base URI from the RDF Handler - Query + RDF Handler - + - Returns false because this optimiser is never globally applicable + Applies the triples of a Graph to an RDF Handler - Update Commands - + RDF Handler + Graph - + - Tries to substitute variables within primary expressions + Applies the triples to an RDF Handler - Expression - + RDF Handler + Triples - + - - Namespace containing classes that are used in the Optimisation of SPARQL Queries. Includes the interfaces IQueryOptimiser and IAlgebraOptimiser which can be used to implement custom query optimisation. - + Applies the result set to a Results Handler + Results Handler + Result Set - + - Abstract Base Class for Algebra Transformers where the Transformer may care about the depth of the Algebra in the Algebra Tree + A Results Handler which extracts URIs from one/more variables in a Result Set - + - Attempts to optimise an Algebra to another more optimal form + Creates a new List URIs Handler - Algebra - + Variable to build the list from - + - Transforms the Algebra to another form tracking the depth in the Algebra tree + Creates a new List URIs Handler - Algebra - Depth - + Variables to build the list from - + - Determines whether the Optimiser can be applied to a given Query + Gets the URIs - Query - - + - Determines whether the Optimiser can be applied to a given Update Command Set + Starts handling results - Command Set - - + - A basic abstract implementation of a Query Optimiser + Handles boolean results - - - Derived implementations may use override the virtual properties to control what forms of optimisation are used. 
Derived implementations must override the GetRankingComparer() method, optimisers which do not wish to change the order of Triple Patterns should return the NoReorderCompaper in their implementation as a basic sort of Triple Patterns is done even if ShouldReorder is overridden to return false - - + Result - + - Causes the Graph Pattern to be optimised if it isn't already + Handles variable declarations - Graph Pattern - Variables that have occurred prior to this Pattern + Variable + - + - Gets a comparer on Triple Patterns that is used to rank Triple Patterns + Handles results by extracting any URI values from the relevant variables + Result - - By overriding this in derived classes you can change how the Optimiser weights different patterns and thus the resultant ordering of Triple Patterns - - + - Controls whether the Optimiser will attempt to reorder Triple Patterns + A Results Handler which extracts Literals from one/more variables in a Result Set - - It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. - - + - Controls whether the Optimiser will split Filters + Creates a new List Strings handler - - - If a Filter contains an and expression it may be split into its constituent parts and applied separately. This option only applies if filter placement also applies. - - - Defaults to false since it is unclear if this actually benefits performance - - + Variable to build the list from - + - Controls whether the Optimiser will place Filters + Creates a new List Strings handler - - It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. 
- + Variables to build the list from - + - Controls whether the Optimiser will place Assignments + Gets the Strings - - It is recommended that derived classes do not change this setting as this may hurt performance. If you want to control the optimisation process in detail we suggest you implement IQueryOptimiser directly in your own class and not derive from this implementation. - - + - Tries to reorder patterns when the initial ordering is considered poor + Starts handling results - Graph Pattern - Variables that are desired - Point at which to start looking for better matches - Point at which to move the better match to - + - Tries to place filters at the earliest point possible i.e. the first point after which all required variables have occurred + Handles boolean results - Graph Pattern - Filter to place - + Result - + - Tries to place assignments at the earliest point possible i.e. the first point after which all required variables have occurred + Handles variable declarations - Graph Pattern - Assignment (LET/BIND) + Variable - + - An Algebra Optimiser that optimises Algebra to use LazyBgp's wherever possible + Handles results by extracting strings from relevant variables + Result + - + - Optimises an Algebra to a form that uses LazyBgp where possible + A Handler which passes the RDF to be handled to multiple Handlers where Handling terminates in the handling request where one of the Handlers returns false - Algebra - Depth - - By transforming a query to use LazyBgp we can achieve much more efficient processing of some forms of queries + This differs from ChainedHandler in that even if one Handler indicates that handling should stop by returning false all the Handlers still have a chance to handle the Base URI/Namespace/Triple before handling is terminated. 
All Handlers will always have their StartRdf and EndRdf methods called - + - Determines whether the query can be optimised for lazy evaluation + Creates a new Multi Handler - Query - + Inner Handlers for this Handler - + - Returns that the optimiser does not apply to SPARQL Updates + Creates a new Multi Handler with a known Node Factory - Updates - + Inner Handlers for this Handler + Node Factory to use for this Handler - + - An Algebra Optimiser that optimises Algebra to use AskBgp's wherever possible + Gets the Inner Handlers used by this Handler - + - Optimises an Algebra to a form that uses AskBgp where possible + Starts RDF Handling by starting handling on all inner handlers - Algebra - Depth + + + + Ends RDF Handling by ending handling on all inner handlers + + Whether parsing completed without error + + + + Handles Base URIs by getting all inner handlers to handle the Base URI + + Base URI - - By transforming a query to use AskBgp we can achieve much more efficient processing of some forms of queries - + Handling ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first - + - Determines whether the query can be optimised for ASK evaluation + Handles Namespace Declarations by getting all inner handlers to handle it - Query + Namespace Prefix + Namespace URI + + Handling ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first + - + - Returns that the optimiser does not apply to SPARQL Updates + Handles Triples by getting all inner handlers to handler it - Updates + Triple + + Handling ends if any of the Handlers indicates it should stop but all Handlers are given the chance to finish the current handling action first + - + - An Algebra Optimiser is a class that can transform a SPARQL algebra from one form to another typically for optimisation purposes + Gets whether this Handler accepts all Triples based on 
whether all inner handlers do so - + - Optimises the given Algebra + A SPARQL Results Handler which loads directly into a Multiset - Algebra to optimise - - Important: An Algebra Optimiser must guarantee to return an equivalent algebra to the given algebra. In the event of any error the optimiser should still return a valid algebra (or at least the original algebra) + Primarily intended for internal usage for future optimisation of some SPARQL evaluation - + - Determines whether an Optimiser is applicable based on the Query whose Algebra is being optimised + Creates a new Multiset Handler - SPARQL Query - + Multiset - + - Determines whether an Optimiser is applicable based on the Update Command Set being optimised + Handles a Boolean Result by doing nothing - Update Command Set - + Boolean Result - + - An Algebra Optimiser which implements the Identity Filter optimisation + Handles a Variable by adding it to the Multiset + Variable + - + - Optimises the Algebra to use Identity Filters where applicable + Handles a Result by adding it to the Multiset - Algebra + Result - + - Determines whether an expression is an Identity Expression + A RDF Handler that ignores everything it handles - Expression - Variable - Term - Whether it is an equals expression (true) or a same term expression (false) - + + Useful if you simply want to parse some RDF to see if it parses and don't care about the actual data being parsed + - + - Returns that this optimiser is applicable to all queries + Creates a new Null Handler - Query - - + - Returns that this optimiser is applicable to all updates + Handles a Triple by doing nothing - Updates + Triple - + - A Query Optimiser is a class which optimises Graph Patterns in a Query by reordering Graph Patterns + Indicates that the Handler accepts all Triples - + - Attempts to optimise the given Graph Pattern + A RDF Handler which wraps another handler passing only the chunk of triples falling within a given limit and offset to the underlying 
Handler - Graph Pattern - Variables that are present prior to the pattern + + This handler does not guarantee that you will receive exactly the chunk specified by the limit and offset for two reasons: +
    +
  1. It does not perform any sort of data de-duplication so it is possible that if this handler receives duplicate triples and the underlying handler performs de-duplication then you may see less triples than you expect in your final output since although the underlying handler will receive at most the specified chunk size of triples it may not retain them all
  2. +
  3. If there are fewer triples than the chunk size or if the chunk exceeds the bounds of the data then you will only receive the triples that fall within the chunk (if any)
  4. +
+
- + - An Algebra Optimiser which looks for unions and joins that can be evaluated in parallel to improve query evaluation speed in some cases + Creates a new Paging Handler + Inner Handler to use + Limit + Offset - - Using this feature allows you to use experimental parallel SPARQL evaluation optimisations which may improve query evaluation speed for some queries. A query must either use UNION or have joins which are disjoint in order for any parallel evaluation to take place. - - - Users should be aware that using this optimiser may actually increase evaluation speed in some cases e.g. where either side of a disjoint join will return empty especially when it is the left hand side that will do so. - - - Also note that while use of this optimiser should not cause queries to return incorrect results as it does not change the semantics of the evaluation as it only parallelises independent operators we cannot guarantee that all parallelised queries will return identical results to their non-parallelised counterparts. If you find a query that you believe is giving incorrect results when used with this optimiser please test without the optimiser enabled to check that the apparent incorrect result is not an artifact of this optimisation. 
- + If you just want to use an offset and not apply a limit then set limit to be less than zero - + - Optimises the algebra to use parallelised variants of Join and Union where possible + Creates a new Paging Handler - Algebra - + Inner Handler to use + Limit - + - Returns that the optimser is applicable to all queries + Gets the Inner Handler wrapped by this Handler - Query - - + - Returns that the optimiser is not applicable to updates + Starts RDF Handler - Updates - - + - Default SPARQL Query Optimiser + Ends RDF Handler + Indicated whether parsing completed without error - + - Gets the Default Comparer for Triple Patterns to rank them + Handles a Triple by passing it to the Inner Handler only if the Offset has been passed and the Limit has yet to be reached + Triple + + Terminates handling immediately upon the reaching of the limit + - + - SPARQL Query Optimiser which does no reordering + Handles Namespace Declarations by allowing the inner handler to handle it + Namespace Prefix + Namespace URI + - + - Gets that Triple Patterns should not be reordered + Handles Base URI Declarations by allowing the inner handler to handle it + Base URI - + - Gets a comparer which does not change the order of Triple Patterns + Gets whether the Handler will accept all Triples based on its Limit setting - - + - A Comparer which ranks all Triple Patterns as equal + A SPARQL Results Handler which just counts Results + + Note: For a Boolean Result Set the counter will either be 1 for true or 0 for false + - + - Compares two Triple Patterns are always returns that they are ranking equal + Creates a new Result Count Handler - First Triple Pattern - Second Triple Pattern - - + - Static Helper class which provides global registry of Algebra Optimisers and the global Query Optimiser + Starts Results Handling and resets the counter to zero - + - Namespace URI for the Optimiser Statistics vocabulary + Handles a Boolean Result + Result - + - Gets/Sets the global Query Optimiser that is 
used by default + Handles a Variable Declaration - - - Note: May be overridden by the Optimiser setting on a SparqlQueryParser - - - Unlike previous releases a Query may be reoptimised using a different optimiser if desired by calling the Optimise() method again and providing a different Optimiser. This may not always fully reoptimise the query since the first optimisation will have caused any Filters and Assignments to be placed in the Triple Pattern - - - Warning: Setting this to null has no effect, to disable automatic optimisation use the global property Options.QueryOptimisation. Even with this option disabled a Query can still be optimised manually by calling its Optimise() method. - - + Variable Name + - + - Gets the global Algebra Optimisers that are in use + Handles a SPARQL Result by incrementing the counter - - - Unlike Query Optimisation multiple Algebra Optimisations may apply. Algebra optimisers may also be specified and apply locally by the use of the relevant properties on the SparqlQueryParser and SparqlQuery classes. Those specified on a parser will automatically be passed through to all queries parsed by the parser. Locally specified optimisers apply prior to globally specified ones. 
- - + Result + - + - Adds a new Algebra Optimiser + Gets the Count of Results - Optimiser + + For Boolean Results counter will be either 1 or 0 depending on whether the result was True/False + - + - Removes an Algebra Optimiser + A SPARQL Results Handler which loads Results into a SparqlResultSet - - + - Resets Optimisers to default settings + Creates a new Result Set Handler + Result Set - + - The Strict Algebra Optimiser is an optimiser that takes our BGPs which typically contain placed FILTERs and BINDs and transforms them into their strict algebra form using Filter() and Extend() + Starts Results Handling - + - Optimises BGPs in the Algebra to use Filter() and Extend() rather than the embedded FILTER and BIND + Handles a Boolean Result by setting the Result property of the Result Set - Algebra to optimise - + Result - + - Returns that the optimiser is applicable to all queries + Handles a Variable Declaration by adding the Variable to the Result Set - Query + Variable Name - + - Returns that the optimiser is applicable to all updates + Handles a Result by adding it to the Result Set - Updates + Result - - - Abstract implementation of an algebra optimiser and expression transformer which optimises the algebra to replace any Node terms with Virtual Node terms for more efficient querying of virtualised RDF data - - Node ID Type - Graph ID Type - - + - Virtual RDF Provider + A SPARQL Results Handler which allows you to load multiple Result Sets into a single SparqlResultSet which the standard ResultSetHandler does not permit - + - Creates a new Virtual Algebra Optimiser + Creates a new Merging Result Set Handler - Virtual RDF Provider + Result Set - + - Optimises the algebra so that all Node terms are virtualised + Overrides the base classes logic to avoid the empty check on the Result Set thus allowing multiple result sets to be merged - Algebra - - + - Transforms an expression so Node terms are virtualised + A RDF Handler which simply counts the Triples and 
Graphs - Expression - - + - Substitutes a primary expression which is a Node term for a virtual Node term + Creates a new Store Count Handler - Expression - - + - Creates a virtual Node based on a given Value + Starts RDF Handling by reseting the counters - Node ID - Value - - + - Returns that the optimiser is applicable to all queries + Handles Triples/Quads by counting the Triples and distinct Graph URIs - Query + Triple - + - Returns that the optimiser is applicable to all updates + Gets the count of Triples - Updates - - + - A concrete implementation of a Virtual Algebra Optimiser where the virtual IDs are simply integers + Gets the count of distinct Graph URIs - + - Creates a new Simple Virtual Algebra Optimiser + Gets that this Handler accepts all Triples - Virtual RDF provider - + - Creates a new Virtual Node using the virtual RDF provider this optimiser was instantiated with + A RDF Handler that loads Quads into a ITripleStore instance - Virtual Node ID - Node Value - - + - The Weighted Optimiser is a Query Optimiser that orders Triple Patterns based on weighting computed calculated against + Creates a new Store Handler + Triple Store - + - Default Weight for Subject Terms + Gets the Triple Store that this Handler is populating - + - Default Weight for Predicate Terms + Handles namespaces by adding them to each graph + Namespace Prefix + Namespace URI + - + - Default Weight for Object Terms + Handles Triples by asserting them into the appropriate Graph creating the Graph if necessary + Triple + - + - Default Weight for Variables + Starts handling RDF - + - Creates a new Weighted Optimiser + Ends RDF handling and propogates all discovered namespaces to all discovered graphs + Whether parsing completed successfully - + - Creates a new Weighted Optimiser which reads weights from the given RDF Graph + Gets that the Handler accepts all Triples - Graph - + - Creates a new Weighted Optimiser which reads weights from the given RDF Graph + An RDF Handler which wraps 
another handler, stripping explicit xsd:string datatypes on object literals - Graph - Default Subject Weight - Default Predicate Weight - Default Object Weight - + - Gets the comparer used to order the Triple Patterns based on their computed weightings + Creates a new StripStringHandler - + Inner handler to use - + - Represents Weightings for the WeightedOptimiser + Handles triples by stripping explicit xsd:string datatype on object literals before delegating to inner handler - + - Class for representing Graph Patterns in Sparql Queries + Gets the handler wrapped by this handler - + - Creates a new Graph Pattern + Starts inner handler - + - Creates a new Graph Pattern copied from an existing Graph Pattern + Ends inner handler - Graph Pattern - + - Adds a Triple Pattern to the Graph Pattern respecting any BGP breaks + Delegates base Uri handling to inner handler - Triple Pattern - + - Adds an Assignment to the Graph Pattern respecting any BGP breaks + Delegates namespace handling to inner handler - Assignment Pattern - + - Adds a Filter to the Graph Pattern + Gets whether inner handler accepts all triples - Filter - + - Resets the set of Unplaced Filters to be a new set of + A decorator for handlers which ensures that all blank nodes get unique IDs even if a blank node identifier is reused - Filters + + + In most parsing scenarios this handler is not suitable for usage as it may unintentionally modify the RDF data being parsed, in non-parsing scenarios where this handler is instead being used as a means to generate RDF data from some non-RDF source it may prove very useful. + + + This handler essentially works by redirecting all calls to the argument taking form of with the non-argument form which should always generate a new blank node thus guaranteeing the uniqueness of nodes. 
+ + - + - Adds a child Graph Pattern to the Graph Pattern respecting any BGP breaks + Creates a new Unique Blank Nodes handler - Graph Pattern + - + - Adds inline data to a Graph Pattern respecting any BGP breaks + Gets the inner handler - - + - Tells the Graph Pattern that any subsequent Graph/Triple Patterns added go in a new BGP + Creates a Blank Node + Node ID which will be ignored by this Handler + - + - Swaps the position of the two given Triple Patterns + Starts handling RDF - First Position - Second Position - - Intended for use by Query Optimisers - - + - Inserts a Filter at a given position + Ends handling RDF - Filter - Position to insert at - - Intended for use by Query Optimisers - + Whether parsing completed OK - + - Inserts an Assignment at a given position + Handles a Base URI declaration - Assignment - Position to insert at - - Intended for use by Query Optimisers - + Base URI + - + - Gets/Sets whether the Graph Pattern is Optional + Handles a Namespace declaration + Prefix + Namespace URI + - + - Gets/Sets whether the Graph Pattern is Filtered + Handles a Triple + Triple + - + - Gets/Sets whether the Graph Pattern is a Union of its Child Graph Patterns + Gets whether the inner handler accepts all - + - Gets/Sets whether the Graph Pattern operates on a specific Graph + A RDF Handler which writes the handled Triples out to a TextWriter using a provided ITripleFormatter - + - Gets whether this is an empty Graph Pattern + Creates a new Write-Through Handler + Triple Formatter to use + Text Writer to write to + Whether to close the writer at the end of RDF handling - + - Gets/Sets whether the Graph Pattern is an EXISTS clause + Creates a new Write-Through Handler + Triple Formatter to use + Text Writer to write to - + - Gets/Sets whether the Graph Pattern is a NOT EXISTS clause + Creates a new Write-Through Handler + Type of the formatter to create + Text Writer to write to + Whether to close the writer at the end of RDF handling - + - Gets/Sets 
whether the Graph Pattern is a MINUS clause + Creates a new Write-Through Handler + Type of the formatter to create + Text Writer to write to - + - Gets/Sets whether the Graph Pattern is a SERVICE clause + Starts RDF Handling instantiating a Triple Formatter if necessary - + - Gets whether Optimisation has been applied to this query + Ends RDF Handling closing the TextWriter being used if the setting is enabled - - This only indicates that an Optimiser has been applied to the Pattern. You can always reoptimise by calling the Optimise() method with an optimiser of your choice on the query to which this Pattern belongs - + Indicates whether parsing completed without error - + - Gets whether Evaluation Errors in this Graph Pattern are suppressed (currently only valid with SERVICE) + Handles Namespace Declarations passing them to the underlying formatter if applicable + Namespace Prefix + Namespace URI + - + - Gets whether this Graph Pattern contains an Inline Data block (VALUES clause) + Handles Base URI Declarations passing them to the underlying formatter if applicable + Base URI + - + - Determines whether the Graph Pattern has any kind of Modifier (GRAPH, MINUS, OPTIONAL etc) applied + Handles Triples by writing them using the underlying formatter + Triple + - + - Gets/Sets the FILTER that applies to this Graph Pattern + Gets that the Handler accepts all Triples - + - Gets/Sets the Graph Specifier that applies to this Graph Pattern + A Results Handler which writes the handled Results out to a TextWriter using a provided IResultFormatter - - This property is also used internally for SERVICE specifiers to save adding an additional property unnecessarily - - + - Checks whether this Pattern has any Child Graph Patterns + Creates a new Write-Through Handler + Triple Formatter to use + Text Writer to write to + Whether to close the writer at the end of RDF handling - + - Gets the Last Child Graph Pattern of this Pattern and removes it from this Pattern + Creates a new 
Write-Through Handler + Triple Formatter to use + Text Writer to write to - + - Gets the Child Graph Patterns of this Pattern + Creates a new Write-Through Handler + Type of the formatter to create + Text Writer to write to + Whether to close the writer at the end of RDF handling - + - Gets the Triple Patterns in this Pattern + Creates a new Write-Through Handler + Type of the formatter to create + Text Writer to write to - + - Gets whether this Pattern can be simplified + Starts writing results - + - Gets whether this Graph Pattern is a Sub-query which can be simplified + Ends the writing of results closing the TextWriter depending on the option set when this instance was instantiated + - + - Gets whether the Graph Pattern uses the Default Dataset + Writes a Boolean Result to the output - - Graph Patterns generally use the Default Dataset unless they are a GRAPH pattern or they contain a Triple Pattern, child Graph Pattern or a FILTER/BIND which does not use the default dataset - + Boolean Result - + - Gets the enumeration of Filters that apply to this Graph Pattern which will have yet to be placed within the Graph Pattern + Writes a Variable declaration to the output + Variable Name + - + - Gets the enumeration of LET assignments that are in this Graph Pattern which will be placed appropriately later + Writes a Result to the output + SPARQL Result + - + - Gets the Variables used in the Pattern + A RDF Handler which writes the Triples being parsed directly to a IStorageProvider in batches provided the manager supports the UpdateGraph() method - + - Gets the inline data (VALUES block if any) + Default Batch Size for writes - + - Optimises the Graph Pattern using the current global optimiser + Creates a new Write to Store Handler + Manager to write to + Graph URI to write Triples from the default graph to + Batch Size - + - Optimises the Graph Pattern using the given optimiser + Creates a new Write to Store Handler - Query Optimiser - - - Important: If a Pattern has 
already been optimized then calling this again is a no-op. - - - For finer grained control of what gets optimized you can use to disable automatic optimisation and then manually call this method as necessary - - + Manager to write to + Graph URI to write Triples from the default graph to - + - Optimises the Graph Pattern using the given optimiser and with the given variables + Creates a new Write to Store Handler - Query Optimiser - Variables - - - Important: If a Pattern has already been optimized then calling this again is a no-op. - - - For finer grained control of what gets optimized you can use to disable automatic optimisation and then manually call this method as necessary - - - The vars parameter contains Variables mentioned in the parent Graph Pattern (if any) that can be used to guide optimisation of child graph patterns - - + Manager to write to + Batch Size - + - Gets the String representation of the Graph Pattern + Creates a new Write to Store Handler - + Manager to write to - + - Gets the Algebra representation of the Graph Pattern + Starts RDF Handling by ensuring the queue of Triples to write is empty - - + - Class for representing property function patterns in SPARQL Query + Ends RDF Handling by ensuring the queue of Triples to write has been processed + Indicates whether parsing completed without error - + - Creates a new Property Function pattern + Handles Triples by queuing them for writing and enacting the writing if the Batch Size has been reached/exceeded - Function information - Property Function + Triple + - + - Creates a new Property Function pattern + Gets that the Handler accepts all Triples - Original Triple Patterns - Subject Arguments - Object Arguments - Property Function - + - Gets the Pattern Type + + Namespace for Token classes which are used to support Token Based parsing of RDF syntaxes + - + - Gets the Subject arguments + Base Implementation of IToken used by all derived tokens for ease of implementation - + - Gets the Object 
arguments + Variables for representing the Type and Position of the Token - + - Gets the original triple patterns + Variables for representing the Type and Position of the Token - + - Gets the property function + Variables for representing the Type and Position of the Token - + - Returns the empty enumerable as cannot guarantee any variables are bound + Variables for representing the Type and Position of the Token - + - Returns all variables mentioned in the property function as we can't guarantee they are bound + Variables for representing the Type and Position of the Token - + - Evaluates the property function + Variable containg the value of the Token - Evaluation Context - + - Returns false because property functions are not accept-alls + Creates a Token and fills in its Values + Integer denoting the Tokens Type + String value that the Token represents (if any) + Line at which the Token starts + Line at which the Token ends + Column at which the Token starts + Column at which the Token ends + All the derived classes use this Constructor to fill in the basic values of a Token - + - Returns true if none of the + Gets an arbitrary integer which indicates the Type of the Token - + - Compares a property function pattern to another + Gets the String Value which this Token represents (if any) - Pattern - - + - Compares a property function pattern to another + Gets the Line at which this Token Starts - Pattern - - + - Gets the string representation of the pattern + Gets the Line at which this Token Ends - - + - Class for representing Node Patterns in Sparql Queries + Gets the Column at which this Token Starts - + - Binding Context for Pattern Item + Gets the Column at which this Token Ends - + - Checks whether the Pattern Item accepts the given Node in the given Context + Gets the Length of the Tokens Value - Evaluation Context - Node to test - - + - Constructs a Node based on this Pattern for the given Set + Gets a String representation of the Token Type and Value - 
Construct Context - + - Sets the Binding Context for the Pattern Item + Gets a Hash Code for a Token + - + - Gets/Sets whether rigorous evaluation is used, note that this setting may be overridden by the global option + Possible Escape Handling Modes for the Tokeniser - + - Gets the String representation of the Pattern + Escaping for URIs (only \u and \U escapes are valid) - - + - Gets the Variable Name if this is a Variable Pattern or null otherwise + Permissive escaping for URIs (only \" is invalid) - + - Gets/Sets whether the Variable is repeated in the Pattern + Escaping for Quoted Literals (every escape but \< and \' is valid) - + - Class for representing Triple Patterns in SPARQL Queries + Escaping for single Quoted Literals (every escape but \< and \" is valid) - + - Creates a new Triple Pattern + Escaping for Quoted Literals (every escape but \< is valid), this differs from and in that it allows both \' and \" - Subject Pattern - Predicate Pattern - Object Pattern - + - Gets whether a given Triple is accepted by this Pattern in the given Context + Escaping for QNames (only Unicode espaces are valid) - Evaluation Context - Triple to test - - + - Gets the pattern type + Abstract Base Class for Tokeniser which handles the Position tracking - + - Gets the Index Type we will use for this Pattern + Constructor for the BaseTokeniser which takes in a TextReader that the Tokeniser will generate Tokens from + TextReader to generator Tokens from - + - Subject Pattern + Gets/Sets the Format that this Tokeniser is used for + The value set here will replace any instances of {0} specified in inputs to the Error() function allowing messages regarding certain syntaxes not being valid in a given format to be provided - + - Predicate Pattern + Gets the Next available Token from the Input + + Parser Exception if a valid Token cannot be retrieved - + - Object Pattern + Informs the Helper that you wish to start reading a new Token - + - Returns all variables mentioned as a match 
guarantees all variables are bound + Peeks at the next Character + - + - Returns an empty enumeration as a match guarantees all variables are bound + Allows you to Backtrack one character (and no more) - + - Returns whether the Triple Pattern is an accept all + Gets the value of the Output Buffer - - True if all three Pattern Items are VariablePattern and all the Variables names are distinct - - + - Evaluates a Triple Pattern in the given Evaluation Context + Gets the current length of the Output Buffer - Evaluation Context - + - Gets the Enumeration of Triples that should be assessed for matching the pattern + Gets the Current Line in the Input Stream - Evaluation Context - - + - Takes an enumerable and extracts Triples which match this pattern as results + Gets the Current Position in the Input Stream - SPARQL Evaluation Context - Enumerable of Triples - + - Generates a Result Set for a Triple that matches the Pattern + Gets the Start Line in the Input Stream of the current Token - Triple - - + - Constructs a Triple from a Set based on this Triple Pattern + Gets the Start Position in the Input Stream of the current Token - Construct Context - - + - Gets whether the Pattern contains no Variables of any kind + Gets the End Line in the Input Stream of the current Token - + - Gets whether the Pattern contains no Explicit Variables (i.e. Blank Node Variables are ignored) + Gets the End Position in the Input Stream of the current Token - + - Gets whether the Pattern contains no Explicit Variables (i.e. 
Blank Node Variables are ignored) + Gets/Sets the Last Token Type - + - Compares a triple pattern to another + Gets whether the Tokeniser has backtracked - Pattern - - + - Compares a triple pattern to another + Consumes a single Character into the Output Buffer and increments the Position Counters - Pattern - + Thrown if the caller tries to read beyond the end of the Stream - + - Gets the String representation of this Pattern + Consumes a single Character into the Output Buffer and increments the Position Counters - + Whether EOF is allowed + True if the EOF is reached + + If is set to false then the normal behaviour is used and an error will be thrown on end of file + - + - Possible Types of Triple Pattern + Consumes a New Line (which may be a single \n or \r or the two characters following each other) + Whether the New Line should be added to the Output Buffer - + - Simple pattern matching + Consumes a New Line (which may be a single \n or \r or the two characters following each other) + Whether the New Line should be added to the Output Buffer + Whether EOF is permitted instead of a New Line - + - FILTER application + Skips a single Character and increments the Position Counters + Use when you are reading characters into some local buffer and not the global token buffer, used in String escaping + Thrown if the caller tries to read beyond the end of the Stream - + - BIND assignment + Helper function which discards White Space which the Tokeniser doesn't care about and increments position counters correctly - + - LET assignment + Handles the standard escapes supported in all the UTF-8 based RDF serializations - + - Sub-query + Handles the complex escapes that can occur in a local name + + Unlike HandleEscapes() this only unescapes unicode escapes, other escapes are simply validated and passed through for later unescaping + - + - Property Path + Determines whether a given Character can be valid as a Hex Digit + Character to test + - + - Property Function + Helper 
Function for generating Standardised Parser Errors + The Error Message + - + - Comparer for Triple Pattern Types + Helper Function for generating Standardised Parser Errors about unexpected characters + Unexpected Character + Message detailing what was expected (may be empty if no explicit expectation) + - + - Compares two triple pattern types + Helper Function for generating Standardised Parser Errors about unexpected end of input - Pattern Type - Pattern Type + Message detailing what was expected (may be empty if no explicit expectation) - + - Namespace for Pattern Classes that are used in the Graph and Triple matching process for executing SPARQL queries on IInMemoryQueryableStore objects + Helper Function for generating Standardised Parser Errors about unexpected new lines + Message detailing what was expected (may be empty if no explicit expectation) + - + - Class for representing BIND assignments in SPARQL Queries + Helper Function for generating Standardised Parser Errors about unexpected tokens + Message detailing what was expected (may be empty if no explicity expectation) + Token that was parsed + - + - Creates a new BIND Pattern + Basic Token Queue which provides no Buffering except in the sense that it queues all possible Tokens when the InitialiseBuffer method is called - Variable to assign to - Expression which generates a value which will be assigned to the variable - + - Evaluates a BIND assignment in the given Evaluation Context + Internal Queue object which this class is a wrapper around - Evaluation Context - + - Gets the Pattern Type + Creates a new Token Queue - + - Returns that this is not an accept all since it is a BIND assignment + Creates a new Token Queue with the given Tokeniser + Tokeniser - + - Gets the Expression that is used to generate values to be assigned + Removes and returns the first Token from the Queue + First Token in the Queue - + - Gets the Name of the Variable to which values will be assigned + Adds a Token to the end of 
the Queue + Token to add - + - Returns an empty enumeration as any evaluation error will result in an unbound value so we can't guarantee any variables are bound + Gets the first Token from the Queue without removing it + First Token in the Queue - + - Returns the variable being assigned to as any evaluation error will result in an unbound value so we can't guarantee it is bound + Empties the Token Queue - + - Gets whether the Pattern uses the Default Dataset + Gets the number of Tokens in the Queue - + - Returns true as a BIND can never contain a Blank Variable + Initialises the Token Queue Buffer - + - Gets the string representation of the LET assignment + Initialises the Token Queue Buffer to the set Buffer Amount - + Amount of Tokens to Buffer - + - Compares this Bind to another Bind + Gets the underlying Queue of Tokens - Bind to compare to - Just calls the base compare method since that implements all the logic we need - + - Compares this Bind to another Bind + Internal Helper Method for Tokeniser Tracing - Bind to compare to - Just calls the base compare method since that implements all the logic we need + - + - Pattern which matches temporary variables + Token Queue which is not backed by a Tokeniser + + Designed to be explicitly populated with Tokens for when a Parser needs to be invoked on a subset of the overall Tokens + - + - Creates a new Pattern representing a Blank Node + Creates a new non-Tokenised Queue - Blank Node ID - + - Creates a new Pattern representing a Blank Node + Removed and returns the first Token from the Queue - Blank Node ID - Whether to force rigorous evaluation + - + - Gets the Blank Node ID + Gets the first Token from the Queue without removing it + First Token in the Queue - + - Checks whether the given Node is a valid value for the Temporary Variable + Initialises the Buffer by doing nothing since there is no buffering on this Queue - Evaluation Context - Node to test - - + - Constructs a Node based on the given Set + A Buffered 
Queue for a Tokeniser which synchronously buffers a number of Tokens when the Queue is accessed and nothing is Buffered - Construct Context - - + - Gets the String representation of this Pattern + Variable storing the Buffer Size - - + - Gets the Temporary Variable Name of this Pattern + Creates a new Buffered Queue for the given Tokeniser + Tokeniser to Buffer - + - Pattern which matches the Blank Node with the given Internal ID regardless of the Graph the nodes come from + Creates a new Buffered Queue - + - Creates a new Fixed Blank Node Pattern + Gets the next Token in the Queue and removes it from the Queue - ID + Token at the front of the Queue - + - Gets the Blank Node ID + Gets the next Token in the Queue without removing it from the Queue + Token at the front of the Queue - + - Checks whether the pattern accepts the given Node + Causes the Buffer to be filled using the Default Buffering level of 10 - SPARQL Evaluation Context - Node to test - - + - Returns a Blank Node with a fixed ID scoped to whichever graph is provided + Causes the Buffer to be filled and sets the Buffering level for the Queue - Construct Context + Number of Tokens to Buffer + If a Buffer amount of less than zero is given then Buffer size will stay at default size (10) or existing size if it's previously been set - + - Gets the String representation of the Pattern Item + Internal Helper Method which performs the Buffering - - + - Pattern which matches specific Nodes + An Asynchronous version of BufferedTokenQueue which automatically Buffers as many Tokens as possible in a Background thread + + Periodic instablility is fixed to the best of my knowledge, it is still recommended to use a BufferedTokenQueue or the basic TokenQueue. This implementation offers little/no performance improvement over the other types of Token Queue. 
+ - + - Creates a new Node Match Pattern + Creates a new Asynchronous Buffered Queue with the given Tokeniser - Exact Node to match + Tokeniser to Buffer - + - Creates a new Node Match Pattern + Creates a new Asynchronous Buffered Queue - Exact Node to match - Whether to force rigorous evaluation regardless of the global setting - + - Checks whether the given Node matches the Node this pattern was instantiated with + Gets the next Token in the Queue and removes it from the Queue - Evaluation Context - Node to test - + Token at the front of the Queue - + - Constructs a Node based on the given Set + Gets the next Token in the Queue without removing it from the Queue - Construct Context + Token at the front of the Queue - + - Gets a String representation of the Node + Internal Helper Method which starts the Background Buffering if not already running - - + - Gets the Node that this Pattern matches + Internal Thread Method which does the Background Buffering - + - Pattern which matches Variables + Tokeniser for tokenising CSV inputs - + - Creates a new Variable Pattern + Creates a new CSV Tokeniser - Variable name + Text Reader - + - Creates a new Variable Pattern + Creates a new CSV Tokeniser - Variable name - Whether to force rigorous evaluation + Stream Reader - + - Checks whether the given Node is a valid value for the Variable in the current Binding Context + Gets the next available token from the input - Evaluation Context - Node to test - + - Constructs a Node based on the given Set + Interface for defining Token classes to be used in Parsing RDF - Construct Context - The Node which is bound to this Variable in this Solution - + - Gets the String representation of this pattern + Gives some Integer representing the Token Type as understood by a specific Parser implementation - - + - Gets the Name of the Variable this Pattern matches + Gives the Value of the Token - + - Base class for representing all types of Triple Patterns in SPARQL queries + Gives the Line at 
which the Token starts - + - Stores the list of variables that are used in the Pattern + Gives the Line at which the Token ends - + - Evaluates the Triple Pattern in the given Evaluation Context + Gives the Position within the Start Line that the Token starts - Evaluation Context - + - Returns whether the Triple Pattern is an accept all + Gives the Position within the End Line that the Token ends - + - Gets the Triple Pattern Type + Gives the Length of the Token - + - Gets the List of Variables used in the Pattern + Token Queue Mode Constants - - These are sorted in alphabetical order - - + - Gets the enumeration of floating variables in the pattern i.e. variables that are not guaranteed to have a bound value + No Buffering used - + - Gets the enumeration of fixed variables in the pattern i.e. variables that are guaranteed to have a bound value + Synchronous Buffering used - + - Compares a Triple Pattern to another Triple Pattern + Asynchronous Buffering used - Other Triple Pattern - - - - The aim of this function is to sort Triple Patterns into what is hopefully an optimal order such that during execution the query space is restricted as early as possible. - - - The basic rules of this are as follows: -
    -
  1. Patterns with fewer variables should be executed first
  2. -
  3. Patterns using the same variables should be executed in sequence
  4. -
  5. Patterns using indexes which are considered more useful should be executed first
  6. -
-
-
- + - Gets whether a Triple Pattern is Thread Safe when evaluated + Interface for Tokenisers - Almost all Triple Patterns are Thread Safe unless they are subquery patterns which themselves are not thread safe + A Tokeniser is a class that takes an input stream and produces textual tokens from it for use in token based parsers - + - Gets whether the Pattern has no blank variables + Causes the Tokeniser to attempt to retrieve the next Token + + Thrown if a valid Token cannot be parsed + Thrown if there is a problem reading the Input Stream - + - Gets the String representation of the Pattern + Interface for implementing Token Queues which provide Bufferable wrappers to Tokenisers - - + - Represents a set of Bindings for a SPARQL Query or part thereof i.e. represents the VALUES clause + Removes the first Token from the Queue + - + - Creates a new Empty Bindings Pattern + Adds a Token to the end of the Queue + Token to add - + - Creates a new Bindings Pattern + Gets the first Token from the Queue without removing it - Variables + - + - Gets the enumeration of Variables + Tokeniser that this is a Queue for - + - Get the enumeration of fixed variables i.e. those guaranteed to be bound + Clears the Token Queue - + - Gets the enumeration of floating variables i.e. 
those not guaranteed to be bound + Gets the number of Tokens in the Queue - + - Gets the enumeration of Tuples + Initialises the Buffer - + - Adds a Tuple to the Bindings pattern + Initialises the Buffer and sets the Buffering Level - + Buffering Amount - + - Converts a Bindings Clause to a Multiset + Gets the underlying Queue of Tokens - - + - Gets the String representation of the Pattern + Gets/Sets whether Tokeniser Tracing should be used - - + - Represents a Tuple in a BINDINGS clause + Gets the Token Type of the last Token dequeued - + - Creates a new Binding Tuple + Abstract base implementation of a Token Queue - Variables - Values - + - Gets the enumeration of Variable-Value pairs + Tokeniser used to fill the Token Queue - + - Gets the Value for a Variable + Variable indicating whether Tokeniser Tracing is enabled - Variable - - + - Gets whether this is an empty tuple + Type of Last Token dequeued - + - Gets whether the Tuple is complete i.e. has no undefined entries + Abstract Definition of Interface Method + - + - Gets whether the given variable is bound for this tuple i.e. 
is not UNDEF + Abstract Definition of Interface Method - Variable - True if the variable exists in the tuple and is bound, false otherwise - + - Gets the String representation of the Tuple + Abstract Definition of Interface Method - - + - Class for representing Filter Patterns in SPARQL Queries + Sets the Tokeniser used by the Queue - - A Filter Pattern is any FILTER clause that can be executed during the process of executing Triple Patterns rather than after all the Triple Patterns and Child Graph Patterns have been executed - + Setting the Tokeniser causes the Queue to clear itself - + - Creates a new Filter Pattern with the given Filter + Abstract Definition of Interface Method - Filter - + - Evaluates a Filter in the given Evaluation Context + Abstract Definition of Interface Property - Evaluation Context - + - Gets the Pattern Type + Abstract Definition of Interface Method - + - Returns that the Pattern is not an accept all (since it's a Filter) + Abstract Definition of Interface Method + Buffering Amount - + - Gets the Filter that this Pattern uses + Abstract Definition of Interface Property - + - Returns the empty enumerable as don't know which variables will be bound + Gets/Sets Tracing for the Token Queue - + - Returns the empty enumerable as don't know which variables will be bound + Gets the Token Type of the last Token dequeued - + - Gets whether the Pattern uses the Default Dataset + A Class for Reading an Input Stream and generating Notation 3 Tokens from it - + - Returns true as a FILTER cannot contain blank variables + Pattern for Valid QNames that use only the Latin Alphabet - - Technically blank nodes may appear in a FILTER as part of an EXISTS/NOT EXISTS clause but in that case they would not be visible outside of the FILTER and so are not relevant - - + - Compares a filter pattern to another + Patter for Valid Variable Names - Pattern - - + - Compares a filter pattern to another + Creates a new Instance of the Tokeniser - Pattern - + The Input 
Stream to generate Tokens from - + - Returns the string representation of the Pattern + Creates a new Instance of the Tokeniser - + The Input Stream to generate Tokens from - + - Interface for Triple Patterns + Creates a new Instance of the Tokeniser + The Input to generate Tokens from - + - Evaluates the Triple Pattern in the given Evaluation Context + Gets the next parseable Token from the Input or raises an Error - Query Evaluation Context + + Occurs when a Token cannot be parsed - + - Gets the Pattern Type + Internal Helper method which attempts to get a Comment Token + - + - Gets whether the Pattern accepts all + Determines whether a given Token represents an RDF Term or part thereof - - Indicates that a Pattern is of the form ?s ?p ?o - + Token Type to test + - + - Gets the List of Variables used in the Pattern + Tokeniser for NTriples RDF Syntax - + - Gets the enumeration of floating variables in the pattern i.e. variables that are not guaranteed to have a bound value + Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Stream to read Tokens from + NTriples syntax to tokenise - + - Gets the enumeration of fixed variables in the pattern i.e. 
variables that are guaranteed to have a bound value + Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Stream to read Tokens from - + - Gets whether a Triple Pattern uses the Default Dataset when evaluated + Creates a new NTriples Tokeniser which reads Tokens from the given Stream - - Almost all Triple Patterns use the Default Dataset unless they are sub-query patterns which themselves don't use the Default Dataset or they contain an expression (in the case of BIND/LET/FILTERs) which does not use the Default Dataset - + Stream to read Tokens from - + - Gets whether a Triple Pattern does not contain any Blank Variables + Creates a new NTriples Tokeniser which reads Tokens from the given Input + Input to read Tokens from - + - Interface for Triple Patterns that can be used in a CONSTRUCT pattern + Creates a new NTriples Tokeniser which reads Tokens from the given Stream + Stream to read Tokens from + NTriples syntax to tokenise - + - Constructs a Triple from a Set based on this Triple Pattern + Creates a new NTriples Tokeniser which reads Tokens from the given Input - Construct Context - + Input to read Tokens from + NTriples syntax to tokenise - + - Gets the Subject of the Pattern + Gets/Sets the NTriples syntax that should be supported - + - Gets the Predicate of the Pattern + Gets/Sets whether the output should be altered slightly to support NQuads parsing + + + This is used internally to alter how DataTypes get tokenised, normally these are just returned as a UriToken since a Literal can only occur as the Object in NTriples and so if we see a Uri after a Literal it must be it's datatype and not part of another Triple. 
+ + + In the case of NQuads a UriToken may follow a Literal as the Context of that Triple and not its datatype so it's important to distinguish by using a DataTypeToken instead + + - + - Gets the Object of the Pattern + Gets the next available Token from the Input Stream + - + - Gets whether the Pattern contains no Variables of any kind + Special Token which acts as a Placeholder for SPARQL Property Paths - + - Gets whether the Pattern contains no Explicit Variables (i.e. Blank Node Variables are ignored) + Creates a new Path Token + Path - + - Inteface for Triple Patterns that do simple pattern matching + Gets the Path this Token acts as a placeholder for - + - Gets the Index type that should be used in Pattern execution + A Class for Reading an Input Stream and generating SPARQL Tokens - + - Gets the Subject of the Pattern + Creates a new Instance of the Tokeniser + The Input Stream to generate Tokens from + Syntax Mode to use when parsing - + - Gets the Predicate of the Pattern + Creates a new Instance of the Tokeniser + The Input Stream to generate Tokens from + Syntax Mode to use when parsing - + - Gets the Object of the Pattern + Creates a new Instance of the Tokeniser + The Input to generate Tokens from + Syntax Mode to use when parsing - + - Gets the Triples that match this pattern + Gets the next parseable Token from the Input or raises an Error - Evaluation Context + Occurs when a Token cannot be parsed - + - Gets whether a given triple is accepted by this pattern + Token which represents the SPARQL SELECT Keyword - Context - Triple - - + - Creates a set from a Triple + Creates a new SELECT Keyword Token - Triple - + Line the Keyword occurs on + Position the Keyword occurs at - + - Interface for Triple Patterns that apply filters + Token which represents the SPARQL ASK Keyword - + - Gets the filter to apply + Creates a new ASK Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Interface for Triple Patterns that represent 
Assignment operators + Token which represents the SPARQL DESCRIBE Keyword - + - Gets the Assignment Expression that is used + Creates a new DESCRIBE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Name of the Variable which is assigned to + Token which represents the SPARQL CONSTRUCT Keyword - + - Interface for Triple Patterns that do sub-queries + Creates a new CONSTRUCT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the sub-query + Token which represents the use of the * character to mean All - + - Interface for Triple Patterns that do property paths + Creates a new All Token + Line the * occurs on + Position the * occurs at - + - Gets the Subject of the Pattern + Token which represents the SPARQL ABS Keyword - + - Gets the property path + Creates a new ABS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Object of the Pattern + Token which represents the SPARQL ALL Keyword - + - Interface for Triple Patterns that do property functions + Creates a new ALL Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Subject arguments of the function + Token which represents the SPARQL AS Keyword - + - Gets the Object arguments of the function + Creates a new AS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the property function + Token which represents the SPARQL ASC Keyword - + - Gets the original triple patterns that made up this pattern + Creates a new ASC Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Class for representing LET Patterns in SPARQL Queries + Token which represents the SPARQL AVG Keyword - + - Creates a new LET Pattern + Creates a new AVG Keyword Token - Variable to assign to - Expression which generates a value which will be assigned to the variable + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a 
LET assignment in the given Evaluation Context + Token which represents the SPARQL BIND Keyword - Evaluation Context - + - Gets the Pattern Type + Creates a new BIND Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Returns that this is not an accept all since it is a LET assignment + Token which represents the SPARQL BINDINGS Keyword - + - Gets the Expression that is used to generate values to be assigned + Creates a new BINDINGS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Name of the Variable to which values will be assigned + Token which represents the SPARQL BNODE Keyword - + - Returns an empty enumeration as any evaluation error will result in an unbound value so we can't guarantee any variables are bound + Creates a new BNODE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Returns the variable being assigned to as any evaluation error will result in an unbound value so we can't guarantee it is bound + Token which represents the SPARQL BOUND Keyword - + - Gets whether the Pattern uses the Default Dataset + Creates a new BOUND Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Returns true as a LET can never contain Blank Nodes + Token which represents the SPARQL CALL Keyword - + - Gets the string representation of the LET assignment + Creates a new CALL Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Compares this Let to another Let + Token which represents the SPARQL CEIL Keyword - Let to compare to - Just calls the base compare method since that implements all the logic we need - + - Compares this Let to another Let + Creates a new CEIL Keyword Token - Let to compare to - Just calls the base compare method since that implements all the logic we need + Line the Keyword occurs on + Position the Keyword occurs at - + - Class for representing property patterns in SPARQL Queries + Token 
which represents the SPARQL COALESCE Keyword - + - Creates a new Property Path Pattern + Creates a new COALESCE Keyword Token - Subject - Property Path - Object + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the pattern type + Token which represents the SPARQL CONCAT Keyword - + - Gets the Subject of the Property Path + Creates a new CONCAT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Property Path + Token which represents the SPARQL COUNT Keyword - + - Gets the Object of the Property Path + Creates a new COUNT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a property path pattern + Token which represents the SPARQL DATATYPE Keyword - Evaluation Context - + - Gets whether the Pattern accepts all Triple Patterns + Creates a new DATATYPE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Returns false a property path may always contain implicit blank variables + Token which represents the SPARQL DAY Keyword - + - Compares a property path pattern to another + Creates a new DAY Keyword Token - Pattern - + Line the Keyword occurs on + Position the Keyword occurs at - + - Compares a property path pattern to another + Token which represents the SPARQL DESC Keyword - Pattern - - + - Gets the String representation of the Pattern + Creates a new DESC Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Class for representing Sub-queries which occur as part of a SPARQL query + Token which represents the SPARQL DISTINCT Keyword - + - Creates a new Sub-query pattern which represents the given sub-query + Creates a new DISTINCT Keyword Token - Sub-query + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Sub-Query + Token which represents the SPARQL ENCODE_FOR_URI Keyword - + - Gets the enumeration of floating variables in the algebra i.e. 
variables that are not guaranteed to have a bound value + Creates a new ENCODE_FOR_URI Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the enumeration of fixed variables in the algebra i.e. variables that are guaranteed to have a bound value + Token which represents the SPARQL EXISTS Keyword - + - Gets the pattern type + Creates a new EXISTS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a Sub-query in the given Evaluation Context + Token which represents the SPARQL FILTER Keyword - Evaluation Context - + - Returns that the Pattern is not an accept all since it is a Sub-query + Creates a new FILTER Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets whether the Sub-query is Thread Safe + Token which represents the SPARQL FLOOR Keyword - + - Returns true as while a sub-query may contain blank node variables they will not be in scope here + Creates a new FLOOR Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Compares a sub-query pattern to another + Token which represents the SPARQL FROM Keyword - Pattern - - + - Compares a sub-query pattern to another + Creates a new FROM Keyword Token - Pattern - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the string representation of the sub-query + Token which represents the SPARQL FROM NAMED Keyword combination - - + - Abstract Base class for Unary Filters that operate on a single Expression + Creates a new FROM NAMED Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Expression which is the Argument to the Filter + Token which represents the SPARQL GRAPH Keyword - + - Creates a new Base Unary Filter + Creates a new GRAPH Keyword Token - Argument to the Filter + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a filter in the given Evaluation Context + Token which represents the SPARQL GROUP BY 
Keyword - Evaluation Context - + - Gets the String representation of the Filter + Creates a new GROUP BY Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the enumeration of Variables used in the Filter + Token which represents the SPARQL GROUP_CONCAT Keyword - + - Gets the inner expression this Filter uses + Creates a new GROUP_CONCAT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Filter that represents the Sparql BOUND() function + Token which represents the SPARQL HAVING Keyword - + - Creates a new Bound Filter + Creates a new HAVING Keyword Token - Variable Expression + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a filter in the given Evaluation Context + Token which represents the SPARQL HOURS Keyword - Evaluation Context - + - Gets the String representation of the Filter + Creates a new HOURS Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Generic Filter for Filters which take a single sub-expression as an argument + Token which represents the SPARQL IF Keyword - + - Creates a new Unary Expression Filter which filters on the basis of a single sub-expression + Creates a new IF Keyword Token - Expression to filter with + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a filter in the given Evaluation Context + Token which represents the SPARQL IN Keyword - Evaluation Context - + - Gets the String representation of the Filter + Creates a new IN Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Generic Filter for use where multiple Filters are applied on a single Graph Pattern + Token which represents the SPARQL IRI Keyword - + - Creates a new Chain Filter + Creates a new IRI Keyword Token - First Filter - Second Filter + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new Chain Filter + Token which represents the SPARQL ISBLANK 
Keyword - Filters - + - Creates a new Chain Filter + Creates a new ISBLANK Keyword Token - First Filter - Additional Filters + Line the Keyword occurs on + Position the Keyword occurs at - + - Evaluates a filter in the given Evaluation Context + Token which represents the SPARQL ISIRI Keyword - Evaluation Context - + - Adds an additional Filter to the Filter Chain + Creates a new ISIRI Keyword Token - A Filter to add + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the String representation of the Filters + Token which represents the SPARQL ISLITERAL Keyword - - + - Gets the enumeration of Variables used in the chained Filters + Creates a new ISLITERAL Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Inner Expression used by the Chained Filters + Token which represents the SPARQL ISNUMERIC Keyword - - Equivalent to ANDing all the Chained Filters expressions - - + - - Namespace containing classes pertaining to the filtering of the results of SPARQL queries - + Creates a new ISNUMERIC Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Interface for Classes which implement SPARQL Filter Functions + Token which represents the SPARQL ISURI Keyword - + - Evaluates a Filter in the given Evaluation Context + Creates a new ISURI Keyword Token - Evaluation Context + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the enumeration of Variables that are used in the Filter + Token which represents the SPARQL LANG Keyword - + - Gets the Expression that this Filter uses + Creates a new LANG Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Base Class for implementing Sparql ORDER BYs + Token which represents the SPARQL LANGMATCHES Keyword - + - Holds the Child Order By (if any) + Creates a new LANGMATCHES Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Stores the Evaluation Context + Token which 
represents the SPARQL LCASE Keyword - + - Modifier used to make ordering Descending + Creates a new LCASE Keyword Token - Implementations derived from this class should multiply their comparison results by the modifier to automatically provide Ascending/Descending order + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets/Sets the Child Order By + Token which represents the SPARQL LENGTH Keyword - + - Sets the Evaluation Context for the Ordering + Creates a new LENGTH Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Sets the Ordering to Descending + Token which represents the SPARQL LET Keyword - + - Gets whether the Ordering is Simple + Creates a new LET Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets all the Variables used in the Ordering + Token which represents the SPARQL LIMIT Keyword - + - Gets the Expression used in the Ordering + Creates a new LIMIT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Abstract Compare method which derived classes should implement their ordering in + Token which represents the SPARQL MAX Keyword - A Set - A Set - - + - Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Creates a new MAX Keyword Token - Triple Pattern - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the String representation of the Order By + Token which represents the SPARQL MD5 Keyword - - + - An ORDER BY which orders on the values bound to a particular variable + Creates a new MD5 Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new Ordering based on the Value of a given Variable + Token which represents the SPARQL MEDIAN Keyword - Variable to order upon - + - Compares Sets on the basis of their values for the Variable the class was instaniated with + Creates a new MEDIAN Keyword Token - A Set - A Set - + Line the Keyword occurs on 
+ Position the Keyword occurs at - + - Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Token which represents the SPARQL MIN Keyword - Triple Pattern - - + - Gets whether the Ordering is Simple + Creates a new MIN Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets all the Variables used in the Ordering + Token which represents the SPARQL MINUTES Keyword - + - Gets the Variable Expression Term used in the Ordering + Creates a new MINUTES Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the String representation of the Order By + Token which represents the SPARQL MINUS Keyword - - + - An ORDER BY which orders based on the values of a Sparql Expression + Creates a new MINUS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new Order By using the given Expression + Token which represents the SPARQL MODE Keyword - Expression to order by - + - Orders the sets based on the values resulting from evaluating the expression for both solutions + Creates a new MODE Keyword Token - A Set - A Set - + Line the Keyword occurs on + Position the Keyword occurs at - + - Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Token which represents the SPARQL MONTH Keyword - Triple Pattern - - + - Gets whether the Ordering is Simple + Creates a new MONTH Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets all the Variables used in the Ordering + Token which represents the SPARQL NAMED Keyword - + - Gets the Expression used for Ordering + Creates a new NAMED Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the String representation of the Order By + Token which represents the SPARQL NOT IN Keyword - - + - - Namespace containing classes used to order the results of SPARQL queries - + Creates a new NOT IN Keyword Token + Line the 
Keyword occurs on + Position the Keyword occurs at - + - Interface for classes that represent SPARQL ORDER BY clauses + Token which represents the SPARQL NMAX Keyword - A SPARQL Order By clause provides a list of orderings, when parsed into the dotNetRDF model this is represented as a single ISparqlOrderBy for the first term in the clause chained to ISparqlOrderBy's for each subsequent term via the Child property. - + - Gets/Sets the Child Ordering that applies if the two Objects are considered equal + Creates a new NMAX Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Sets the Evaluation Context for the Order By + Token which represents the SPARQL NMIN Keyword - + - Sets whether the Ordering is Descending + Creates a new NMIN Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets whether the Ordering is simple (i.e. applies on variables only) + Token which represents the SPARQL NOT EXISTS Keyword - + - Gets all the Variables used in the Ordering + Creates a new NOT EXISTS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Expression used to do the Ordering + Token which represents the SPARQL NOW Keyword - + - Generates a Comparer than can be used to do Ordering based on the given Triple Pattern + Creates a new NOW Keyword Token - Triple Pattern - + Line the Keyword occurs on + Position the Keyword occurs at - + - Interface for Property Function factories + Token which represents the SPARQL OFFSET Keyword - + - Gets whether the factory considers the given URI as representing a property function + Creates a new OFFSET Keyword Token - URI - + Line the Keyword occurs on + Position the Keyword occurs at - + - Tries to create a property function pattern with the given information + Token which represents the SPARQL OPTIONAL Keyword - Function information - Property Function pattern - - + - Helper Class containing functions useful in working with property functions + 
Creates a new OPTIONAL Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Used to extract the patterns that make up property functions + Token which represents the SPARQL ORDER BY Keyword combination - Triple Patterns - - + - Used to extract the patterns that make up property functions + Creates a new ORDER BY Keyword Token - Triple Patterns - Locally scoped factories - + Line the Keyword occurs on + Position the Keyword occurs at - + - Used to help extract the patterns that make up a property function pattern + Token which represents the SPARQL RAND Keyword - Key - Subject - Patterns - Function Information - Argument List to add discovered arguments to - + - Interface for SPARQL property functions + Creates a new RAND Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Function URI + Token which represents the SPARQL REDUCED Keyword - + - Evaluates the function in the given context + Creates a new REDUCED Keyword Token - Context - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the variables used in the function + Token which represents the SPARQL REGEX Keyword - + - Factory for creating property functions + Creates a new REGEX Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the number of globally registered factories + Token which represents the SPARQL REPLACE Keyword - + - Adds a globally registered factory + Creates a new REPLACE Keyword Token - Factory + Line the Keyword occurs on + Position the Keyword occurs at - + - Removes a globally registered factory + Token which represents the SPARQL ROUND Keyword - Factory - + - Gets whether a factory is registered + Creates a new ROUND Keyword Token - Factory Type - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets whether a factory is registered + Token which represents the SPARQL SAMETERM Keyword - Factory - - + - Gets whether a URI is considered a property 
function by the global factories + Creates a new SAMETERM Keyword Token - Function URI - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets whether a URI is considered a property function by any global/local factory + Token which represents the SPARQL SAMPLE Keyword - Function URI - Locally scoped factories - - + - Tries to create a property function + Creates a new SAMPLE Keyword Token - Property Function information - Property Function - + Line the Keyword occurs on + Position the Keyword occurs at - + - Tries to create a property function + Token which represents the SPARQL SECONDS Keyword - Property Function information - Locally Scoped factories - Property Function - - + - Represents information about a property function + Creates a new SECONDS Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates new function information + Token which represents the SPARQL SEPARATOR Keyword - Function URI - + - Gets the function URI + Creates a new SEPARATOR Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the triple patterns that compose the property function + Token which represents the SPARQL SERVICE Keyword - + - Gets the subject arguments to the function + Creates a new SERVICE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the object arguments to the function + Token which represents the SPARQL SHA1 Keyword - + - Namespace which provide classes relating to the property function extension point of SPARQL + Creates a new SHA1 Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Internal Class which parses SPARQL Expressions into Expression Trees + Token which represents the SPARQL SHA224 Keyword - + - Creates a new SPARQL Expression Parser + Creates a new SHA224 Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new SPARQL Expression Parser which has a reference back to 
a Query Parser + Token which represents the SPARQL SHA256 Keyword - Query Parser - + - Creates a new SPARQL Expression Parser + Creates a new SHA256 Keyword Token - Whether Aggregates are allowed in Expressions + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new SPARQL Expression Parser which has a reference back to a Query Parser + Token which represents the SPARQL SHA384 Keyword - Query Parser - Whether Aggregates are allowed in Expressions - + - Sets the Base Uri used to resolve URIs and QNames + Creates a new SHA384 Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Sets the Namespace Map used to resolve QNames + Token which represents the SPARQL SHA512 Keyword - + - Gets/Sets whether Aggregates are permitted in Expressions + Creates a new SHA512 Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets/Sets the Syntax that should be supported + Token which represents the SPARQL STR Keyword - + - Sets the Query Parser that the Expression Parser can call back into when needed + Creates a new STR Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets/Sets the locally scoped custom expression factories + Token which represents the SPARQL STRAFTER Keyword - + - Parses a SPARQL Expression + Creates a new STRAFTER Keyword Token - Tokens that the Expression should be parsed from - + Line the Keyword occurs on + Position the Keyword occurs at - + - Helper method for raising informative standardised Parser Errors + Token which represents the SPARQL STRBEFORE Keyword - The Error Message - The Token that is the cause of the Error - - + - Class for representing a Row of a Sparql Result Set + Creates a new STRBEFORE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new empty SPARQL Result which can only be filled by methods internal to the dotNetRDF Library + Token which represents the SPARQL CONTAINS Keyword - + 
- Creates a new SPARQL Result from the given Set + Creates a new CONTAINS Keyword Token - Set + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new SPARQL Result from the given Set which contains only the given variables in the given order + Token which represents the SPARQL STRDT Keyword - Set - Variables - + - Deserialization only constructor + Creates a new STRDT Keyword Token - Serialization Info - Streaming Context + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Value that is bound to the given Variable + Token which represents the SPARQL STRENDS Keyword - Variable whose Value you wish to retrieve - - Thrown if there is nothing bound to the given Variable Name for this Result - + - Gets the Value that is bound to the given Variable + Creates a new STRENDS Keyword Token - Variable whose Value you wish to retrieve - - Thrown if there is nothing bound to the given Variable Name for this Result + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Value that is bound at the given Index + Token which represents the SPARQL STRLANG Keyword - Index whose Value you wish to retrieve - - - As of 1.0.0 the order of variables in a result may/may not vary depending on the original query. If a specific variable list was declared dotNetRDF tries to preserve that order but this may not always happen depending on how results are received. 
- - Thrown if there is nothing bound at the given Index - + - Tries to get a value (which may be null) for the variable + Creates a new STRLANG Keyword Token - Variable - Value - True if the variable was present (even it was unbound) and false otherwise + Line the Keyword occurs on + Position the Keyword occurs at - + - Tries to get a non-null value for the variable + Token which represents the SPARQL STRLEN Keyword - Variable - Value - True if the variable was present and bound, false otherwise - + - Gets the number of Variables for which this Result contains Bindings + Creates a new STRLEN Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Internal Only Method for setting the Value of a Result + Token which represents the SPARQL STRSTARTS Keyword - Variable Name - Value bound to the Variable - + - Sets the variable ordering for the result + Creates a new STRSTARTS Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Checks whether a given Variable has a value (which may be null) for this result + Token which represents the SPARQL STRUUID Keyword - Variable Name - True if the variable is present, false otherwise - Returns true even if the value is null, use instead to see whether a non-null value is present for a variable. - + - Checks whether a given Variable has a non-null value for this result + Creates a new STRUUID Keyword Token - Variable Name - True if the variable is present and has a non-null value, false otherwise + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the set of Variables that are bound in this Result + Token which represents the SPARQL SUBSTR Keyword - + - Gets whether a Result is a Ground Result + Creates a new SUBSTR Keyword Token - - A Ground Result is a result which is considered to be a fixed fact. 
In practise this means it contains no Blank Nodes - + Line the Keyword occurs on + Position the Keyword occurs at - + - Removes all Variables Bindings where the Variable is Unbound + Token which represents the SPARQL SUM Keyword - + - Displays the Result as a comma separated string of pairs of the form ?var = value + Creates a new SUM Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Displays the Result as a comma separated string of paris of the form ?var = value where values are formatted using the given Node Formatter + Token which represents the SPARQL TIMEZONE Keyword - Node Formatter - - + - Override of the Equals method for Results + Creates a new TIMEZONE Keyword Token - - - Used implicitly in applying Distinct and Reduced modifiers to the Result Set + Line the Keyword occurs on + Position the Keyword occurs at - + - Override of the GetHashCode method for Results + Token which represents the SPARQL TZ Keyword - - Used implicitly in applying Distinct and Reduced modifiers to the Result Set - + - Enumerates the Bindings of Variable Names to Values in this Result + Creates a new TZ Keyword Token - - - Does not respect the ordering of the variables (if any) - + Line the Keyword occurs on + Position the Keyword occurs at - + - Enumerates the Bindings of Variable Names to Values in this Result + Token which represents the SPARQL UCASE Keyword - - + - Gets the data for serialization + Creates a new UCASE Keyword Token - Serialization Information - Streaming Context + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the schema for XML serialization + Token which represents the SPARQL UNDEF Keyword - - + - Writes the data for XML serialization (.Net serialization not the official SPARQL results serialization) + Creates a new UNDEF Keyword Token - XML Writer + Line the Keyword occurs on + Position the Keyword occurs at - + - Reads the data for XML deserialization (.Net serialization not the official SPARQL 
results serialization) + Token which represents the SPARQL UNION Keyword - XML Reader - + - Helper Class used in the execution of Sparql Queries + Creates a new UNION Keyword Token - - + Line the Keyword occurs on + Position the Keyword occurs at - + - Internal Empty Constructor for derived classes + Token which represents the SPARQL UNSAID Keyword - + - Creates a new Results Binder + Creates a new UNSAID Keyword Token - Query this provides Result Binding to + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Variables that the Binder stores Bindings for + Token which represents the SPARQL URI Keyword - + - Gets the enumeration of valid Binding IDs + Creates a new URI Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the set of Groups that result from the Query this Binder provides Binding to + Token which represents the SPARQL UUID Keyword - + - Gets the Value bound to a given Variable for a given Binding ID + Creates a new UUID Keyword Token - Variable Name - Binding ID - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Group referred to by the given ID + Token which represents the SPARQL VALUES Keyword - Group ID - - + - Checks whether the given ID refers to a Group + Creates a new VALUES Keyword Token - Group ID - + Line the Keyword occurs on + Position the Keyword occurs at - + - Sets the Group Context for the Binder + Token which represents the SPARQL WHERE Keyword - Whether you want to access the Group Contents or the Groups themselves - + - Disposes of a Result Binder + Creates a new WHERE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Results Binder used by Leviathan + Token which represents the SPARQL YEAR Keyword - + - Creates a new Leviathan Results Binder + Creates a new YEAR Keyword Token - Evaluation Context + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Value for a given Variable from the Set with 
the given Binding ID + Token which represents Mathematical Plus - Variable - Set ID - - + - Gets the Variables contained in the Input + Creates a new Mathematical Plus Token + Line the Token occurs on + Position the Token occurs at - + - Gets the IDs of Sets + Token which represents Mathematical Minus - + - Determines whether a given ID is for of a Group + Creates a new Mathematical Minus Token - Group ID - + Line the Token occurs on + Position the Token occurs at - + - Returns the Group with the given ID + Token which represents Mathematical Multiply - Group ID - - + - Sets the Group Context for the Binder + Creates a new Mathematical Multiply Token - Whether you want to access the Group Contents or the Groups themselves + Line the Token occurs on + Position the Token occurs at - + - Special Temporary Results Binder used during LeftJoin's + Token which represents Mathematical Divide - + - Creates a new LeftJoin Binder + Creates a new Mathematical Divide Token - Input Multiset + Line the Token occurs on + Position the Token occurs at - + - Gets the Value for a given Variable from the Set with the given Binding ID + Token which represents Logical Not Equals - Variable - Set ID - - + - Gets the Variables in the Input Multiset + Creates a new Logical Not Equals Token + Line the Token occurs on + Position the Token occurs at - + - Gets the IDs of Sets + Token which represents Logical Negation - + - Class of Sparql Variables + Creates a new Logical Negation Token + Line the Token occurs on + Position the Token occurs at - + - Creates a new Sparql Variable + Token which represents Logical And - Variable Name - Does this Variable appear in the Result Set? 
- + - Creates a new Sparql Variable + Creates a new Logical And Token - Variable Name (with leading ?/$ removed) + Line the Token occurs on + Position the Token occurs at - + - Creates a new Sparql Variable which is an Aggregate + Token which represents Logical Or - Variable Name (with leading ?/$ removed) - Aggregate Function - All Aggregate Variables are automatically considered as Result Variables - + - Creates a new Sparql Variable which is a Projection Expression + Creates a new Logical Or Token - Variable Name (with leading ?/$ removed) - Projection Expression + Line the Token occurs on + Position the Token occurs at - + - Variable Name + Token which represents Relational Less Than - + - Gets whether the Variable appears in the Result Set + Creates a new Relation Less Than Token + Line the Token occurs on + Position the Token occurs at - + - Gets whether the Variable is an Aggregate + Token which represents Relational Less Than or Equal To - + - Gets whether the Variable is a Projection Expression + Creates a new Relation Less Than or Equal To Token + Line the Token occurs on + Position the Token occurs at - + - Gets the Aggregate Function for this Variable + Token which represents Relational Greater Than - + - Gets the Projection Expression for this Variable + Creates a new Relation Greater Than Token + Line the Token occurs on + Position the Token occurs at - + - Get the String representation of the Variable + Token which represents Greater Than or Equal To - - + - Types of SPARQL Query + Creates a new Relation Greater Than or Equal To Token + Line the Token occurs on + Position the Token occurs at - + - Unknown + Token which represents the SPARQL Update ADD Keyword - + - Ask + Creates a new ADD Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Constuct + Token which represents the SPARQL Update CLEAR Keyword - + - Describe + Creates a new CLEAR Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - 
Describe All + Token which represents the SPARQL Update COPY Keyword - + - Select + Creates a new COPY Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Select Distinct + Token which represents the SPARQL Update CREATE Keyword - + - Select Reduced + Creates a new CREATE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Select All + Token which represents the SPARQL Update DATA Keyword - + - Select All Distinct + Creates a new DATA Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Select All Reduced + Token which represents the SPARQL Update DEFAULT Keyword - + - Types of Special SPARQL Query which may be optimised in special ways by the libraries SPARQL Engines + Creates a new DEFAULT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - The Query is of the form SELECT DISTINCT ?g WHERE {GRAPH ?g {?s ?p ?o}} + Token which represents the SPARQL Update DELETE Keyword - + - The Query has no applicable special optimisation + Creates a new DELETE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - The Query has not yet been tested to determine if special optimisations are applicable + Token which represents the SPARQL Update DROP Keyword - + - The Query is of the form ASK WHERE {?s ?p ?o} + Creates a new DROP Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Represents a SPARQL Query + Token which represents the SPARQL Update INSERT Keyword - - - Note: This class is purposefully sealed and most setters are private/protected internal since generally you create a query by using the to parse a query string/file. - - - To build a query programmatically you can use the class to generate a new query and then various extension methods to modify that query using a fluent style API. 
A query is not immutable - so if you use that API you are modifying the query, if you want to generate new queries by modifying an existing query consider using the method to take a copy of the existing query. - - - + - Creates a new SPARQL Query + Creates a new INSERT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Creates a new SPARQL Query + Token which represents the SPARQL Update INTO Keyword - Whether the Query is a Sub-query - + - Creates a copy of the query + Creates a new INTO Keyword Token - + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Namespace Map for the Query + Token which represents the SPARQL Update LOAD Keyword - + - Gets/Sets the Base Uri for the Query + Creates a new LOAD Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Default Graph URIs for the Query + Token which represents the SPARQL Update MOVE Keyword - + - Gets the Named Graph URIs for the Query + Creates a new MOVE Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the Variables used in the Query + Token which represents the SPARQL Update SILENT Keyword - + - Gets the Variables, QNames and URIs used in the Describe Query + Creates a new SILENT Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the type of the Query + Token which represents the SPARQL Update TO Keyword - + - Gets the Special Type of the Query (if any) + Creates a new TO Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets the top level Graph Pattern of the Query + Token which represents the SPARQL Update USING Keyword - + - Gets/Sets the Construct Template for a Construct Query + Creates a new USING Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets/Sets the Ordering for the Query + Token which represents the SPARQL Update WITH Keyword - + - Gets/Sets the Grouping for the Query 
+ Creates a new WITH Keyword Token + Line the Keyword occurs on + Position the Keyword occurs at - + - Gets/Sets the Having Clause for the Query + Static Class which defines the Integer Constants used for Token Types - + - Gets/Sets the VALUES Clause for the Query which are bindings that should be applied + Constants defining Token Types - + - Gets/Sets the ISparqlDescribe which provides the Describe algorithm you wish to use + Constants defining Token Types - - By default this will be the ConciseBoundedDescription (CBD) algorithm. - - + - Gets/Sets the locally scoped Algebra Optimisers that are used to optimise the Query Algebra in addition to (but before) any global optimisers (specified by SparqlOptimiser.AlgebraOptimisers) that are applied + Constants defining Token Types - + - Gets/Sets the locally scoped Expression Factories that may be used if the query is using the CALL() function to do dynamic function invocation + Constants defining Token Types - + - Gets/Sets the locally scoped Property Function factories that may be used by the when generating the algebra for the query + Constants defining Token Types - + - Gets the Result Set Limit for the Query + Constants defining Token Types - Values less than zero are counted as -1 which indicates no limit - + - Gets/Sets the Result Set Offset for the Query + Constants defining Token Types - Values less than zero are treated as 0 which indicates no offset - + - Gets/Sets the Query Execution Timeout in milliseconds + Constants defining Token Types - - - This Timeout (typically) only applies when executing the Query in memory. If you have an instance of this class and pass its string representation (using ToString()) you will lose the timeout information as this is not serialisable in SPARQL syntax. - - - + - Gets/Sets whether Partial Results should be returned in the event of Query Timeout + Constants defining Token Types - - - Partial Results (typically) only applies when executing the Query in memory. 
If you have an instance of this class and pass its string representation (using ToString()) you will lose the partial results information as this is not serialisable in SPARQL syntax. - - - + - Gets the Time taken to execute a Query + Constants defining Token Types - Thrown if you try and inspect the execution time before the Query has been executed - + - Gets whether the Query has an Aggregate as its Result + Constants defining Token Types - + - Gets whether Optimisation has been applied to the query + Constants defining Token Types - - This only indicates that an Optimiser has been applied. You can always reoptimise the query using a different optimiser by using the relevant overload of the Optimise() method. - - + - Gets whether this Query is a Sub-Query in another Query + Constants defining Token Types - + - Gets whether a Query has a DISTINCT modifier + Constants defining Token Types - + - Gets whether the Query has a Solution Modifier (a GROUP BY, HAVING, ORDER BY, LIMIT or OFFSET) + Constants defining Token Types - + - Adds a Variable to the Query + Constants defining Token Types - Variable Name - + - Adds a Variable to the Query + Constants defining Token Types - Variable Name - Does the Variable occur in the Output Result Set/Graph - + - Adds a Variable to the Query + Constants defining Token Types - Variable - + - Adds a Describe Variable to the Query + Constants defining Token Types - Variable/Uri/QName Token - + - Adds a Default Graph URI + Constants defining Token Types - Graph URI - + - Adds a Named Graph URI + Constants defining Token Types - Graph URI - + - Removes all Default Graph URIs + Constants defining Token Types - + - Removes all Named Graph URIs + Constants defining Token Types - + - Evaluates the SPARQL Query against the given Triple Store + Constants defining Token Types - Triple Store - - Either a SparqlResultSet or a Graph depending on the type of query executed - - + - Evaluates the SPARQL Query against the given Triple Store 
processing the results with the appropriate handler from those provided + Constants defining Token Types - RDF Handler - Results Handler - Triple Store - + - Evaluates the SPARQL Query against the given Dataset + Constants defining Token Types - Dataset - - Either a SparqlResultSet or a IGraph depending on the type of query executed - - + - Evaluates the SPARQL Query against the given Dataset processing the results with an appropriate handler form those provided + Constants defining Token Types - RDF Handler - Results Handler - Dataset - + - Processes the Query using the given Query Processor + Constants defining Token Types - SPARQL Query Processor - - + - Applies optimisation to a Query using the default global optimiser + Constants defining Token Types - + - Applies optimisation to a Query using the specific optimiser + Constants defining Token Types - Query Optimiser - + - Helper method which rewrites Blank Node IDs for Describe Queries + Constants defining Token Types - Triple - Mapping of IDs to new Blank Nodes - Graph of the Description - - + - Generates a String representation of the Query + Constants defining Token Types - - This method may not return a complete representation of the Query depending on the Query it is called on as not all the classes which can be included in a Sparql query currently implement ToString methods - + - Converts the Query into it's SPARQL Algebra representation (as represented in the Leviathan API) + Constants defining Token Types - - + - Applies Algebra Optimisations to the Query + Constants defining Token Types - Query Algebra - The Query Algebra which may have been transformed to a more optimal form - + - Gets whether the Query's ORDER BY clause can be optimised with Lazy evaluation + Constants defining Token Types - + - Gets whether a Query uses the Default Dataset against which it is evaluated + Constants defining Token Types - - - If the value is true then the Query will use whatever dataset is it evaluated against. 
If the value is false then the query changes the dataset at one/more points during its evaluation. - - - Things that may change the dataset and cause a query not to use the Default Dataset are as follows: -
    -
  • FROM clauses (but not FROM NAMED)
  • -
  • GRAPH clauses
  • -
  • Subqueries which do not use the default dataset
  • -
-
-
- + - - Namespace for Query Classes which provide querying capabilities on RDF. - - - Query capabilities are centered around support for the SPARQL standard. You can execute full SPARQL 1.1 queries over in-memory data or submit queries to remote SPARQL endpoints. - + Constants defining Token Types - + - - Namespace for Aggregate classes which implement Aggregate functions for SPARQL - + Constants defining Token Types - + - Namespace for aggregates provided by the Leviathan function library + Constants defining Token Types - + - A Custom aggregate which requires the Expression to evaluate to true for all Sets in the Group + Constants defining Token Types - + - Creates a new All Aggregate + Constants defining Token Types - Expression - + - Creates a new All Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Applies the Aggregate to see if the expression evaluates true for every member of the Group + Constants defining Token Types - Evaluation Context - Binding IDs - - - Does lazy evaluation - as soon as it encounters a false/error it will return false - - + - Gets the String Representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - A Custom aggregate which requires the Expression to evaluate to true for at least one of the Sets in the Group + Constants defining Token Types - + - Creates a new Any Aggregate + Constants defining Token Types - Expression - + - Creates a new Any Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifer applies - + - Applies the Aggregate to see if the expression evaluates true for any member of the Group + Constants defining Token Types - Evaluation Context - Binding IDs - - - Does lazy evaluation - as soon as it encounters a true it will return true - - + - Gets the String Representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the 
Aggregate + Constants defining Token Types - + - Class representing MEDIAN Aggregate Functions + Constants defining Token Types - + - Creates a new MEDIAN Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new MEDIAN Aggregate + Constants defining Token Types - Expression - + - Creates a new MEDIAN Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new MEDIAN Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifer applies - + - Applies the Median Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing MODE Aggregate Functions + Constants defining Token Types - + - Creates a new MODE Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new MODE Aggregate + Constants defining Token Types - Expression - + - Creates a new MODE Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new MODE Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Applies the Mode Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - A Custom aggregate which requires the Expression to evaluate to false/error for all Sets in the Group + Constants defining Token Types - + - Creates a new None Aggregate + Constants defining Token Types - Expression - + - Creates a new None Aggregate + Constants defining Token Types - 
Expression - Whether a DISTINCT modifer applies - + - Applies the Aggregate to see if the expression evaluates false/error for every member of the Group + Constants defining Token Types - Evaluation Context - Binding IDs - - - Does lazy evaluation - as soon as it encounters a true it will return false - - + - Gets the String Representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing NMAX Aggregate Functions + Constants defining Token Types - - Only operates over numeric data which is typed to one of the supported SPARQL Numeric types (integers, decimals and doubles) - - + - Creates a new NMAX Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new NMAX Aggregate + Constants defining Token Types - Expression - + - Creates a new NMAX Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new NMAX Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Applies the Numeric Max Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing NMIN Aggregate Functions + Constants defining Token Types - - Only operates over numeric data which is typed to one of the supported SPARQL Numeric types (integers, decimals and doubles) - - + - Creates a new NMIN Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new NMIN Aggregate + Constants defining Token Types - Expression - + - Creates a new NMIN Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new NMIN Aggregate + Constants defining 
Token Types - Expression - Whether a DISTINCT modifier applies - + - Applies the Numeric Min Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Namespace for the built-in SPARQL aggregates + Constants defining Token Types - + - Class representing AVG Aggregate Functions + Constants defining Token Types - + - Creates a new AVG Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new AVG Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Creates a new AVG Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new AVG Aggregate + Constants defining Token Types - Expression - + - Applies the Average Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing COUNT Aggregate Function + Constants defining Token Types - + - Creates a new COUNT Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new Count Aggregate + Constants defining Token Types - Expression - + - Counts the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing COUNT(*) Aggregate Function + Constants defining Token Types - - Differs from a COUNT in that it justs counts rows in the results - 
- + - Creates a new COUNT(*) Aggregate + Constants defining Token Types - + - Counts the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Gets the Arguments of the Aggregate + Constants defining Token Types - + - Class representing COUNT(DISTINCT *) Aggregate Function + Constants defining Token Types - + - Creates a new COUNT(DISTINCT*) Aggregate + Constants defining Token Types - + - Counts the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Gets the Arguments of the Aggregate + Constants defining Token Types - + - Class representing COUNT(DISTINCT ?x) Aggregate Function + Constants defining Token Types - + - Creates a new COUNT(DISTINCT ?x) Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new COUNT DISTINCT Aggregate + Constants defining Token Types - Expression - + - Counts the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Gets the Arguments of the Aggregate + Constants defining Token Types - + - Class representing GROUP_CONCAT Aggregate + Constants defining Token Types - + - Creates a new GROUP_CONCAT aggregate + Constants defining Token Types - Expression - Should a distinct modifer be applied - + - Creates a new GROUP_CONCAT aggregate + Constants defining Token Types - Expression - + - Creates a new GROUP_CONCAT aggregate + Constants defining 
Token Types - Expression - Separator Expression - Should a distinct modifer be applied - + - Creates a new GROUP_CONCAT aggregate + Constants defining Token Types - Expression - Separator Expression - + - Applies the aggregate over the given bindings + Constants defining Token Types - Evaluation Context - Binding IDs - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the value of the aggregate for the given binding + Constants defining Token Types - Evaluation Context - Binding ID - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing MAX Aggregate Functions + Constants defining Token Types - + - Creates a new MAX Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new MAX Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Creates a new MAX Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new MAX Aggregate + Constants defining Token Types - Expression - + - Creates a new MAX Aggregate + Constants defining Token Types - Distinct Modifier - Expression - + - Applies the Max Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing MIN Aggregate Functions + Constants defining Token Types - + - Creates a new MIN Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new MIN Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Creates a new MIN Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new MIN Aggregate + Constants defining 
Token Types - Expression - + - Applies the Min Aggregate function to the results - - Evaluation Context - Binding IDs over which the Aggregate applies - + Constants defining Token Types + - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing the SAMPLE aggregate + Constants defining Token Types - + - Creates a new SAMPLE Aggregate + Constants defining Token Types - Expression - + - Applies the SAMPLE Aggregate + Constants defining Token Types - Evaluation Context - Binding IDs - - + - Gets the String representation + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Class representing SUM Aggregate Functions + Constants defining Token Types - + - Creates a new SUM Aggregate + Constants defining Token Types - Variable Expression - Whether a DISTINCT modifier applies - + - Creates a new SUM Aggregate + Constants defining Token Types - Expression - Whether a DISTINCT modifier applies - + - Creates a new SUM Aggregate + Constants defining Token Types - Variable Expression - + - Creates a new SUM Aggregate + Constants defining Token Types - Expression - + - Applies the Sum Aggregate function to the results + Constants defining Token Types - Evaluation Context - Binding IDs over which the Aggregate applies - - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Namespace for aggregates provided by the XPath function library + Constants defining Token Types - + - Represents the XPath fn:string-join() aggregate + Constants defining Token Types - + - Separator Expression + Constants defining Token Types - + - Creates a new XPath String Join aggregate which uses no separator + Constants defining Token Types - Expression - + - Creates a new XPath String Join aggregate + 
Constants defining Token Types - Expression - Separator Expression - + - Applies the Aggregate in the given Context over the given Binding IDs + Constants defining Token Types - Evaluation Context - Binding IDs - - + - Gets the value of a member of the Group for concatenating as part of the result for the Group + Constants defining Token Types - Evaluation Context - Binding ID - - + - Gets the separator to use in the concatenation + Constants defining Token Types - Evaluation Context - Binding ID - - + - Gets the String representation of the function + Constants defining Token Types - - + - Gets the Functor of the Expression + Constants defining Token Types - + - Abstract Base Class for Aggregate Functions + Constants defining Token Types - + - Expression that the aggregate operates over + Constants defining Token Types - + - Whether a DISTINCT modifer is applied + Constants defining Token Types - + - Base Constructor for Aggregates + Constants defining Token Types - Expression that the aggregate is over - + - Base Constructor for Aggregates + Constants defining Token Types - Expression that the aggregate is over - Whether a Distinct modifer is applied - + - Applies the Aggregate to the Result Binder + Constants defining Token Types - Evaluation Context - - + - Applies the Aggregate to the Result Binder + Constants defining Token Types - Evaluation Context - Enumerable of Binding IDs over which the Aggregate applies - - + - Expression that the Aggregate executes over + Constants defining Token Types - + - Gets the String representation of the Aggregate + Constants defining Token Types - - + - Gets the Type of the Expression + Constants defining Token Types - + - Gets the Functor of the Aggregate + Constants defining Token Types - + - Gets the Arguments of the Expression + Constants defining Token Types - + - Interface for SPARQL Aggregates which can be used to calculate aggregates over Results + Constants defining Token Types - + - Applies the Aggregate to the 
Result Binder and returns a single Node as a Result + Constants defining Token Types - Evaluation Context - + - Applies the Aggregate to the Result Binder and returns a single Node as a Result + Token which represents the Start of the Input - Evaluation Context - Enumerable of Binding IDs which the aggregate is applied over - + - Gets the Expression that the Aggregate is applied to + Creates a new Beginning of File Token - + - - Namespace for classes used in executing CONSTRUCT queries - + Token which represents the End of the Input - + - Context used for Constructing Triples in SPARQL Query/Update + Creates a new End of File Token + Line at which the File Ends + Column as which the File Ends - + - Creates a new Construct Context + Token which represents the End of a Line - Graph to construct Triples in - Set to construct from - Whether Blank Nodes bound to variables should be preserved as-is - - - Either the Set or Graph parameters may be null if required - - - + - Creates a new Construct Context + Creates a new End of Line Token - Factory to create nodes with - Set to construct from - Whether Blank Nodes bound to variables should be preserved as-is - - - Either the Set or Factory parameters may be null if required - - + Line + Column at which the line ends - + - Gets the Set that this Context pertains to + Token which represents the @ Character - + - Gets the Graph that Triples should be constructed in + Creates a new @ Token + Line at which the @ occurs + Column at which the @ occurs - + - Gets whether Blank Nodes bound to variables should be preserved + Token which represents the . Character - + - Creates a new Blank Node for this Context + Creates a new . Token - ID - - - - If the same Blank Node ID is used multiple times in this Context you will always get the same Blank Node for that ID - - + Line at which the . occurs + Column at which the . 
occurs - + - Creates a Node for the Context + Token which represents the ; Character - Node - - - - In effect all this does is ensure that all Nodes end up in the same Graph which may occassionally not happen otherwise when Graph wrappers are involved - - - + - - Namespace for classes which implement algorithms for executing DESCRIBE queries - + Creates a new ; Token + Line at which the ; occurs + Column at which the ; occurs - + - Abstract Base Class for SPARQL Describe Algorithms which provides BNode rewriting functionality + Token which represents the , Character - + - Gets the Description Graph based on the Query Results from the given Evaluation Context + Creates a new , Token - SPARQL Evaluation Context - + Line at which the , occurs + Column at which the , occurs - + - Gets the Description Graph based on the Query Results from the given Evaluation Context passing the resulting Triples to the given RDF Handler + Tokens which represents the tab character - RDF Handler - SPARQL Evaluation Context - + - Generates the Description for each of the Nodes to be described + Creates a new Tab Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the tab occurs + Column at which the tab occurs - + - Gets the Nodes that the algorithm should generate the descriptions for + Token which represents the # Character - Factory to create Nodes in - SPARQL Evaluation Context - - + - Helper method which rewrites Blank Node IDs for Describe Queries + Creates a new # Token - Triple - Mapping of IDs to new Blank Nodes - Factory to create Nodes in - + Line at which the # occurs + Column at which the # occurs - + - Computes a Concise Bounded Description for all the Values resulting from the Query + Token which repreents the _ Character - - - The Description returned is all the Triples for which a Value is a Subject and with any Blank Nodes expanded to include Triples with the Blank Node as the Subject - - - + - Generates the Description for each of the 
Nodes to be described + Creates a new _ Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the _ occurs + Column at which the _ occurs - + - Computes a Labelled Description for all the Values resulting from the Query + Token which represents the ^^ sequence used for Data Type specification in some RDF Syntaxes - - - The Description returned is all the Triples for which a Value is a Subject and with any Blank Nodes expanded to include their rdfs:label property if present - - - + - Generates the Description for each of the Nodes to be described + Creates a new ^^Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the ^^ occurs + Column at which the ^^ occurs - + - Computes the merge of the Minimal Spanning Graphs for all the Values resulting from the Query + Token which represents the ^ Character used for Reverse Path Traversal in somme RDF Syntaxes - + - Generates the Description for each of the Nodes to be described + Creates a new ^ Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the ^ occurs + Column at which the ^ occurs - + - Computes a Symmetric Concise Bounded Description for all the Values resulting from the Query + Token which represents the ! Character used for Forward Path Traversal in some RDF Syntaxes - - - The Description returned is all the Triples for which a Value is a Subject/Object and with any Blank Nodes expanded to include Triples with the Blank Node as the Subject - - - + - Generates the Description for each of the Nodes to be described + Creates a new ! Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the ! occurs + Column at which the ! 
occurs - + - Computes a Simple Subject Object Description for all Values resulting from the Query + Token which represents Comments - - - The Description returned is all the Triples for which a Value is the Subject or Object - this description does not expand any Blank Nodes - - - + - Generates the Description for each of the Nodes to be described + Creates a new Comment Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + The Comment + Line on which the Comment occurs + Column at which the Comment starts + Column at which the Comment ends - + - Computes a Description for all the results such that the description is the merge of all the Graphs named with a resulting URI + Token which represents the [ Character - + - Generates the Description for each of the Nodes to be described + Creates a new [ Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the [ occurs + Column at which the [ occurs - + - Computes a Simple Subject Description for all Values resulting from the Query + Token which represents the ] Character - - - The Description returned is all the Triples for which a Value is the Subject - this description does not expand any Blank Nodes - - - + - Generates the Description for each of the Nodes to be described + Creates a new ] Token - RDF Handler - SPARQL Evaluation Context - Nodes to be described + Line at which the ] occurs + Column at which the ] occurs - + - Interface for classes that implement the DESCRIBE functionality of SPARQL + Token which represents the ( Character - - - This is designed so that developers can introduce their own DESCRIBE algorithms as required - - - + - Generates a Graph which is the description of the resources resulting from the Query + Creates a new ( Token - SPARQL Evaluation Context - + Line at which the ( occurs + Column at which the ( occurs - + - Generates the Description Graph based on the Query Results from the given Evaluation Context passing the resulting 
Triples to the given RDF Handler + Token which represents the ) Character - RDF Handler - SPARQL Evaluation Context - + - - Namespace containing classes used to apply GROUP BY clauses to SPARQL queries - + Creates a new ) Token + Line at which the ) occurs + Column at which the ) occurs - + - Interface for Classes that represent SPARQL GROUP BY clauses + Token which represents the { Character - + - Applies the Grouping to a Result Binder + Creates a new { Token - Evaluation Context - + Line at which the { occurs + Column at which the { occurs - + - Applies the Grouping to a Result Binder subdividing the Groups from the previous Group By clause into further Groups + Token which represents the } Character - Evaluation Context - Groups - - + - Gets/Sets the child GROUP BY Clause + Creates a new } Token + Line at which the } occurs + Column at which the } occurs - + - Gets the Variables used in the GROUP BY + Token which reprsents the := Assignment Operator - + - Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables + Creates a new := Token + Line on which the := occurs + Position at which the := occurs - + - Gets the Expression used to GROUP BY + Token which represents the ? Character - + - Gets/Sets the Variable the value of the GROUP BY expression should be bound to (may be null if not bound to anything) + Creates a new ? Token + Line at which the ? occurs + Column at which the ? 
occurs - + - Abstract Base Class for classes representing Sparql GROUP BY clauses + Token which represents the | Character - + - Child Grouping + Creates a new | Token + Line at which the | occurs + Column at which the | occurs - + - Gets/Sets the Child GROUP BY Clause + Token which represents a Prefix Directive - + - Applies the Grouping to the Binder + Creates a new Prefix Direction Token - Evaluation Context - + Line at which the Prefix Directive occurs + Column at which the Prefix Directive occurs - + - Applies the Grouping to the Binder subdividing Groups from a previous Grouping + Token which represents the Prefix specified after a Prefix Directive - Evaluation Context - Groups to subdivide - - + - Gets the Variables involved in this Group By + Creates a new Prefix Token + Prefix + Line at which the Prefix occurs + Column at which the Prefix starts + Column at which the Prefix ends - + - Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables + Token which represents a Base Directive - + - Gets the Expression used to GROUP BY + Creates a new Base Directive Token + Line at which the Base Directive occurs + Column at which the Base Directive occurs - + - Gets/Sets the Variable that the grouped upon value should be assigned to + Token which represents a Keyword Directive - + - Represents a Grouping on a given Variable + Creates a new Keyword Directive Token + Line at which the Keyword Directive occurs + Column at which the Keyword Directive occurs - + - Creates a new Group By which groups by a given Variable + Token which represents a For All Quantifier - Variable Name - + - Creates a new Group By which groups by a given Variable and assigns to another variable + Creates a new For All Quantifier Token - Variable Name - Assign Variable + Line at which the For All Quantifier occurs + Column at which the For All Quantifier occurs - + - Applies a Grouping on a given Variable to the Binder + Token which 
represents a For Some Quantifier - Evaluation Context - - + - Applies a Grouping on a given Variable to the Binder Groups from a previous Grouping + Creates a new For Some Quantifier Token - Evaluation Context - Binder Group to subgroup - + Line at which the For Some Quantifier occurs + Column at which the For Some Quantifier occurs - + - Gets the Variables used in the GROUP BY + Token which represents URIs - + - Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables + Creates a new Uri Token + Value of the Uri including the < > deliminators + Line the Uri occurs on + Column the Uri starts at + Column the Uri ends at - + - Gets the Variable Expression Term used by this GROUP BY + Token which represents QNames - + - Gets the String representation of the GROUP BY + Creates a new QName Token - + QName + Line the QName occurs on + Column the QName starts at + Column the QName ends at - + - Represents a Grouping on a given Expression + Token which represents Plain (Unquoted) Literals - + - Creates a new Group By which groups by a given Expression + Creates a new Plain Literal Token - Expression + Literal Value + Line the Literal occurs on + Column the Literal starts at + Column the Literal ends at - + - Applies a Grouping on a given Expression to the Binder + Token which represents Literals - Evaluation Context - - + - Applies a Grouping on a given Variable to the Binder Groups from a previous Grouping + Creates a new Literal Token - Evaluation Context - Binder Group to subgroup - + Literal Value including the Quote deliminators + Line the Literal occurs on + Column the Literal starts at + Column the Literal ends at - + - Gets the Fixed Variables used in the Grouping + Creates a new Literal Token + Literal Value including the Quote deliminators + Line the Literal starts on + Line the Literal ends on + Column the Literal starts at + Column the Literal ends at + + Most syntaxes use different deliminators for 
multiline literals and will usually use a LongLiteralToken instead but some formats like CSV only use quotes for multiline literals and use no delimitors for single line literals + - + - Gets the Projectable Variables used in the GROUP BY i.e. Variables that are grouped upon and Assigned Variables + Token which represents Long Literals (allows multi-line values) - + - Gets the Expression used to GROUP BY + Creates a new Long Literal Token + Literal Value including the Triple Quote deliminators + Line the Long Literal starts on + Line the Long Literal ends on + Column the Literal starts at + Column the Literal ends at - + - Gets the String representation of the GROUP BY + Token which represents the Language Specifier for a Literal - - + - - Namespace for Inference Classes which provide Inferencing capabilities on RDF - these features are currently experimental and may not work as expected. - - - Classes which implement reasoning must implement the IInferenceEngine interface, these can then be attached to classes which implement the IInferencingTripleStore interface or they can be used to apply inference to any IGraph implementation with the inferred Triples optionally output to a separate Graph. - - - OWL reasoning currently has extremely limited support, we provide a Pellet client in the Pellet namespace which can be used to connect to a Pellet Server but that currently only provides reasoning on external knowledge bases on the Pellet Server - + Creates a new Language Specifier Token + Language Specifier + Line the Literal occurs on + Column the Literal starts at + Column the Literal ends at - + - - Namespace which provides a client for interacting with a Pellet Server - - - Due to Pellet Server being a relatively new product it is currently only possible to reason over external knowledge bases on a Pellet Server and not to use Pellet to reason over in-memory data. 
As Pellet Server is updated in the future this client will be updated to take advantage of those updates and to eventually provide for in-memory reasoning. You may also want to consider using the which is the triple store from the same people who developed Pellet and which integrates some Pellet capabilities. - + Token which represents the Data Type for a Literal - + - - Namespace which provides classes which represent the Services offered by a Pellet Server knowledge base - + Creates a new DataType Token + DataType Uri including the < > deliminators or a QName + Line the DataType occurs on + Column the DataType starts at + Column the DataType ends at - + - Represents the Cluster Service provided by a Pellet Knowledge Base + Token which represents Literals with Language Specifiers - + - Gets a list of lists expressing clusters within the Knowledge Base + Creates a new Literal with Language Specifier Token - Number of Clusters - + Literal Token + Language Specifier Token - + - Gets a list of lists expressing clusters within the Knowledge Base + The Language Specifier for this Literal - Number of Clusters - QName of a Type to cluster around - - + - Gets the raw Cluster Graph for the Knowledge Base + Token which represents Literals with Data Types - Number of Clusters - - + - Gets the raw Cluster Graph for the Knowledge Base + Creates a new Literal with DataType Token - Number of Clusters - QName of a Type to Cluster around - + Literal Token + DataType Token - + - Gets a list of lists expressing clusters within the Knowledge Base + The Data Type Uri/QName for this Literal - Number of Clusters - Callback to be invoked when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- - + - Gets a list of lists expressing clusters within the Knowledge Base + Token which represents Graph Literals - Number of Clusters - QName of a Type to cluster around - Callback to be invoked when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Gets the raw Cluster Graph for the Knowledge Base + Creates a new Graph Literal Token - Number of Clusters - Callback to be invoked when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - + Value of the Graph Literal + Line the Graph Literal starts on + Line the Graph Literal ends on + Column the Graph Literal starts at + Column the Graph Literal ends at - + - Gets the raw Cluster Graph for the Knowledge Base + Token which represents anonymous Blank Nodes - Number of Clusters - QName of a Type to Cluster around - Callback to be invoked when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- - + - Represents the Similarity Service provided by a Pellet Knowledge Base + Creates a new Anonymous Blank Node Token + Line the Blank Node occurs on + Column the Blank Node occurs at - + - Creates a new Similarity Service for a Pellet Knowledge Base + Token which represents named Blank Nodes - Service Name - JSON Object - + - Gets a list of key value pairs listing Similar Individuals and their Similarity scores + Creates a new Blank Node Token - Number of Similar Individuals - QName of a Individual to find Similar Individuals to - + ID of the Blank Node + Line the Blank Node occurs on + Column the Blank Node starts at + Column the Blank Node ends at - + - Gets the raw Similarity Graph for the Knowledge Base + Token which represents Blank Node Collections - Number of Similar Individuals - QName of a Individual to find Similar Individuals to - - + - Gets a list of key value pairs listing Similar Individuals and their Similarity scores + Creates a new Blank Node Collection Token - Number of Similar Individuals - QName of a Individual to find Similar Individuals to - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Contents of the Blank Node Collection + Line the Collection starts on + Line the Collection ends on + Column the Collection starts at + Column the Collection ends at - + - Gets the raw Similarity Graph for the Knowledge Base + The Tokens contained in the Blank Node Collection - Number of Similar Individuals - QName of a Individual to find Similar Individuals to - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Represents the Integrity Constraint Validation Service provided by a Pellet Knowledge Base + Token representing the 'a' Keyword - + - Creates a new Integrity Constraint Validation Service + Creates a new 'a' Keyword Token - Service Name - JSON Object + Line the Keyword occurs on + Column the Keyword occurs at - + - Extracts an RDF Dataset which details the Constraints violated (if any) and whether Constraints are satisified + Token representing the 'is' Keyword - - + - Extracts an RDF Dataset which details the Constraints violated (if any) and whether Constraints are satisified + Creates a new 'is' Keyword Token - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Line the Keyword occurs on + Column the Keyword occurs at - + - Represents the Predict Service of a Pellet Knowledge Base + Token representing the 'of' Keyword - + - Creates a new Predict Service for a Pellet Knowledge Base + Creates a new 'of' Keyword Token - Service Name - JSON Object + Line the Keyword occurs on + Column the Keyword occurs at - + - Gets the list of Predictions for the given Individual and Property + Token representing the '=>' implies Syntax - QName of an Inidividual - QName of a Property - - + - Gets the Raw Predictions Graph from the Knowledge Base + Creates a new '=>' Keyword Token - QName of an Individual - QName of a Property - + Line the Keyword occurs on + Column the Keyword occurs at - + - Gets the list of Predictions for the given Individual and Property + Token representing the '>=' implied by Syntax - QName of an Inidividual - QName of a Property - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Gets the Raw Predictions Graph from the Knowledge Base + Creates a new '<=' Keyword Token - QName of an Individual - QName of a Property - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Line the Keyword occurs on + Column the Keyword occurs at - + - Represents the Namespace Service provided by a Pellet Server knowledge base + Token representing the '=' equality Syntax - + - Creates a new Namespace Service + Creates a new '=' Keyword Token - Service Name - JSON Object + Line the Keyword occurs on + Column the Keyword occurs at - + - Gets the Namespaces used in the Knowledge Base + Token representing the use of a Custom Keyword - - + - Gets the Namespaces used in the Knowledge Base + Creates a new Custom Keyword Token - Callback to invoke when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - + Custom Keyword + Line the Keyword occurs on + Column the Keyword starts at + Column the Keyword ends at - + - Represents the Classify Service provided by a Pellet Server + Token representing the definition of a Custom Keyword - + - Creates a new Classify Service + Creates a new Custom Keyword Definition Token - Service Name - JSON Object + Custom Keyword Definition + Line the Keyword occurs on + Column the Keyword starts at + Column the Keyword ends at - + - Extracts the Graph which comprises the class hierarchy + Token representing Variables - + - Extracts the Graph which comprises the class hierarchy + Creates a new Variable Token - Callback for when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Variable + Line the Variable occurs on + Column the Variable starts at + Column the Variable ends at - + - Represents the Consistency Service provided by a Pellet Server + Tokeniser for TriG (Turtle with Named Graphs) RDF Syntax - + - Creates a new Consistency Service + Creates a new TriG Tokeniser which reads Tokens from the given Stream - Service Name - JSON Object + Stream to read Tokens from - + - Returns whether the Knowledge Base is consistent + Creates a new TriG Tokeniser which reads Tokens from the given Stream using the specified syntax + Stream to read Tokens from + Syntax - + - Determines whether the Knowledge Base is consistent + Creates a new TriG Tokeniser which reads Tokens from the given Stream - Callback to invoke when the operation completes - State to be passed to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Stream to read Tokens from - + - Represents the Explain Service provided by a Pellet Server + Creates a new TriG Tokeniser which reads Tokens from the given Stream + Stream to read Tokens from + Syntax - + - Base Query for use with the Explain Service + Creates a new TriG Tokeniser which reads Tokens from the given Input + Input to read Tokens from - + - Creates a new Explain Service + Creates a new TriG Tokeniser which reads Tokens from the given Input - Service Name - JSON Object + Input to read Tokens from + Syntax - + - Gets a Graph explaining the result of the SPARQL Query + Gets the next available Token from the Input Stream - SPARQL Query - + - Gets a Graph explaining the result of the SPARQL Query + Tokeniser for tokenising TSV inputs - SPARQL Query - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Represents the Explan Unsatisfiable Service provided by a Pellet Server + Creates a new TSV Tokeniser + Text Reader - + - Creates a new Explain Unsatisfiable Service + Creates a new TSV Tokeniser - Service Name - JSON Object + Stream Reader - + - Gets a Graph explaining why a Class is unsatisfiable + Gets the next available token from the input - Class - + - Gets a Graph explaining why a Class is unsatisfiable + A Class for Reading an Input Stream and generating Turtle Tokens from it - Class - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- - + - Represents the Explain Instance Service provided by a Pellet Server + Creates a new Turtle Tokeniser + The Input Stream to generate Tokens from - + - Creates a new Explain Instance Service + Creates a new Turtle Tokeniser - Service Name - JSON Object + The Input Stream to generate Tokens from - + - Gets a Graph explaining why an Instance is of the given Class + Creates a new Turtle Tokeniser - Instance - Class - + Input to read from - + - Gets a Graph explaining why an Instance is of the given Class + Creates a new Turtle Tokeniser - Instance - Class - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - + The Input Stream to generate Tokens from + Turtle Syntax - + - Represents the Explain Subclass Service provided by a Pellet Server + Creates a new Turtle Tokeniser + The Input Stream to generate Tokens from + Turtle Syntax - + - Creates a new Explain Subclass Service + Creates a new Turtle Tokeniser - Service Name - JSON Object + Input to read from + Turtle Syntax - + - Gets a Graph explaining why the given Class is a subclass of the given Super Class + Gets the next parseable Token from the Input or raises an Error - Class - Super Class + Occurs when a Token cannot be parsed - + - Gets a Graph explaining why the given Class is a subclass of the given Super Class + Internal Helper method which attempts to get a Directive Token - Class - Super Class - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + - + - Represents the Explain Inconsistent Service provided by a Pellet Server + Internal Helper method which attempts to get a Prefix Token + - + - Creates a new Explain Inconsistent Service + Internal Helper method which attempts to get a QName Token - Service Name - JSON Object + + In fact this function may return a number of Tokens depending on the characters it finds. It may find a QName, Plain Literal, Blank Node QName (with ID) or Keyword. QName & Keyword Validation is carried out by this function - + - Gets a Graph explaining why the Knowledge Base is inconsistent + Internal Helper method which attempts to get a Language Specifier Token - + - Gets a Graph explaining why the Knowledge Base is inconsistent + Internal Helper method which attempts to get a Date Type Token - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + - + - Represents the Explain Property Service provided by a Pellet Server + Internal Helper method which attempts to get a Comment Token + - + - Creates a new Explain Property Service + + Namespace for Validator classes that can be used to validate various forms of syntax + - Service Name - JSON Object - + - Gets a Graph explaining why the given Triple was derived + Interface for classes which can validate Syntax - Subject - Predicate - Object - - + - Gets a Graph explaining why the given Triple was derived + Validates the given Data - Triple + Data + - + - Gets a Graph explaining why the given Triple was derived + Interface for Validation Results - Subject - Predicate - Object - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Gets a Graph explaining why the given Triple was derived + Gets whether the Syntax was valid - Triple - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- - + - Represents the SPARQL Query Service provided by a Pellet Server knowledge base + Gets an informational message about the validity/invalidity of the Syntax - + - Creates a new SPARQL Query Service + Gets an enumeration of any warning messages - Service Name - JSON Object - + - Makes a SPARQL Query against the Knowledge Base + Gets any validation error - SPARQL Query - - + - Processes a SPARQL Query against the Knowledge Base passing the results to the RDF or Results handler as appropriate + Gets any result object that was parsed from the syntax - RDF Handler - Results Handler - SPARQL Query - + - Makes a SPARQL Query against the Knowledge Base + Syntax Validator for RDF Dataset Formats - SPARQL Query - Callback to invoke for queries that return a Graph - Callback to invoke for queries that return a Result Set - State to pass to whichever callback function is invoked - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - - + - Processes a SPARQL Query against the Knowledge Base passing the results to the RDF or Results handler as appropriate + Creates a new RDF Dataset Syntax Validator - RDF Handler - Results Handler - SPARQL Query - Callback to invoke once handling of results has completed - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Dataset Parser - + - Represents the Realize Service provided by a Pellet Server + Determines whether the data provided is valid syntax + Data + - + - Creates a new Realize Service + Syntax Validator for validating RDF Graph syntaxes - Service Name - JSON Object - + - Gets the Graph which comprises the class hierarchy and individuals of those classes + Parser to use - - + - Gets the Graph which comprises the class hierarchy and individuals of those classes + Creates a new RDF Syntax Validator using the given Parser - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. - + Parser - + - Represents the Search Service provided by a Pellet Server + Validates the given data to see if it is valid RDF Syntax + Data + - + - Creates a new Search Service + Syntax Validator for RDF Graph syntaxes which is strict (any warnings are treated as errors) - Service Name - JSON Object - + - Gets the list of Search Results which match the given search term + Creates a new Strict RDF Syntax Validator - Search Term - A list of Search Results representing Nodes in the Knowledge Base that match the search term + Parser - + - Gets the list of Search Results which match the given search term + Validates the data to see if it is valid RDF syntax which does not produce any warnings - Search Term - Callback to invoke when the operation completes - State to pass to the callback - - If the operation succeeds the callback will be invoked normally, if there is an error the callback will be invoked with a instance of passed as the state which provides access to the error message and the original state passed in. 
- + Data + - + - Represents a Search Result returned from the + Syntax Validator for SPARQL Queries - + - Creates a new Search Service Result + Creates a new SPARQL Query Validator - Result Node - Result Score - + - Gets the Node for this Result + Creates a new SPARQL Query Validator using the given Syntax + Query Syntax - + - Gets the Score for this Result + Creates a new SPARQL Query Validator using the given Query Parser + Query Parser - + - Gets the String representation of the Result + Validates whether the given Data is a valid SPARQL Query + Data - - - Represents some Service provided by a Pellet Server which the library does not explicitly support - - - + - Creates a new Unsupported Service + Syntax Validator which validates SPARQL Results formats - Service Name - JSON Object - + - Callback that occurs when the connection to a Pellet Server instance is ready for use + Creates a new SPARQL Results Format validator that uses the given parser - Pellet Server - State + SPARQL Results Parser - + - Callback for Pellet Constistency Service + Validates the syntax to see if it is valid SPARQL Results - Whether the Knowledge Base is consistent - State + Data to validate + - + - Callback for Pellet Search Service + A Syntax Validator for validating SPARQL Update Commands - Pellet Search Results - State - + - Callback for Pellet Cluster Service + Validates whether the given data is a SPARQL Update Command - Clusters - State + Data + - + - Callback for Pellet Similarity Service + Represents Syntax Validation Results - Similarity Results - State - + - Represents the Service Endpoint for a Service provided by a Pellet Server + Creates new Syntax Validation Results + Whether the Syntax was valid + Validation Message - + - Creates a new Service Endpoint instance + Creates new Syntax Validation Results - JSON Object representing the Endpoint + Whether the Syntax was valid + Validation Message + Results Object - + - Gets the URI of the Endpoint + Creates new Syntax Validation 
Results + Whether the Syntax was valid + Validation Message + Results Object + Enumeration of Warnings - + - Gets the HTTP Methods supported by the Endpoint + Creates new Syntax Validation Results + Whether the Syntax was valid + Validation Message + Results Object + Enumeration of Warnings + Error that occurred - + - Represents a Knowledge Base on a Pellet Server + Creates new Syntax Validation Results + Whether the Syntax was valid + Validation Message + Error that occurred - + - Creates a new Knowledge Base + Creates new Syntax Validation Results - JSON Token for the Object that represents the Service + Validation Message + Error that occurred - + - Gets the Name of the Knowledge Base + Whether the Syntax was valid - + - Gets the Services provided by this Knowledge Base + Gets the Validation Message - + - Gets whether a Service is supported by the Knowledge Base + Gets the Warnings that were produced - Service Type - - + - Gets whether a Service is supported by the Knowledge Base + Gets the Error that occurred - Service Type - - + - Gets whether a Service is supported by the Knowledge Base + Gets the Result Object that was produced - Service Name - - + - Gets the first available implementation of the given Service Type for this Knowledge Base + Abstract Base Class for parsers that handle GZipped input - Service Type - - Either the Service or a Null if the Knowledge Base does not expose a Service of the given Type - + + + While the normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such + + - + - Gets the first available implementation of the desired Service Type + Creates a new GZipped input parser - Desired Service Type - + The underlying parser to use - + - Gets the first available Service with the given name for this Knowledge Base + Loads a RDF dataset from GZipped input - Service Name - - Either the Service or a Null if the Knowledge Base does 
not expose a Service with the given name - + Triple Store to load into + File to load from - + - Gets all the available implementations of the given Service Type for this Knowledge Base + Loads a RDF dataset from GZipped input - Service Type - + Triple Store to load into + Input to load from - + - Gets all the available services with the given name for this Knowledge Base + Loads a RDF dataset from GZipped input - Service Name - + RDF Handler to use + File to load from - + - Helper class provided constants and helper methods for use with Pellet Server + Loads a RDF dataset from GZipped input + RDF Handler to use + Input to load from - + - Constants for Service Names for Services that may be provided by a Pellet Server + Warning Event raised on non-fatal errors encountered parsing - + - Constants for Service Names for Services that may be provided by a Pellet Server + Helper method for raising warning events + Warning Message - + - Constants for Service Names for Services that may be provided by a Pellet Server + Gets the description of the parser + - + - Constants for Service Names for Services that may be provided by a Pellet Server + Parser for loading GZipped NQuads - + - Constants for Service Names for Services that may be provided by a Pellet Server + Creates a new GZipped NQuads Parser - + - Constants for Service Names for Services that may be provided by a Pellet Server + Parser for loading GZipped TriG - + - Constants for Service Names for Services that may be provided by a Pellet Server + Creates a new GZipped TriG Parser - + - Constants for Service Names for Services that may be provided by a Pellet Server + Parser for loading GZipped TriX - + - Constants for Service Names for Services that may be provided by a Pellet Server + Creates a new GZipped TriX Parser - + - Constants for Service Names for Services that may be provided by a Pellet Server + Parser for oading GZipped JSON-LD - + - Constants for Service Names for Services that may be provided by a 
Pellet Server + Creates a new GZipped JSON-LD parser - + - Constants for Service Names for Services that may be provided by a Pellet Server + Creates a new GZipped JSON-LD parser with a specific set of . + The options to pass to the underlying - + - Constants for Service Names for Services that may be provided by a Pellet Server + Abstract Base class for RDF parsers which can read GZipped input + + + While the normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such + + - + - Constants for Service Names for Services that may be provided by a Pellet Server + Creates a new GZipped input parser + Underlying parser - + - Constants for Service Names for Services that may be provided by a Pellet Server + Loads a Graph from GZipped input + Graph to load into + Stream to load from - + - Constants for Service Names for Services that may be provided by a Pellet Server + Loads a Graph from GZipped input + Graph to load into + Reader to load from - + - Constants for Service Names for Services that may be provided by a Pellet Server + Loads a Graph from GZipped input + Graph to load into + File to load from - + - Constants for Service Names for Services that may be provided by a Pellet Server + Loads RDF using a RDF Handler from GZipped input + RDF Handler to use + Stream to load from - + - Represents a Connection to a Pellet Server + Loads RDF using a RDF Handler from GZipped input + RDF Handler to use + Reader to load from - + - Preferred MIME Type for the format to retrieve the Server Description in + Loads RDF using a RDF Handler from GZipped input + RDF Handler to use + File to load from - + - Creates a new connection to a Pellet Server + Helper method for raising warning events - Server URI + - + - Creates a new connection to a Pellet Server + Warning event which is raised when non-fatal errors are encounted parsing RDF - Server URI - + - Connects to a Pellet 
Server instance asynchronously invoking the callback when the connection is ready + Gets the description of the parser - Server URI - Callback to invoke when the connection is ready - State to pass to the callback + - + - Connects to a Pellet Server instance asynchronously invoking the callback when the connection is ready + Parser for loading GZipped NTriples - Server URI - Callback to invoke when the connection is ready - State to pass to the callback - + - Creates a new connection to a Pellet Server + Creates a new GZipped NTriples parser - Server URI - Callback to invoke when the connection is ready - State to pass to the callback - + - Creates a new connection to a Pellet Server + Parser for loading GZipped Turtle - Server URI - Callback to invoke when the connection is ready - State to pass to the callback - + - Discovers the Knowledge Bases on a Server + Creates a new GZipped Turtle parser - + - Discovers the Knowledge Bases on a Server asynchronously + Creates a new GZipped Turtle parser - Callback to invoke when the operation completes - + Turtle Syntax - + - Gets the Knowledge Bases available from this Pellet Server + Parser for loading GZipped Notation 3 - + - Gets whether the Server has a Knowledge Base with the given Name + Creates a new GZipped Notation 3 parser - Knowledge Base Name - - + - Gets whether the Server has a Knowledge Base which supports the given Service Type + Parser for loading GZipped RDF/XML - Service Type - - + - Gets the Knowledge Base with the given Name + Creates a new GZipped RDF/XML parser - Knowledge Base Name - - - + - Gets all the Knowledge Bases which support a given Server + Creates a new GZipped RDF/XML parser - Service Type - + RDF/XML parser mode - + - Class representing Services provided by a Pellet Server Knowledge Base + Parser for loading GZipped RDF/JSON - + - Creates a new Pellet Service instance + Creates a new GZipped RDF/JSON parser - Service Name - JSON Object representing the Service - + - Factory method for 
generating concrete Pellet Service instances representing different Pellet Services + Parser for loading GZipped RDFa - JSON Object representing the Service - - + - Gets the Name of the Service + Creates a new GZipped RDFa parser - + - Gets the Endpoint for this Service + Abstract Base class for Results parser that read GZipped input + + + While the normal parsers can be used with GZip streams directly this class just abstracts the wrapping of file/stream input into a GZip stream if it is not already passed as such + + - + - Gets the Response MIME Types supported by the Service + Creates a new GZipped results parser + Underlying parser - + - Helper class containing constants and methods for use in implementing OWL support + Loads a Result Set from GZipped input + Result Set to load into + Input to load from - + - Class containing Extraction Mode constants + Loads a Result Set from GZipped input + Result Set to load into + Input to load from - + - OWL Extraction Mode constants + Loads a Result Set from GZipped input + Result Set to load into + File to load from - + - OWL Extraction Mode constants + Loads a Result Set from GZipped input + Results Handler to use + Input to load from - + - OWL Extraction Mode constants + Loads a Result Set from GZipped input + Results Handler to use + Input to load from - + - OWL Extraction Mode constants + Loads a Result Set from GZipped input + Results Handler to use + File to load from - + - OWL Extraction Mode constants + Gets the description of the parser + - + - OWL Extraction Mode constants + Helper method for raising warning events + Warning message - + - OWL Extraction Mode constants + Event which is raised if non-fatal errors are countered with parsing results - + - OWL Extraction Mode constants + Parser for GZipped SPARQL XML - + - OWL Extraction Mode constants + Creates a new GZipped SPARQL XML parser - + - OWL Extraction Mode constants + Parser for GZipped SPARQL JSON - + - OWL Extraction Mode constants + Creates a new 
GZipped SPARQL JSON parser - + - OWL Extraction Mode constants + Parser for GZipped SPARQL CSV - + - OWL Extraction Mode constants + Creates a new GZipped SPARQL CSV parser - + - OWL Extraction Mode constants + Parser for GZipped SPARQL TSV - + - OWL Extraction Mode constants + Creates a new GZipped SPARQL TSV parser - + - OWL Extraction Mode constants + An extended for use in parsing - + - OWL Extraction Mode constants + Gets whether the end of the stream has been reached - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Text Reader to wrap + Buffer Size + + If the given TextReader is already a Blocking Text Reader this is a no-op + - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Text Reader to wrap + + If the given TextReader is already a Blocking Text Reader this is a no-op + - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Input Stream + Buffer Size - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Input Stream - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Input reader + - + - OWL Extraction Mode constants + Creates a new Blocking Text Reader + Input reader + Buffer Size + - + - OWL Class and Property Constants + Creates a new non-blocking Text Reader + Input reader + - + - Proposed interface for OWL Reasoners - currently incomplete + Creates a new non-blocking Text Reader - - - Important: This interface is specifically designed so that it does not require the provision of a Graph to methods unless the method does not make sense without taking an IGraph as a parameter. This is because we envisage the use of this interface for connecting to reasoners which have their own access to the data over which they are reasoning and do not need it providing explicitly to them. 
- - - Reasoner implementations may throw NotSupportedException for operations they don't support and may throw any other appropriate exceptions as appropriate for operations that encounter errors. - - + Input reader + Buffer Size + - + - Adds a Graph to the reasoners knowledge base + Abstract class representing a text reader that provides buffering on top of another text reader - Graph - - - A reasoner may choose to do nothing in this method if that reasoner especially if it operates using some pre-defined, remote or otherwise immutable knowledge base. - - - May be thrown if the Reasoner does not support such an operation - + - Extract a reasoning enhanced sub-graph from the given Graph rooted at the given Node + Default Buffer Size - Graph - Root Node - - + - Extracts all possible triples using the given extraction mode + Buffer array - Extraction Mode - - - The mode permits for the specification of an extraction mode for reasoners that can extract specific subsets of reasoning. Where this is not supported the reasoner should simply extract all triples that can be inferred by reasoning - - May be thrown if the Reasoner does not support such an operation - + - Extracts all possible triples using the given extraction modes + Current buffer position - Extraction Modes - - - The modes permits for the specification of an extraction mode for reasoners that can extract specific subsets of reasoning. 
Where this is not supported the reasoner should simply extract all triples that can be inferred by reasoning - - May be thrown if the Reasoner does not support such an operation - + - Extracts the triples which comprise the class hierarchy + Current buffer size (may be less than length of buffer array) - - May be thrown if the Reasoner does not support such an operation - + - Extracts the triples which comprise the class hierarchy and individuals of those classes + Whether underlying reader has been exhausted - - May be thrown if the Reasoner does not support such an operation - + - Returns whether the underlying knowledge base is consistent + Underlying reader - - May be thrown if the Reasoner does not support such an operation - + - Returns whether the given Graph is consistent with the underlying knowledge base + Creates a buffered reader - Graph - - May be thrown if the Reasoner does not support such an operation + + + raised if is less than 1 + raised if is null - + - Returns the enumeration of unsatisfiable classes + Requests that the buffer be filled - May be thrown if the Reasoner does not support such an operation - + - Interface for OWL Reasoners which have access to their own SPARQL implementations + Reads a sequence of characters from the buffer in a blocking way + Buffer + Index at which to start writing to the Buffer + Number of characters to read + Number of characters read - + - Executes a SPARQL Query using the reasoners SPARQL implementation + Reads a sequence of characters from the buffer - SPARQL Query - - May be thrown if the Reasoner does not support such an operation + Buffer + Index at which to start writing to the Buffer + Number of characters to read + Number of characters read - + - An Inference Engine that supports simple N3 rules + Reads a single character from the underlying Text Reader - - - This reasoner should be initialised with a Graph that contains simple N3 rules such as the following: - - - { ?x a ?type } => { ?type a 
rdfs:Class }. - - - When initialised the reasoner takes account of variables declared with @forAll and @forSome directives though no guarantees that scoping will be correct if you've got multiple @forAll and @forSome directives. - - - When the reasoner is applied to a Graph rules are implemented by generating a SPARQL Update INSERT command like the following and executing it on the given Graph - - - INSERT - { - ?type a rdfs:Class . - } - WHERE - { - ?x a ?type . - } - - + Character read or -1 if at end of input - + - Applies reasoning to the given Graph materialising the generated Triples in the same Graph + Peeks at the next character from the underlying Text Reader - Graph + Character peeked or -1 if at end of input - + - Applies reasoning on the Input Graph materialising the generated Triples in the Output Graph + Gets whether the end of the input has been reached - Input Graph - Output Graph - + - Initialises the Reasoner + Closes the reader and the underlying reader - Rules Graph - + - Tries to create a Rule + Disposes of the reader and the underlying reader - Triple + Whether this was called from the Dispose() method - + - Wrapper around an IOwlReasoner to make it appear like a forward-chaining reasoner + The BlockingTextReader is an implementation of a designed to wrap other readers which may or may not have high latency and thus ensures that premature end of input bug is not experienced. - Essentially all this class does is extract all triples which the underlying reasoner can infer. Currently the input graph and any graph passed to the Initialise() method have no effect on the output of the reasoner + + This is designed to avoid premature detection of end of input when the input has high latency and the consumer tries to read from the input faster than it can return data. This derives from and ensures the buffer is filled by calling the ReadBlock() method of the underlying TextReader thus avoiding the scenario where input appears to end prematurely. 
+ - + - Creates a new OWL Reasoner Wrapper around the given OWL Reasoner + Creates a new Blocking Text Reader - OWL Reasoner + Text Reader to wrap + Buffer Size - + - Applies the reasoner to the given Graph outputting inferences into the same Graph + Creates a new Blocking Text Reader - Graph + Text Reader to wrap - + - Applies the reasoner to the given input Graph outputting inferences into the output Graph + Creates a new Blocking Text Reader - Input Graph - Output Graph + Input Stream + Buffer Size - + - Initialises the reasoner + Creates a new Blocking Text Reader - Graph to initialise with + Input Stream - + - Wrapper around an IOwlReasoner to make it appear like a forward-chaining reasoner + Fills the Buffer + + + + + The NonBlockingTextReader is an implementation of a designed to wrap other readers where latency is known not to be a problem and we don't expect to ever have an empty read occur before the actual end of the stream - Effectively equivalent to StaticOwlReasonerWrapper except that every Graph reasoning is applied to is added to the reasoners knowledge base (unless the reasoner uses a fixed knowledge base) + Currently we only use this for file and network streams, you can force this to never be used with the global static option - + - Creates a new OWL Reasoner Wrapper around the given OWL Reasoner + Fills the buffer in a non-blocking manner - OWL Reasoner - + - Applies the reasoner to the given Graph outputting inferences into the same Graph + A subclass of JsonTextReader which automatically ignores all comments - Graph - + - Applies the reasoner to the given input Graph outputting inferences into the output Graph + Reads the next non-comment Token if one is available - Input Graph - Output Graph + True if a Token was read, False otherwise - + - A Pellet Reasoner which provides OWL 2 capable reasoning using an external knowledge base from a Pellet Server instance + A Class for parsing RDF data from Data URIs - Note: Currently this reasoner 
operates only on a external knowledge base and there is currently no way to introduce new knowledge bases/data through the dotNetRDF API + Data URIs use the data: scheme and are defined by the IETF in RFC 2397 and provide a means to embed data directly in a URI either in Base64 or ASCII encoded format. This class can extract the data from such URIs and attempt to parse it as RDF using the StringParser + + + The parsing process for data: URIs involves first extracting and decoding the data embedded in the URI - this may either be in Base64 or ASCII encoding - and then using the StringParser to actually parse the data string. If the data: URI defines a MIME type then a parser is selected (if one exists for the given MIME type) and that is used to parse the data, in the event that no MIME type is given or the one given does not have a corresponding parser then the StringParser will use its basic heuristics to attempt to auto-detect the format and select an appropriate parser. + + + If you attempt to use this loader for non data: URIs then the standard UriLoader is used instead. 
- + - Creates a new Pellet Reasoner + Loads RDF data into a Graph from a data: URI - Pellet Server - Knowledge Base name + Graph to load into + URI to load from + + Invokes the normal UriLoader instead if a the URI provided is not a data: URI + + Thrown if the metadata portion of the URI which indicates the MIME Type, Character Set and whether Base64 encoding is used is malformed - + - Creates a new Pellet Reasoner + Loads RDF data using an RDF Handler from a data: URI - Pellet Server URI - Knowledge Base name + RDF Handler + URI to load from + + Invokes the normal UriLoader instead if a the URI provided is not a data: URI + + Thrown if the metadata portion of the URI which indicates the MIME Type, Character Set and whether Base64 encoding is used is malformed - + - Gets the Knowledge Base this Reasoner operates over + Static Helper Class for loading Graphs and Triple Stores from Embedded Resources - + - Gets the Pellet Server this Reasoner operates on + Loads a Graph from an Embedded Resource + Graph to load into + Assembly Qualified Name of the Resource to load + Parser to use (leave null for auto-selection) - + - Executes a SPARQL Query against the underlying Knowledge Base + Loads a Graph from an Embedded Resource - SPARQL Query - + RDF Handler to use + Assembly Qualified Name of the Resource to load + Parser to use (leave null for auto-selection) - + - Adds a Graph to the Knowledge Base + Loads a Graph from an Embedded Resource - Graph - - Currently not supported by Pellet Server - + RDF Handler to use + Assembly Qualified Name of the Resource to load - + - Extract a reasoning enhanced sub-graph from the given Graph rooted at the given Node + Loads a Graph from an Embedded Resource - Graph - Root Node - + Graph to load into + Assembly Qualified Name of the Resource to load - Currently not supported by Pellet Server + Parser will be auto-selected - + - Extracts all possible triples using the given extraction mode + Internal Helper method which does the actual 
loading of the Graph from the Resource - Extraction Mode - - - Currently not supported by Pellet Server - + RDF Handler to use + Assembly to get the resource stream from + Full name of the Resource (without the Assembly Name) + Parser to use (if null then will be auto-selected) - + - Extracts all possible triples using the given extraction modes + Loads a RDF Dataset from an Embedded Resource - Extraction Modes - + Store to load into + Assembly Qualified Name of the Resource to load + Parser to use (leave null for auto-selection) + + + + Loads a RDF Dataset from an Embedded Resource + + Store to load into + Assembly Qualified Name of the Resource to load - Currently not supported by Pellet Server + Parser will be auto-selected - + - Extracts the triples which comprise the class hierarchy + Loads a RDF Dataset from an Embedded Resource - + RDF Handler to use + Assembly Qualified Name of the Resource to load + Parser to use (leave null for auto-selection) - + - Extracts the triples which comprise the class hierarchy and individuals of those classes + Loads a RDF Dataset from an Embedded Resource - + RDF Handler to use + Assembly Qualified Name of the Resource to load - + - Returns whether the underlying knowledge base is consistent + Internal Helper method which does the actual loading of the Triple Store from the Resource - + RDF Handler to use + Assembly to get the resource stream from + Full name of the Resource (without the Assembly Name) + Parser to use (if null will be auto-selected) - + - Returns whether the given Graph is consistent with the underlying knowledge base + Static Helper Class for loading RDF Files into Graphs/Triple Stores - Graph - - - Currently not supported by Pellet Server - - + - Returns the enumeration of unsatisfiable classes + Loads the contents of the given File into a Graph providing the RDF format can be determined + Graph to load into + File to load from - Currently not supported by Pellet Server + + The FileLoader first attempts to 
select a RDF Parser by examining the file extension to select the most likely MIME type for the file. This assumes that the file extension corresponds to one of the recognized file extensions for a RDF format the library supports. If this suceeds then a parser is chosen and will be used to attempt to parse the input. + + + Should this fail then the contents of the file will be read into a String, the StringParser is then used to attempt to parse it. The StringParser uses some simple rules to guess which format the input is likely to be and chooses a parser based on it's guess. + + + Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If a File URI is assigned it will always be an absolute URI for the file + - + - Interfaces for Inference Engines + Loads the contents of the given File into a Graph using the given RDF Parser + Graph to load into + File to load from + Parser to use - An Inference Engine is a class that given a Graph can infer extra information from that Graph based on fixed rules or rules computed from the Graphs it is performing inference on + Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. 
In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. - In general terms an implementation of an Inference Engine typically provides some form of forward chaining reasoner though implementations may do more advanced reasoning or wrap other kinds of reasoner. + If a File URI is assigned it will always be an absolute URI for the file - - - Applies inference to the given Graph and outputs the inferred information to that Graph - - Graph - - - - Applies inference to the Input Graph and outputs the inferred information to the Output Graph - - Graph to apply inference to - Graph inferred information is output to - - - - Initialises the Inference Engine using the given Graph - - Graph to initialise from - - + - An Inference Engine which uses RDFS reasoning + Loads the contents of the given File using a RDF Handler providing the RDF format can be determined + RDF Handler to use + File to load from - Does basic RDFS inferencing using the schema taken from the Graph(s) which are provided in calls to the reasoners Initialise() method. + The FileLoader first attempts to select a RDF Parser by examining the file extension to select the most likely MIME type for the file. This assumes that the file extension corresponds to one of the recognized file extensions for a RDF format the library supports. If this suceeds then a parser is chosen and will be used to attempt to parse the input. - Types of inference performed are as follows: + Should this fail then the contents of the file will be read into a String, the StringParser is then used to attempt to parse it. The StringParser uses some simple rules to guess which format the input is likely to be and chooses a parser based on it's guess. -
    -
  • Class hierarchy reasoning - asserts additional types triples for anything that is typed as the subclass of a class.
  • -
  • Property hierarchy reasoning - asserts additional property triples for anything where the predicate is a subproperty of a defined property
  • -
  • Domain & Range reasoning - asserts additional type triples based on the domains and ranges of properties
  • -
- - - Creates a new instance of the Static RdfsReasoner - - - + - Applies inference to the given Graph and outputs the inferred information to that Graph + Loads the contents of the given File using a RDF Handler using the given RDF Parser - Graph + RDF Handler to use + File to load from + Parser to use + + + Note: FileLoader will assign the Graph a file URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If a File URI is assigned it will always be an absolute URI for the file + + - + - Applies inference to the Input Graph and outputs the inferred information to the Output Graph + Loads the contents of the given File into a Triple Store providing the RDF dataset format can be determined - Graph to apply inference to - Graph inferred information is output to + Triple Store to load into + File to load from + Parser to use to parse the given file + + + If the parameter is set to null then the FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. 
+ + - + - Imports any Class heirarchy information from the given Graph into the Reasoners Knowledge Base in order to initialise the Reasoner + Loads the contents of the given File into a Triple Store providing the RDF dataset format can be determined - Graph to import from + Triple Store to load into + File to load from - Looks for Triples defining things to be classes and those defining that something is a subClass of something + + The FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. + - + - Helper method which applies Class hierarchy inferencing + Loads the contents of the given File using a RDF Handler providing the RDF dataset format can be determined - Triple defining the type for something - Input Graph - Output Graph - List of Inferences + RDF Handler to use + File to load from + Parser to use to parse the given file + + + If the parameter is set to null then the FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. + + - + - An Inference Engine which uses RDFS reasoning + Loads the contents of the given File using a RDF Handler providing the RDF dataset format can be determined + RDF Handler to use + File to load from - Does basic RDFS inferencing as detailed in the remarks for the StaticRdfsReasoner except every Graph that inference is applied to has the potential to alter the schema which is in use. 
+ + The FileLoader attempts to select a Store Parser by examining the file extension to select the most likely MIME type for the file. This assume that the file extension corresponds to one of the recognized file extensions for a RDF dataset format the library supports. If this suceeds then a parser is chosen and used to parse the input file. + - + - Applies inference to the Input Graph and outputs the inferred information to the Output Graph + Raises warning messages - Graph to apply inference to - Graph inferred information is output to + Warning Message - + - An Inference Engine that uses SKOS Concept Hierarchies + Raises Store Warning messages - - - Infers additional values for properties based on SKOS Concept Hierarcies. If there is a Triple whose value is a Concept from the hierarchy then new versions of that Triple will be inferred where the object becomes each concept higher in the hierarchy. - - + Warning Message - + - Namespace for SKOS + Event which is raised when the parser invoked by the FileLoader detects a non-fatal issue with the RDF syntax - + - Creates a new instance of the SKOS Reasoner + Event which is raised when the Store parser invoked by the FileLoader detects a non-fatal issue with the RDF syntax - + - Applies inference to the given Graph and outputs the inferred information to that Graph + Interface for RDFa Vocabularies - Graph - + - Applies inference to the Input Graph and outputs the inferred information to the Output Graph + Gets whether a Vocabulary contains a Term - Graph to apply inference to - Graph inferred information is output to + Term + - + - Imports any Concept heirarchy information from the given Graph into the Reasoners Knowledge Base in order to initialise the Reasoner + Resolves a Term in the Vocabulary - Graph to import from - - Looks for Triples defining SKOS concepts and relating them to narrower and broader concepts - + Term + - + - An Inference Engine that uses SKOS Concept Hierarchies + Adds a Term to the Vocabulary 
+ Term + URI - + - Applies inference to the Input Graph and outputs the inferred information to the Output Graph + Adds a Namespace to the Vocabulary - Graph to apply inference to - Graph inferred information is output to + Prefix + Namespace URI - + - - Contains the classes which model property paths in SPARQL, they can be used to both represent and evaluate a property path as part of a SPARQL query. - + Merges another Vocabulary into this one + Vocabulary - + - Represents a Negated Property Set + Gets/Sets the Vocabulary URI - + - Creates a new Negated Property Set + Gets the Term Mappings - Negated Properties - Inverse Negated Properties - + - Gets the Negated Properties + Gets the Namespace Mappings - + - Gets the Inverse Negated Properties + Vocabulary for XHTML+RDFa (and HTML+RDFa) - + - Converts a Path into its Algebra Form + Gets whether the Vocabulary contains a Term - Path Transformation Context + Term - + - Gets the String representation of the Path + Resolves a Term in the Vocabulary + Term - + - Represents Alternative Paths + Adds a Term to the Vocabulary + Term + URI + Thrown since this vocabulary is fixed and cannot be changed - + - Creates a new Alternative Path + Adds a Namespace to the Vocabulary - LHS Path - RHS Path + Prefix + Namespace URI + Thrown since this vocabulary is fixed and cannot be changed - + - Gets the String representation of the Path + Merges another Vocabulary into this one - + Vocabulary + Thrown since this vocabulary is fixed and cannot be changed - + - Converts a Path into its Algebra Form + Gets the Term Mappings - Path Transformation Context - - + - Abstract Base Class for Binary Path operators + Gets the Namespace Mappings - + - Parts of the Path + Gets/Sets the Vocabulary URI + Set throws this since this vocabulary is fixed and cannot be changed - + - Parts of the Path + Represents a dynamic vocabulary for RDFa - + - Creates a new Binary Path + Creates a new set of Term Mappings - LHS Path - RHS Path - + - Gets the LHS 
Path component + Creates a new set of Term Mappings with the given Vocabulary URI + Vocabulary URI - + - Gets the RHS Path component + Creates a new set of Term Mappings from the given Vocabulary + Vocabulary - + - Converts a Path into its Algebra Form + Merges another Vocabulary into this one - Path Transformation Context - + Vocabulary - + - Gets the String representation of the Path + Gets whether the Vocabulary contains a Term + Term - + - Abstract Base Class for Unary Path operators + Resolves a Term in the Vocabulary + Term + - + - Path + Adds a Namespace to the Vocabulary + Prefix + Namespace URI - + - Creates a new Unary Path + Adds a Term to the Vocabulary - Path + Term + URI - + - Gets the Inner Path + Gets the Term Mappings - + - Converts a Path into its Algebra Form + Gets the Namespace Mappings - Path Transformation Context - - + - Gets the String representation of the Path + Gets/Sets the Vocabulary URI - - + - Represents a Cardinality restriction on a Path + Static Helper class which can be used to validate IRIs according to RFC 3987 + + Some valid IRIs may be rejected by these validating functions as the IRI specification allows character codes which are outside the range of the .Net char type + - + - Creates a new Cardinality Restriction + Gets whether a string matches the IRI production - Path + String + - + - Gets the Minimum Cardinality of the Path + Gets whether a string matches the ihier-part production + String + - + - Gets the Maximum Cardinality of the Path + Gets whether a string matches the IRI-reference production + String + - + - Represents a Fixed Cardinality restriction on a Path + Gets whether a string matches the absolute-IRI production + String + - + - Creates a new Fixed Cardinality restriction + Gets whether a string matches the irelative-ref production - Path - N + String + - + - Gets the Maximum Cardinality of the Path + Gets whether a string matches the irelative-part production + String + - + - Gets the Minimum Cardinality of 
the Path + Gets whether a string matches the iauthority production + String + - + - Converts a Path into its Algebra Form + Gets whether a string matches the userinfo production - Path Transformation Context + String - + - Gets the String representation of the Path + Gets whether a string matches the ihost production + String - + - Represents a Zero or More cardinality restriction on a Path + Gets whether a string matches the ireg-name production + String + - + - Creates a new Zero or More cardinality restriction + Gets whether a string matches the ipath production - Path + String + - + - Gets the Maximum Cardinality of the Path + Gets whether a string matches the ipath-abempty production + String + - + - Gets the Minimum Cardinality of the Path + Gets whether a string matches the ipath-absolute production + String + - + - Gets the String representation of the Path + Gets whether a string matches the ipath-noscheme production + String - + - Converts a Path into its Algebra Form + Gets whether a string matches the ipath-rootless production - Path Transformation Context + String - + - Represents a Zero or One cardinality restriction on a Path + Gets whether a string matches the ipath-empty production + String + - + - Creates a new Zero or One cardinality restriction + Gets whether a string matches the isegment production - Path + String + - + - Gets the Maximum Cardinality of the Path + Gets whether a string matches the isegment-nz production + String + - + - Gets the Minimum Cardinality of the Path + Gets whether a string matches the isegment-nz-nc production + String + - + - Gets the String representation of the Path + Gets whether a string matches the ipchar production + String - + - Converts a Path into its Algebra Form + Gets whether a string matches the iquery production - Path Transformation Context + String - + - Represents a One or More cardinality restriction on a Path + Gets whether a string matches the ifragment production + String + - + - Creates a new 
One or More cardinality restriction + Gets whether a character matches the iunreserved production - Path + Character + - + - Gets the Maximum Cardinality of the Path + Gets whether a character matches the ucschar production + Character + + + Not all strings that will match the official ucschar production will be matched by this function as the ucschar production permits character codes beyond the range of the .Net char type + - + - Gets the Minimum Cardinality of the Path + Gets whether a string matches the scheme production + String + - + - Gets the String representation of the Path + Gets whether a string matches the port production + String - + - Converts a Path into its Algebra Form + Gets whether a string matches the IP-literal production - Path Transformation Context + String - + - Represents a N or More cardinality restriction on a Path + Gets whether a string matches the IPvFuture production + String + - + - Creates a new N or More cardinality restriction + Gets whether a string matches the IPv6address production - Path - Minimum Cardinality + String + - + - Gets the Maximum Cardinality of the Path + Gets whether a string matches the h16 production + String + - + - Gets the Minimum Cardinality of the Path + Gets whether a string matches the ls32 production + String + - + - Gets the String representation of the Path + Gets whether a string matches the IPv4address production + String - + - Converts a Path into its Algebra Form + Gets whether a string matches the dec-octet production - Path Transformation Context + String - + - Represents a Zero to N cardinality restriction on a Path + Gets whether a string matches the pct-encoded production + String + - + - Creates a new Zero to N cardinality restriction + Gets whether a character matches the unreserved production - Path - Maximum Cardinality + Character + - + - Gets the Maximum Cardinality of the Path + Gets whether a character matches the reserved production + Character + - + - Gets the Minimum Cardinality 
of the Path + Gets whether a character matches the gen-delims production + Character + - + - Gets the String representation of the Path + Gets whether a character matches the sub-delims production + Character - + - Converts a Path into its Algebra Form + Gets whether a character matches the HEXDIG terminal - Path Transformation Context + Character - + - Represents a N to M cardinality restriction on a Path + Interface for Caches that can be used to cache the result of loading Graphs from URIs + + + Warning: Only available in Builds for which caching is supported e.g. not supported under Silverlight + + + Implementors should take care to implement their caches such that any errors in the cache do not bubble up outside of the cache. If the cache encounters any error when caching data or retrieving data from the cache it should indicate that the cached data is not available + + - + - Creates a new N to M cardinality restriction + Gets/Sets the Cache Directory that is in use - Path - Minimum Cardinality - Maximum Cardinality + + + Non-filesystem based caches are free to return String.Empty or null but MUST NOT throw any form or error + + - + - Gets the Maximum Cardinality of the Path + Gets/Sets how long results should be cached + + This only applies to downloaded URIs where an ETag is not available, where ETags are available ETag based caching SHOULD be used + - + - Gets the Minimum Cardinality of the Path + Clears the Cache - + - Gets the String representation of the Path + Gets the ETag for the given URI + URI + Thrown if there is no ETag for the given URI + + + Calling code MUST always use the HasETag() method prior to using this method so it should be safe to throw the KeyNotFoundException if there is no ETag for the given URI + + - + - Converts a Path into its Algebra Form + Gets the path to the locally cached copy of the Graph from the given URI - Path Transformation Context + URI - + - Represents an Inverse Path + Gets whether there is an ETag for the given URI 
+ URI + - + - Creates a new Inverse Path + Is there a locally cached copy of the Graph from the given URI which is not expired - Path + URI + Whether the local copy is required to meet the Cache Freshness (set by the Cache Duration) + - + - Gets the String representation of the Path + Remove the ETag record for the given URI - + URI - + - Converts a Path into its Algebra Form + Removes a locally cached copy of a URIs results from the Cache - Path Transformation Context - + URI - + - Represents a Path expression in SPARQL + Associates an ETag (if any) with the Request and Response URIs plus returns an IRdfHandler that can be used to write to the cache + URI from which the RDF Graph was requested + The actual URI which responded to the request + ETag of the response (if any) + Either an instance of an IRdfHandler that will do the caching or null if no caching is possible - + - Converts a Path into its Algebra Form + Parser for JSON-LD 1.0/1.1 - Path Transformation Context - - + + + + - Gets the String representation of a Path + Get the current parser options - - + - Class representing a potential path used during the evaluation of complex property paths + Create an instance of the parser configured to parser JSON-LD 1.1 with no pre-defined context - + - Creates a new Potential Path + Create an instace of the parser configured with the provided parser options - Start Point of the Path - Current Point on the Path + - + - Creates a new Potential Path which is a copy of an existing Path + Read JSON-LD from the specified file and add the RDF quads found in the JSON-LD to the specified store - Potentuak Path + The store to add the parsed RDF quads to + The path to the JSON file to be parsed - + + + + + + + + + + - Gets the Start of the Path + Parser for Notation 3 syntax + + + Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Gets/Sets the Current Point of the Path - in the case of a complete Path this is 
the end of the Path + The Uri for log:implies - + - Gets/Sets whether the Path is complete + The Uri for owl:sameAs - + - Gets/Sets whether the Path is a dead-end + Creates a new Notation 3 Parser - - - This may be useful information as it can help stop us uneccessarily regenerating partial paths which are dead ends - - - + - Gets/Sets whether the Path is Partial + Creates a new Notation 3 Parser which uses the given Token Queue Mode - - While this may seem something of a misnomer what this represents is that the path is only part of the overall path so in the case of a sequence path we'll make all incomplete paths from the first part of the sequence as partial so they can't be themselves completed but they can be used to form complete paths - + Queue Mode for Tokenising - + - Gets/Sets the Length of the Path + Gets/Sets whether Parsing Trace is written to the Console - + - Gets the Hash Code for the potential path + Gets/Sets whether Tokeniser Trace is written to the Console - - + - Checks whether the other object is an equivalent potential path + Gets/Sets the token queue mode used - Object to test - - + - Gets the String representation of the path + Loads a Graph by reading Notation 3 syntax from the given input - + Graph to load into + Stream to read from - + - Evaluation Context for evaluating complex property paths in SPARQL + Loads a Graph by reading Notation 3 syntax from the given input + Graph to load into + Input to read from - + - Creates a new Path Evaluation Context + Loads a Graph by reading Notation 3 syntax from the given file - SPARQL Evaluation Context - Start point of the Path - End point of the Path + Graph to load into + File to read from - + - Creates a new Path Evaluation Context copied from the given Context + Loads RDF using a RDF handler by reading Notation 3 syntax from the given input - Path Evaluation Context + RDF Handler to use + Stream to read from - + - Gets the SPARQL Evaluation Context + Loads RDF using a RDF handler by reading 
Notation 3 syntax from the given input + RDF Handler to use + Input to read from - + - Gets/Sets whether this is the first part of the Path to be evaluated + Loads RDF using a RDF handler by reading Notation 3 syntax from the given file + RDF Handler to use + File to read from - + - Gets/Sets whether this is the last part of the Path to be evaluated + Internal method which does the parsing of the input + Parser Context - + - Gets/Sets whether the Path is currently reversed + Tries to parse declarations + Parse Context - + - Gets the hash set of incomplete paths generated so far + Tries to parse forAll quantifiers + Parser Context - + - Gets the hash set of complete paths generated so far + Tries to parse forSome quantifiers + Parser Context - + - Gets the pattern which is the start of the path + Tries to parse Triples + Parser Context - + - Gets the pattern which is the end of the path + Tries to parse Predicate Object lists + Parse Context + Subject of the Triples + Whether this is a Blank Node Predicate Object list - + - Gets whether pattern evaluation can be aborted early + Tries to parse Object lists - - Useful when both the start and end of the path are fixed (non-variables) which means that we can stop evaluating once we find the path (if it exists) - + Parse Context + Subject of the Triples + Predicate of the Triples + Whether this is a Blank Node Object list + Indicates whether the asserted triples should have it's subject and object swapped - + - Gets/Sets whether new paths can be introduced when not evaluating the first part of the path + Tries to parse Collections - - - This is required when we have a path like ?x foaf:knows* /foaf:knows ?y and ?x is not bound prior to the path being executed. 
Since we permit zero-length paths we should return the names of everyone even if they don't know anyone - - - The cases where ?x is already bound are handled elsewhere as we can just introduce zero-length paths for every existing binding for ?x - - + Parser Context + Blank Node which is the head of the collection - + - Adds a new path to the list of current incomplete paths + Tries to parse a Graph Literal - Path + - + - Adds a new path to the list of complete paths + Tries to parse Literal Tokens into Literal Nodes - Path + Parser Context + Literal Token + - + - Transform Context class that is used in the Path to Algebra Transformation process + Helper method which raises the Warning event if there is an event handler registered + - + - Creates a new Path Transform Context + Event which is raised when the parser detects issues with the input which are non-fatal - Subject that is the start of the Path - Object that is the end of the Path - + - Creates a new Path Transform Context from an existing context + Gets the String representation of the Parser which is a description of the syntax it parses - Context + - + - Returns the BGP that the Path Transform produces + Possible NQuads Syntax modes - - + - Gets the next available temporary variable + The original NQuads specification - - + - Adds a Triple Pattern to the Path Transform + Standardized NQuads as specified in the RDF 1.1 NQuads specification - Triple Pattern - + - Gets the Next ID to be used + Parser for parsing NQuads (NTriples with an additional Context i.e. Named Graphs) + + + The Default Graph (if any) will be given the special Uri nquads:default-graph + + + NQuads permits Blank Nodes and Literals to be used as Context, since the library only supports Graphs named with URIs these are translated into URIs of the following form: + +
+            nquads:bnode:12345678
+            
+
+            nquads:literal:87654321
+            
+ + In these URIs the numbers are the libraries hash codes for the node used as the Context. + +
- + - Gets/Sets the Subject of the Triple Pattern at this point in the Path Transformation + Creates a new NQuads parser - + - Gets/Sets the Object of the Triple Pattern at this point in the Path Transformation + Creates a new NQuads parser + NQuads syntax mode - + - Gets/Sets the Object at the end of the Pattern + Creates a new NQuads parser + Token Queue Mode - + - Resets the current Object to be the end Object of the Path + Creates a new NQuads parser + Token Queue Mode + NQuads syntax mode - + - Gets/Sets whether this is the Top Level Pattern + Gets/Sets whether Tokeniser Tracing is used - + - Creates a Triple Pattern + Gets/Sets the token queue mode used - Subject - Property Path - Object - - + - Represents a Predicate which is part of a Path + Gets/Sets the NQuads syntax mode - + - Creates a new Property + Loads a RDF Dataset from the NQuads input into the given Triple Store - Predicate + Triple Store to load into + File to load from - + - Gets the Predicate this part of the Path represents + Loads a RDF Dataset from the NQuads input into the given Triple Store + Triple Store to load into + Input to load from - + - Evaluates the Path using the given Path Evaluation Context + Loads a RDF Dataset from the NQuads input using a RDF Handler - Path Evaluation Context + RDF Handler to use + File to load from - + - Gets the String representation of the Path + Loads a RDF Dataset from the NQuads input using a RDF Handler - + RDF Handler to use + Input to load from - + - Converts a Path into its Algebra Form + Converts syntax enumeration values from NQuads to NTriples - Path Transformation Context + NQuads Syntax - + - Represents a standard forwards path + Tries to parse a URI + RDF Handler + URI + URI Node if parsed successfully - + - Creates a new Sequence Path + Helper method used to raise the Warning event if there is an event handler registered - LHS Path - RHS Path + Warning message - + - Gets the String representation of the Path + Event which Readers can raise 
when they notice syntax that is ambigious/deprecated etc which can still be parsed - - + - Converts a Path into its Algebra Form + Gets the String representation of the Parser which is a description of the syntax it parses - Path Transformation Context - + - A Class for connecting to a remote SPARQL Endpoint and executing Queries against it + Possible NTriples syntax modes - + - Empty Constructor for use by derived classes + The original NTriples syntax as specified in the original RDF specification test cases specification - + - Creates a new SPARQL Endpoint for the given Endpoint URI + Standardized NTriples as specified in the RDF 1.1 NTriples specification - Remote Endpoint URI - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph Uri + Parser for NTriples syntax - Remote Endpoint URI - Default Graph URI to use when Querying the Endpoint + Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Creates a new SPARQL Endpoint for the given Endpoint Uri using the given Default Graph Uri + Creates a new instance of the parser - Remote Endpoint URI - Default Graph URI to use when Querying the Endpoint - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Creates a new instance of the parser - Remote Endpoint URI - Default Graph URI to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint + NTriples syntax to parse - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Creates a new instance of the parser using the given token queue mode - Remote Endpoint URI - Default Graph URI to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint + Token Queue Mode - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Creates a new instance of the parser using the given syntax and token 
queue mode - Remote Endpoint URI - Default Graph URI to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint + + Token Queue Mode + NTriples syntax to parse - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Controls whether Tokeniser progress will be traced by writing output to the Console - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Controls whether Parser progress will be traced by writing output to the Console - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Gets/Sets the token queue mode used - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Gets/Sets the desired NTriples syntax - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Parses NTriples Syntax from the given Input Stream into Triples in the given Graph - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint + Graph to create Triples in + Arbitrary Input Stream to read input from - + - Creates a new SPARQL Endpoint for the given Endpoint URI using the given Default Graph URI + Parses NTriples Syntax from the given Input into Triples in the given Graph - Remote Endpoint URI - Default Graph URIs to use when Querying the Endpoint - Named Graph URIs to use when Querying the Endpoint + Graph to create Triples in + Arbitrary Input to read input from - + - 
Gets the Default Graph URIs for Queries made to the SPARQL Endpoint + Parses NTriples Syntax from the given File into Triples in the given Graph + Graph to create Triples in + Name of the file containing Turtle Syntax + Simply opens an StreamReader and uses the overloaded version of this function - + - Gets the List of Named Graphs used in requests + Parses NTriples Syntax from the given Input Stream using a RDF Handler + RDF Handler to use + Input Stream to read input from - + - Gets/Sets the Accept Header sent with ASK/SELECT queries + Parses NTriples Syntax from the given Input using a RDF Handler - - - Can be used to workaround buggy endpoints which don't like the broad Accept Header that dotNetRDF sends by default. If not set or explicitly set to null the library uses the default header generated by - - + RDF Handler to use + Input to read input from - + - Gets/Sets the Accept Header sent with CONSTRUCT/DESCRIBE queries + Parses NTriples Syntax from the given file using a RDF Handler - - - Can be used to workaround buggy endpoints which don't like the broad Accept Header that dotNetRDF sends by default. If not set or explicitly set to null the library uses the default header generated by - - + RDF Handler to use + File to read from - + - Makes a Query where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + Tries to parse a URI - SPARQL Query String - A SPARQL Result Set + Context + URI + URI Node if parsed successfully - + - Makes a Query where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + Helper method for raising informative standardised Parser Errors - Results Handler - SPARQL Query String + The Error Message + The Token that is the cause of the Error + - + - Makes a Query where the expected Result is an RDF Graph ie. 
CONSTRUCT and DESCRIBE Queries + Internal Helper method which raises the Warning event if an event handler is registered to it - SPARQL Query String - RDF Graph + Warning Message - + - Makes a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + Event which is raised when there is a non-fatal issue with the NTriples being parsed - RDF Handler - SPARQL Query String - + - Makes a Query to a SPARQL Endpoint and returns the raw Response + Gets the String representation of the Parser which is a description of the syntax it parses - SPARQL Query String - + - Makes a Query to a SPARQL Endpoint and returns the raw Response + Static Helper class containing useful methods for Parsers - SPARQL Query String - MIME Types to use for the Accept Header - - + - Makes a Query where the expected Result is a SparqlResultSet ie. SELECT and ASK Queries + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - SPARQL Query String - A Sparql Result Set - - - Allows for implementation of asynchronous querying. Note that the overloads of QueryWithResultSet() and QueryWithResultGraph() that take callbacks are already implemented asynchronously so you may wish to use those instead if you don't need to explicitly invoke and wait on an async operation. - - + Parser Context + Token to resolve + - + - Delegate for making a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - Sparql Query String - RDF Graph - Allows for implementation of asynchronous querying - - - Allows for implementation of asynchronous querying. Note that the overloads of QueryWithResultSet() and QueryWithResultGraph() that take callbacks are already implemented asynchronously so you may wish to use those instead if you don't need to explicitly invoke and wait on an async operation. 
- - + Parser Context + Token to resolve + Whether when the default prefix is used but not defined it can fallback to the Base URI + - + - Internal method which builds the Query Uri and executes it via GET/POST as appropriate + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - Sparql Query - Accept Header to use for the request + Parser Context + Token to resolve + Whether when the default prefix is used but not defined it can fallback to the Base URI + QName unescaping function - + - Internal Helper Method which executes the HTTP Requests against the Sparql Endpoint + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - Uri to make Request to - Data that is to be POSTed to the Endpoint in application/x-www-form-urlencoded format - The Accept Header that should be used - HTTP Response + Parser Context + Token to resolve + - + - Makes a Query asynchronously where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - SPARQL Query String - Callback to invoke when the query completes - State to pass to the callback + RDF Handler + Token to resolve + + + It is not recommended to use this overload since an IRdfHandler cannot resolve QNames + - + - Makes a Query asynchronously where the expected Result is a SparqlResultSet i.e. SELECT and ASK Queries + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - SPARQL Query String Results Handler - Callback to invoke when the query completes - State to pass to the callback - - - - Makes a Query asynchronously where the expected Result is an RDF Graph ie. 
CONSTRUCT and DESCRIBE Queries - - SPARQL Query String - Callback to invoke when the query completes - State to pass to the callback + Token to resolve + + + It is not recommended to use this overload since an IRdfHandler cannot resolve QNames + - + - Makes a Query asynchronously where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + Attempts to resolve a QName or URI Token into a URI Node and produces appropriate error messages if this fails - SPARQL Query String - RDF Handler - Callback to invoke when the query completes - State to pass to the callback + Graph + Token to resolve + - + - Serializes the Endpoint's Configuration + Helper method for raising informative standardised Parser Errors - Configuration Serialization Context + The Error Message + The Token that is the cause of the Error + - + - Represents the type of the SPARQL Results Set + Helper function which generates standardised Error Messages + Error Message + Event causing the Error + - + - The Result Set represents a Boolean Result + Helper function which generates standardised Error Messages + Error Message + The Production where the Error occurred + Event causing the Error + - + - The Result Set represents a set of Variable Bindings + Throws a RdfParsingTerminatedException which is used to tell the parser that it should stop parsing. + - + - The Result Set represents an unknown result i.e. 
it has yet to be filled with Results + Represents Position Information from Parsers - + - Class for representing Sparql Result Sets + Creates a new set of Position Information + Line + Column - + - Lists of Sparql Results + Creates a new set of Position Information + Line + Start Column + End Column - + - Lists of Variables in the Result Set + Creates a new set of Position Information + Start Line + End Line + Start Column + End Column - + - Boolean Result + Creates a new set of Position Information form some XML Line Information + XML Line Information - + - Creates an Empty Sparql Result Set + Gets the Start Line - Useful where you need a possible guarentee of returning an result set even if it proves to be empty and also necessary for the implementation of Result Set Parsers. - + - Creates a Sparql Result Set for the Results of an ASK Query with the given Result value + Gets the End Line - - + - Creates a Sparql Result Set for the collection of results + Gets the Start Column - Results - + - Creates a SPARQL Result Set for the Results of a Query with the Leviathan Engine + Gets the End Column - SPARQL Evaluation Context - + - Gets the Type of the Results Set + Class for reading RDF embedded as RDFa from within HTML web pages + + + The RDFa parser uses a HTML parser (Html Agility Pack) that is highly tolerant of real-world HTML and so is able to extract RDFa from pages that are not strictly valid HTML/XHTML + + - + - Gets the Result of an ASK Query + Creates a new RDFa Parser which will auto-detect which RDFa version to use (assumes 1.1 if none explicitly specified) - Result Set is deemed to refer to an ASK query if the Variables list is empty since an ASK Query result has an empty <head>. It is always true for any other Query type where one/more variables were requested even if the Result Set is empty. 
- + - Gets the number of Results in the Result Set + Creates a new RDFa Parser which will use the specified RDFa syntax + RDFa Syntax Version - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - Gets whether the Result Set is empty and can have Results loaded into it + Base class for the framework-specific RDFa parser implementations - - + + + + - + - Gets the List of Results + XHTML Vocab Namespace - + - Index directly into the Results + URI for the XHTML+RDFa DTD - Index of the Result you wish to retrieve - - + - Gets the Variables used in the Result Set + Namespace URI for XHTML - - As of 1.0 where possible dotNetRDF tries to preserve the ordering of variables however this may not be possible depending on where the result set originates from or how it is populated - - + - Trims the Result Set to remove unbound variables from results + Namespace URI for RDFa - - Note: This does not remove empty results this only removes unbound variables from individual results - - + - Adds a Variable to the Result Set + RDFa Version Constants - Variable Name - + - Adds a Result to the Result Set + RDFa Version Constants - Result - + - Sets the Boolean Result for the Result Set + RDFa Version Constants - Boolean Result - + - Gets an Enumerator for the Results List + RDFa Version Constants - - + - Gets an Enumerator for the Results List + Creates a new RDFa Parser which will auto-detect which RDFa version to use (assumes 1.1 if none explicitly specified) - - + - Determines whether two Result Sets are equal + Creates a new RDFa Parser which will use the specified RDFa syntax - - - - Experimental and not yet complete - + RDFa Syntax Version - + - Converts a Result Set into a Triple Collection + Parses RDFa by extracting it from the HTML from the given input - Graph to generate the Nodes in - - - Assumes the Result Set contains three variables ?s, ?p and ?o to use as the Subject, Predicate and Object respectively. 
Only Results for which all three variables have bound values will generate Triples - + Graph to load into + Stream to read from - + - Converts a Result Set into a Triple Collection + Parses RDFa by extracting it from the HTML from the given input - Graph to generate the Nodes in - Variable whose value should be used for Subjects of Triples - Variable whose value should be used for Predicates of Triples - Variable whose value should be used for Object of Triples - - - Only Results for which all three variables have bound values will generate Triples - + Graph to load into + Input to read from - + - Casts a SPARQL Result Set to a DataTable with all Columns typed as INode (Results with unbound variables will have nulls in the appropriate columns of their DataRow) + Parses RDFa by extracting it from the HTML from the given file - SPARQL Result Set - - - Warning: Not available under builds which remove the Data Storage layer from dotNetRDF e.g. Silverlight - + Graph to load into + File to read from - + - Disposes of a Result Set + Parses RDFa by extracting it from the HTML from the given input + RDF Handler to use + Stream to read from - + - Gets the data for serialization + Parses RDFa by extracting it from the HTML from the given input - Serialization Information - Streaming Context + RDF Handler to use + Input to read from - + - Gets the schema for XML serialization + Parses RDFa by extracting it from the HTML from the given input - + RDF Handler to use + File to read from - + - Writes the data for XML serialization (.Net serialization not the official SPARQL results serialization) + Parse the input stream as an HTML document - XML Writer + + - + - Reads the data for XML deserialization (.Net serialization not the official SPARQL results serialization) + Determine if an element has a particular attribute - XML Reader + The element to check + The name of the attribute to check for + True if the element has an attribute named , false otherwise - + - Class containing 
Helper information and methods pertaining to the Sparql Query Language for RDF + Get the value of a particular attribute of an element + The element + The name of the attribute on the element + The value of the attribute - + - Namespace Uri for SPARQL Namespace + Set the value of a particular attribute of an element + The element + The name of the attribute to set/update + The new value for the attribute - + - Namespace Uri for the RDF serialization of a SPARQL Result Set + Get the base element of the specified document + + - + - Keywords in Sparql + Deterine if the HTML document can have an xml:base element + + - + - Keywords in Sparql + Get the html element of the document + + - + - Keywords in Sparql + Process the content of an HTML document + + - + - Keywords in Sparql + Get all attributes of an element + + - + - Keywords in Sparql + Get the name of an attribute + + - + - Keywords in Sparql + Get the value of an attribute + + - + - Keywords in Sparql + Get the name of an element + + - + - Keywords in Sparql + Return the children of an element (in order) + + - + - Keywords in Sparql + Get the inner text of an element or a text node + + - + - Keywords in Sparql + Get the HTML contained within an element as a string + + - + - Keywords in Sparql + Determine if an element has children + + True if the element has children, false otherwise - + - Keywords in Sparql + Determine if a node in the parsed Html document tree is a text node + + True if is a text node, false otherwise - + - Keywords in Sparql + Process the content of an element of the document + + + - + - Keywords in Sparql + Resolves a CURIE to a Node + Parser Context + Evaluation Context + CURIE + - + - Keywords in Sparql + Resolves an Attribute which may be a CURIE/URI to a Node + Parser Context + Evaluation Context + URI/CURIE + - + - Keywords in Sparql + Resolves an Attribute which may be a Term/CURIE/URI to a Node where one/more of the values may be special values permissible in a complex attribute + 
Parser Context + Evaluation Context + URI/CURIE/Term + - + - Keywords in Sparql + Parses an complex attribute into a number of Nodes + Parser Context + Evaluation Context + Attribute Value + + + A complex attribute is any attribute which accepts multiple URIs, CURIEs or Terms + - + - Keywords in Sparql + Parses an attribute into a number of Nodes from the CURIEs contained in the Attribute + Parser Context + Evaluation Context + Attribute Value + - + - Keywords in Sparql + Get the text content of a node and add it to the provided output buffer + + - + - Keywords in Sparql + Internal Helper for raising the Warning Event + Warning Message - + - Keywords in Sparql + Event which is raised when there is a non-fatal error with the input being read - + - Keywords in Sparql + Possible RDFa Syntaxes - + - Keywords in Sparql + RDFa 1.0 - + - Keywords in Sparql + RDFa 1.1 - + - Keywords in Sparql + Auto-detect - assumes RDFa 1.1 - + - Keywords in Sparql + Auto-detect - assumes RDFa 1.0 - + - Keywords in Sparql + Parser for RDF/JSON Syntax + Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Keywords in Sparql + Read RDF/JSON Syntax from some Stream into a Graph + Graph to read into + Stream to read from - + - Keywords in Sparql + Read RDF/JSON Syntax from some Input into a Graph + Graph to read into + Input to read from - + - Keywords in Sparql + Read RDF/Json Syntax from some File into a Graph + Graph to read into + File to read from - + - Keywords in Sparql + Read RDF/JSON Syntax from some Stream using a RDF Handler + RDF Handler to use + Stream to read from - + - Keywords in Sparql + Read RDF/JSON Syntax from some Input using a RDF Handler + RDF Handler to use + Input to read from - + - Keywords in Sparql + Read RDF/JSON Syntax from a file using a RDF Handler + RDF Handler to use + File to read from - + - Keywords in Sparql + Internal top level Parse method which parses the Json + RDF Handler to use + 
Stream to read from - + - Keywords in Sparql + Parser method which parses the top level Json Object which represents the overall Graph + Parser Context - + - Keywords in Sparql + Parser method which parses Json Objects representing Triples + Parser Context - + - Keywords in Sparql + Parser method which parses Json Objects representing Predicate Object Lists + Parser Context + Subject of Triples which comes from the parent Json Object - + - Keywords in Sparql + Parser method which parses Json Arrays representing Object Lists + Parser Context + Subject of Triples which comes from the Grandparent Json Object + Predicate of Triples which comes form the Parent Json Object - + - Keywords in Sparql + Parser method which parses Json Objects reprsenting Object Nodes + Parser Context + Subject of Triples which comes from the Great-Grandparent Json Object + Predicate of Triples which comes form the Grandparent Json Object - + - Keywords in Sparql + Helper method for raising Error messages with attached Line Information + Parser Context + Error Message + - + - Keywords in Sparql + Helper method for raising Error messages with attached Position Information + Parser Context + Error Message + Start Position + - + - Keywords in Sparql + Helper Method for raising the Warning event + Warning Message - + - Keywords in Sparql + Event which is raised if there's a non-fatal issue with the RDF/Json Syntax - + - Keywords in Sparql + Gets the String representation of the Parser which is a description of the syntax it parses + - + - Keywords in Sparql + Helper class with useful constants relating to the RDF Specification - + - Keywords in Sparql + URI for rdf:List - + - Keywords in Sparql + URI for rdf:first - + - Keywords in Sparql + URI for rdf:rest - + - Keywords in Sparql + URI for rdf:nil - + - Keywords in Sparql + URI for rdf:type - + - Keywords in Sparql + URI for rdf:XMLLiteral - + - Keywords in Sparql + URI for rdf:subject - + - Keywords in Sparql + URI for rdf:predicate - + - 
Keywords in Sparql + URI for rdf:object - + - Keywords in Sparql + URI for rdf:Statement - + - Keywords in Sparql + URI for rdf:langString the implicit type of language specified literals - + - Keywords in Sparql + Pattern for Valid Language Specifiers - + - Keywords in Sparql + Regular Expression for Valid Language Specifiers - + - Keywords in Sparql + Determines whether a given String is a valid Language Specifier + String to test + - + - Keywords in Sparql + Possible RDF/XML Parse Types - + - Keywords in Sparql + No specific Parse Type is specified (Default Parsing Rules will be used) - + - Keywords in Sparql + Literal Parse Type - + - Keywords in Sparql + Resource Parse Type - + - Keywords in Sparql + Collection Parse Type - + - Keywords in Sparql + Other Parse Type + This is never used since any other Parse Type encountered is assumed to be Literal as per the RDF/XML Specification - + - Keywords in Sparql + Possible RDF/XML Parser Modes - + - Keywords in Sparql + Uses DOM Based parsing (not fully supported under .NET Standard/Core) - + - Keywords in Sparql + Uses Streaming Based parsing (default) - + - Keywords in Sparql + Parser for RDF/XML syntax - + - Keywords in Sparql + Controls whether Parser progress will be traced by writing output to the Console - + - Keywords in Sparql + Creates a new RDF/XML Parser - + - Keywords in Sparql + Creates a new RDF/XML Parser which uses the given parsing mode + RDF/XML Parse Mode - + - Keywords in Sparql + Reads RDF/XML syntax from some Stream into the given Graph + Graph to create Triples in + Input Stream - + - Keywords in Sparql + Reads RDF/XML syntax from some Input into the given Graph + Graph to create Triples in + Input to read from - + - Keywords in Sparql + Reads RDF/XML syntax from some File into the given Graph + Graph to create Triples in + Filename of File containg XML/RDF + Simply opens a Stream for the File then calls the other version of Load to do the actual parsing - + - Keywords in Sparql + Reads 
RDF/XML syntax from some Stream using a RDF Handler + RDF Handler to use + Input Stream - + - Keywords in Sparql + Reads RDF/XML syntax from some Input using a RDF Handler + RDF Handler to use + Input to read from - + - Keywords in Sparql + Reads RDF/XML syntax from a file using a RDF Handler + RDF Handler to use + File to read from - + - Keywords in Sparql + Reads RDF/XML from the given XML Document + Graph to load into + XML Document - + - Keywords in Sparql + Helper Method for raising the Warning event + Warning Message - + - Keywords in Sparql + Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed - + - Keywords in Sparql + Function which does the actual Parsing by invoking the various steps of the Parser + Parser Context - + - Keywords in Sparql + Starts the Parsing of the flattened Event Tree by calling the appropriate Grammar Production based on the type of the First Event in the Queue - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'doc' + Parser Context + Root Event to start applying Productions from - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'RDF' + Parser Context + RDF Element to apply Production to - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'nodeElementList' + Parser Context + Queue of Events to apply the Production to - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'nodeElement' + Parser Context + Queue of Events that make up the Node Element and its Children to apply the Production to - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'propertyEltList' + Parser Context + Queue of Events to apply the Production to + Parent Event (ie. 
Node) of the Property Elements - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'propertyElt' + Parser Context + Queue of Events that make up the Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'resourcePropertyElt' + Parser Context + Queue of Events that make up the Resource Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'literalPropertyElt' + Parser Context + Queue of Events that make up the Literal Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'parseTypeLiteralPropertyElt' + Parser Context + Queue of Events that make up the Literal Parse Type Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'parseTypeResourcePropertyElt' + Parser Context + Queue of Events that make up the Resource Parse Type Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'parseTypeCollectionPropertyElt' + Parser Context + Queue of Events that make up the Collection Parse Type Property Element and its Children + Parent Event (ie. Node) of the Property Element - + - Keywords in Sparql + Implementation of the RDF/XML Grammar Production 'emptyPropertyElt' + Parser Context + Element Event for the Empty Property Element + Parent Event (ie. 
Node) of the Property Element - + - Keywords in Sparql + Applies the Namespace Attributes of an Element Event to the Namespace Map + Parser Context + Element Event - + - Keywords in Sparql + Resolves a Uri Reference into a Uri Node against a given Base Uri + Parser Context + Uri Reference to Resolve + Base Uri to Resolve against + - + - Keywords in Sparql + Reifies a Triple + Parser Context + Uri Reference for the Reified Triple + Subject of the Triple + Predicate of the Triple + Object of the Triple - + - Keywords in Sparql + Helper function which inserts an Element back on the front of a Queue + Queue to insert onto the Front of + Event to put on the front of the Queue - + - Keywords in Sparql + Applies List Expansion to the given Event + Element to apply List Expansion to + Uri Reference for the List Item + List Expansion only works on Element Events - + - Keywords in Sparql + Validates that an ID is correctly formed and has only been used once in the context of a given Subject + Parser Context + ID to Validate + Subject that the ID pertains to - + - Keywords in Sparql + Tracing function used when Parse Tracing is enabled + Production - + - Keywords in Sparql + Tracing function used when Parse Tracing is enabled + Production + - + - Keywords in Sparql + Gets the String representation of the Parser which is a description of the syntax it parses + - + - Keywords in Sparql + Static Helper class for providing Constants and Helper functions for use by RDF/XML parsers - + - Keywords in Sparql + Checks whether a Uri Reference is an absolute Uri + Uri Reference to Test + + Implemented by seeing if the Uri Reference starts with a Uri scheme specifier - + - Keywords in Sparql + Array containing the Core Syntax Terms - + - Keywords in Sparql + Array containing the other Syntax Terms - + - Keywords in Sparql + Array containing the Old Syntax Terms - + - Keywords in Sparql + Array containing Syntax Terms where the rdf: Prefix is mandated - + - Keywords in Sparql + Checks 
whether a given QName is a Core Syntax Term + QName to Test + True if the QName is a Core Syntax Term - + - Keywords in Sparql + Checks whether a given QName is a Syntax Term + QName to Test + True if the QName is a Syntax Term - + - Keywords in Sparql + Checks whether a given QName is a Old Syntax Term + QName to Test + True if the QName is a Old Syntax Term - + - Keywords in Sparql + Checks whether a given QName is valid as a Node Element Uri + QName to Test + True if the QName is valid - + - Keywords in Sparql + Checks whether a given QName is valid as a Property Element Uri + QName to Test + True if the QName is valid - + - Keywords in Sparql + Checks whether a given QName is valid as a Property Attribute Uri + QName to Test + True if the QName is valid - + - Keywords in Sparql + Checks whether a given Local Name is potentially ambigious + Local Name to Test + True if the Local Name is ambigious + This embodies Local Names which must have an rdf prefix - + - Keywords in Sparql + Checks whether a given URIRef is encoded in Unicode Normal Form C + URIRef to Test + True if the URIRef is encoded correctly - + - Keywords in Sparql + Checks whether a given Base Uri can be used for relative Uri resolution + Base Uri to Test + True if the Base Uri can be used for relative Uri resolution - + - Keywords in Sparql + Determines whether a QName is valid for use in RDF/XML + QName + - + - Keywords in Sparql + Checks whether an attribute is an rdf:ID attribute + Attribute to Test + True if is an rdf:ID attribute + Does some validation on ID value but other validation occurs at other points in the Parsing - + - Keywords in Sparql + Checks whether an attribute is an rdf:nodeID attribute + Attribute to Test + True if is an rdf:nodeID attribute + Does some validation on ID value but other validation occurs at other points in the Parsing - + - Keywords in Sparql + Checks whether an attribute is an rdf:about attribute + Attribute to Test + True if is an rdf:about attribute - + - 
Keywords in Sparql + Checks whether an attribute is an property attribute + Attribute to Test + True if is an property attribute - + - Keywords in Sparql + Checks whether an attribute is an rdf:resource attribute + Attribute to Test + True if is an rdf:resource attribute - + - Keywords in Sparql + Checks whether an attribute is an rdf:datatype attribute + Attribute to Test + True if is an rdf:datatype attribute - + - Keywords in Sparql + Validates that an ID is a valid NCName + ID Value to Test + True if the ID is valid - + - Set of SPARQL Keywords that are Non-Query Keywords + Validates that a URIReference is valid + URIReference to Test + True + + Currently partially implemented, some invalid Uri References may be considered valid + - + - Set of SPARQL Keywords that are Function Keywords + Parser for SPARQL Boolean results as Plain Text - + - Set of SPARQL Keywords that are Aggregate Keywords + Loads a Result Set from an Input Stream - - Unlike AggregateFunctionKeywords[] this includes keywords related to aggregates (like DISTINCT) and those for Leviathan extension aggregates which are not standard SPARQL 1.1 syntax - + Result Set to load into + Input Stream to read from - + - Set of SPARQL Keywords that are built in SPARQL Aggregate Functions + Loads a Result Set from an Input Stream + Result Set to load into + File to read from - + - Set of XML Schema Data Types which are derived from Integer and can be treated as Integers by SPARQL + Loads a Result Set from an Input + Result Set to load into + Input to read from - + - Set of IRIs for supported Cast Functions + Loads a Result Set from an Input using a Results Handler + Results Handler to use + Input to read from - + - Set of Keywords for SPARQL Query 1.0 + Loads a Result Set from an Input Stream using a Results Handler + Results Handler to use + Input Stream to read from - + - Set of additional Keywords for SPARQL Query 1.1 + Loads a Result Set from a file using a Results Handler + Results Handler to use + File 
to read from - + - Set of SPARQL Keywords that are Update Keywords + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Warning Message - + - Set of Keywords for SPARQL Update 1.1 + Event raised when a non-fatal issue with the SPARQL Results being parsed is detected - + - Regular Expression Pattern for Valid Integers in Sparql + Gets the String representation of the Parser which is a description of the syntax it parses + - + - Regular Expression Pattern for Valid Decimals in Sparql + Parser for reading SPARQL Results that have been serialized in the SPARQL Results CSV format - + - Regular Expression Pattern for Valid Doubles in Sparql + Loads a Result Set from an Input Stream + Result Set to load into + Input Stream to read from - + - Checks whether a given Keyword is a SPARQL Query Keyword + Loads a Result Set from a File - Keyword to check - + Result Set to load into + File to load from - + - Checks whether a given Keyword is a SPARQL Non-Query Keyword + Loads a Result Set from an Input - Keyword to check - + Result Set to load into + Input to read from - + - Checks whether a given Keyword is a SPARQL Function Verb + Loads a Result Set from an Input Stream using a Results Handler - Keyword to check - + Results Handler to use + Input Stream to read from - + - Checks whether a given Keyword is a SPARQL 1.1 Function Verb + Loads a Result Set from a File using a Results Handler - Keyword to check - + Results Handler to use + Filename to load from - + - Checks whether a given Keyword is a SPARQL Aggregate Keyword (includes keywords related to aggregates like DISTINCT, AS and Leviathan extension aggregate keywords) + Loads a Result Set from an Input using a Results Handler - Keyword to check - + Results Handler to use + Input to read from - + - Checks whether a given Keyword is a SPARQL Aggregate Function Keyword (only keywords for the SPARQL built-in aggregate functions) + Event which is raised when 
the parser encounters a non-fatal issue with the syntax being parsed - Keyword to check - - + - Checks whether a given Keyword is a SPARQL Update Keyword + Gets the String representation of the Parser - - + - Checks whether a given QName is valid in Sparql + Parser for SPARQL Results JSON Format - QName to check - SPARQL Syntax - - + - Checks whether a given Variable Name is valid in Sparql + Loads a Result Set from an Input Stream - Variable Name to check - + Result Set to load into + Input Stream to read from - + - Gets whether a given prefix declaration is valid in SPARQL + Loads a Result Set from a File - Prefix declaration - + Result Set to load into + File to load from - + - Gets whether a given BNode ID is valid + Loads a Result Set from an Input - Value - + Result Set to load into + Input to read from - + - Checks whether a given Character matches the PN_CHARS_BASE rule from the Sparql Specification + Loads a Result Set from an Input using a Results Handler - Character to test - + Results Handler to use + Input to read from - + - Checks whether a given Character matches the PN_CHARS_U rule from the SPARQL Specification + Loads a Result Set from an Input Stream using a Results Handler - Character to test - + Results Handler to use + Input Stream to read from - + - Checks whether a given Character matches the PN_CHARS rule from the SPARQL Specification + Loads a Result Set from a file using a Results Handler - Character to test - + Results Handler to use + File to read from - + - Checks whether a given String matches the PN_LOCAL rule from the Sparql Specification + Parser method which parses the Stream as Json - String as character array - SPARQL Syntax - + Input Stream + Results Handler - + - Checks whether a given String matches the PN_PREFIX rule from the SPARQL Specification + Parser method which parses the top level Json Object which represents the overall Result Set - String as character array - - + - Checks whether a given String matches the PLX rule 
from the SPARQL Specification + Parser method which parses the 'head' property of the top level Json Object which represents the Header of the Result Set - String as character array - Start Index - Resulting End Index - - + - Gets whether a character is a Hex character + Parser method which parses the 'head' property of the top level Json Object which represents the Header of the Result Set - Character - - + - Unescapes local name escapes from QNames + Parser method which parses the Header Object of the Result Set - Value to unescape - - + - Checks whether the given value is a valid Numeric Literal in Sparql + Parser method which parses the Properties of the Header Object - Value to test - - + - Checks whether the given value is a valid Integer Literal in Sparql + Parser method which parses the 'vars' property of the Header Object - Value to test - - + - Checks whether the given value is a valid Decimal Literal in Sparql + Parser method which parses the 'link' property of the Header Object - Value to test - - + - Checks whether the given value is a valid Float Literal in Sparql + Parser method which parses the Body of the Result Set which may be either a 'results' or 'boolean' property of the top level Json Object - - - + - Checks whether the given value is a valid Double Literal in Sparql + Parser method which parses the Results Object of the Result Set - Value to test - - + - Determines the Sparql Numeric Type for a Literal based on its Data Type Uri + Parser method which parses the 'bindings' property of the Results Object - Data Type Uri - - + - Determines the Sparql Numeric Type for a Literal based on its Data Type Uri + Parser method which parses a Binding Object which occurs in the array of Bindings - Data Type Uri as a String - - + - Calculates the Effective Boolean Value of a given Node according to the Sparql specification + Parser method which parses a Bound Variable Object which occurs within a Binding Object - Node to computer EBV for - + Parser 
Context + Variable Name + Result Object that is being constructed from the Binding Object + - + - Checks whether the Query is a SELECT Query + Parser method which parses the 'boolean' property of the Result Set - Query Type - - + - Implements Node Equality with SPARQL Semantics + Checks whether a JSON Token is valid as the value for a RDF term - Node - Node + Context - + - Implements Node Inequality with SPARQL Semantics + Skips to the end of the current object - Node - Node - + Context + True if a warning should be issued - + - Implements Numeric Equality with SPARQL Semantics + Helper method for raising Error messages with attached Line Information - Node - Node - SPARQL Numeric Tyoe + Parser Context + Error Message - + - Implements Date Time Equality with SPARQL Semantics + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected - Node - Node - + Warning Message - + - Implements Date Equality with SPARQL Semantics + Event raised when a non-fatal issue with the SPARQL Results being parsed is detected - Node - Node - - + - Implements Time Span Equality with SPARQL Semantics + Gets the String representation of the Parser which is a description of the syntax it parses - Node - Node - + - Converts a Literal Node to a Decimal + Available Query Syntaxes - Literal Node - - + - Converts a Literal Node to a Double + Use SPARQL 1.0 - Literal Node - - + - Converts a Literal Node to a Float + Use SPARQL 1.1 - Literal Node - - + - Converts a Literal Node to an Integer + Use the latest SPARQL specification supported by the library (currently SPARQL 1.1) with some extensions - Literal Node - + + + Extensions include the following: + +
    +
  • LET assignments (we recommend using the SPARQL 1.1 standards BIND instead)
  • +
  • Additional aggregates - NMAX, NMIN, MEDIAN and MODE (we recommend using the Leviathan Function Library URIs for these instead to make them usable in SPARQL 1.1 mode)
  • +
  • UNSAID alias for NOT EXISTS (we recommend using the SPARQL 1.1 standard NOT EXISTS instead
  • +
  • EXISTS and NOT EXISTS are permitted as Graph Patterns (only allowed in FILTERs in SPARQL 1.1)
  • +
+
- + - Converts a Literal Node to a Date Time + Class for parsing SPARQL Queries into SparqlQuery objects that can be used to query a Graph or Triple Store - Literal Node - - + - Converts a Literal Node to a Date Time Offset + Creates a new instance of the SPARQL Query Parser - Literal Node - - + - Converts a Literal Node to a Time Span + Creates a new instance of the SPARQL Query Parser which supports the given SPARQL Syntax - Literal Node - + SPARQL Syntax - + - Gets a SPARQL Formatter to use in formatting Queries as Strings + Creates a new instance of the SPARQL Query Parser using the given Tokeniser Queue Mode + Token Queue Mode - + - Represents the level of Query Explanation that is desired + Creates a new instance of the SPARQL Query Parser using the given Tokeniser which supports the given SPARQL Syntax + Token Queue Mode + SPARQL Syntax - + - Specifies No Explanations + Gets/Sets whether Tokeniser progress is Traced to the Console - + - Specifies Explanations are output to Debug + Gets/Sets the Default Base URI for Queries parsed by this Parser instance - + - Specifies Explanations are output to Trace + Gets/Sets the Syntax that should be supported - + - Specifies Explanations are output to Console Standard Output + Gets/Sets the locally scoped custom expression factories - + - Specifies Explanations are output to Console Standard Error + Gets/Sets the locally scoped Query Optimiser applied to queries at the end of the parsing process + + + May be null if no locally scoped optimiser is set in which case the globally scoped optimiser will be used + + - + - Specifies Explanations are output to Debug and Console Standard Output + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Query being parsed is detected + Warning Message - + - Specifies Explanations are output to all + Event raised when a non-fatal issue with the SPARQL Query being parsed is detected - + - Show the Thread ID of the Thread evaluating the query (useful in 
multi-threaded environments) + Parses a SPARQL Query from a File + File containing the Query + - + - Show the Depth of the Algebra Operator + Parses a SPARQL Query from an arbitrary Input Stream + Input Stream + - + - Show the Type of the Algebra Operator + Parses a SPARQL Query from an arbitrary Input + Input + - + - Show the Action being performed (makes it clear whether the explanation marks the start/end of an operation) + Parses a SPARQL Query from a String + A SPARQL Query + - + - Shows Timings for the Query + Parses a SPARQL Query from a SPARQL Parameterized String + A SPARQL Parameterized String + + + The SparqlParameterizedString class allows you to use parameters in a String in a manner similar to SQL Commands in the ADO.Net model. See the documentation for SparqlParameterizedString for details of this. + - + - Show Intermediate Result Counts at each stage of evaluation + Tries to parse a Graph Pattern from the given Parser Context + Parser Context + Whether the opening Left Curly Bracket is required + - + - Shows Basic Information (Depth, Operator and Action) + Constructs an error message that informs the user about unexpected excess tokens in a SPARQL qery + Current parser context + The expected number of tokens + - + - Shows Default Information (Thread ID, Depth, Operator and Action) + Parser for reading SPARQL Results which have been encoded in the RDF schema for Result Sets and serialized as RDF - + - Shows All Information + Creates a new SPARQL RDF Parser which will use auto-detection for determining the syntax of input streams/files - + - Shows an analysis of BGPs prior to evaluating them + Creates a new SPARQL RDF Parser which will use the given RDF Parser - - This lets you see how many joins, cross products, filters, assignments etc must be applied in each BGP - + RDF Parser - + - Shows an analysis of Joins prior to evaluating them + Loads a SPARQL Result Set from RDF contained in the given Input + SPARQL Result Set to populate + Input to read 
from - This lets you see whether the join is a join/cross product and in the case of a Minus whether the RHS can be ignored completely + Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - + - Shows an analysis of Named Graphs used by a Graph clause prior to evaluating them + Loads a SPARQL Result Set from RDF contained in the given Stream + SPARQL Result Set to populate + Stream to read from - This lets you see how many graphs a given Graph clause will operate over. As the Graph clause in SPARQL is defined as the union of evaluating the inner operator over each named graph in the dataset graph clauses applied to datasets with many named graphs can be expensive. + Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use - + - Sets whether Evaluation should be simulated (means timings will not be accurate but allows you to explain queries without needing actual data to evaluate them against) + Loads a SPARQL Result Set from RDF contained in the given File + SPARQL Result Set to populate + File to read from + + Uses the FileLoader to load the RDF from the file which will attempt to determine the format of the RDF based on the file extension unless the parser was instantiated with a specific IRdfReader to use + - + - Shows all analysis information + Loads a SPARQL Results from RDF contained in the given Input using a Results Handler + Results Handler to use + Input to read from + + Uses the StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use + - + - Basic Explanation Level (Console Standard Output and Basic Information) + Loads a SPARQL Results from RDF contained in the given Stream using a Results Handler + Results Handler to use + Stream to read from + + Uses the 
StringParser which will use simple heuristics to 'guess' the format of the RDF unless the parser was instaniated with a specific IRdfReader to use + - + - Default Explanation Level (Default Outputs and Default Information) + Loads a SPARQL Results from RDF contained in the given file using a Results Handler + Results Handler to use + File to read from + + Uses the FileLoader to load the RDF from the file which will attempt to determine the format of the RDF based on the file extension unless the parser was instantiated with a specific IRdfReader to use + - + - Detailed Explanation Level (Default Outputs and All Information) + Internal method which actually parses the Result Set by traversing the RDF Graph appropriately + Parser Context - + - Full Explanation Level (All Outputs, All Information and All Analysis) + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected + Warning Message - + - Basic Explanation Level with Query Evaluation simulated + Event raised when a non-fatal issue with the SPARQL Results being parsed is detected - + - Default Explanation Level with Query Evaluation simulated + Gets the String representation of the Parser which is a description of the syntax it parses + - + - Detailed Explanation Level with Query Evaluation simulated + Parser for reading SPARQL Results that have been serialized in the SPARQL Results TSV format - + - Full Explanation Level with Query Evaluation simulated + Loads a Result Set from an Input Stream + Result Set to load into + Input Stream to read from - + - A Query Processor which evaluates queries while printing explanations to any/all of Debug, Trace, Console Standard Output and Console Standard Error + Loads a Result Set from a File + Result Set to load into + File to load from - + - Creates a new Explain Query Processor that will use the Default Explanation Level + Loads a Result Set from an Input - Dataset + Result Set to load into + Input to read from 
- + - Creates a new Explain Query Processor with the desired Explanation Level + Loads a Result Set from an Input Stream using a Results Handler - Dataset - Explanation Level + Results Handler to use + Input Stream to read from - + - Creates a new Explain Query Processor that will use the Default Explanation Level + Loads a Result Set from a File using a Results Handler - Triple Store + Results Handler to use + Filename to load from - + - Creates a new Explain Query Processor with the desired Explanation Level + Loads a Result Set from an Input using a Results Handler - Triple Store - Explanation Level + Results Handler to use + Input to read from - + - Gets/Sets the Explanation Level + Event which is raised when the parser encounters a non-fatal issue with the syntax being parsed - + - Determines whether a given Flag is present + Gets the String representation of the Parser - Flag - - - Prints Analysis - - Algebra - SPARQL Evaluation Context - - + - Prints BGP Analysis + Class for parsing SPARQL Update commands into SparqlUpdateCommandSet objects that can be used to modify a Triple Store - Analysis - + - Prints Join Analysis + Gets/Sets whether Tokeniser Tracing is used - Join - + - Prints Expalantions + Gets/Sets the locally scoped custom expression factories - StringBuilder to output to - + - Prints Explanations + Gets/Sets the Default Base URI used for Updated Commands parsed by this parser instance - String to output - + - Explains the start of evaluating some algebra operator + Gets/Sets the locally scoped Query Optimiser applied to graph patterns in update commands at the end of the parsing process - Algebra - Context + + + May be null if no locally scoped optimiser is set in which case the globally scoped optimiser will be used + + - + - Explains the evaluation of some action + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Update Commands being parsed is detected - Algebra - Context - Action + Warning Message - + - 
Explains the end of evaluating some algebra operator + Event raised when a non-fatal issue with the SPARQL Update Commands being parsed is detected - Algebra - Context - + - Explains and evaluates some algebra operator + Parses a SPARQL Update Command Set from the input stream - Algebra Operator Type - Algebra - Context - Evaluator Function + Input Stream - - - Processes an Ask - - Ask - SPARQL Evaluation Context - - - - Processes a BGP - - BGP - SPARQL Evaluation Context - - + - Processes a Bindings modifier + Parses a SPARQL Update Command Set from the input - Bindings - SPARQL Evaluation Context + Input + - + - Processes a Distinct modifier + Parses a SPARQL Update Command Set from the given file - Distinct modifier - SPARQL Evaluation Context + File + - + - Processes an Exists Join + Parses a SPARQL Update Command Set from the given String - Exists Join - SPARQL Evaluation Context + SPARQL Update Commands + - + - Processes an Extend + Parses a SPARQL Update Command Set from the given String - Extend - SPARQL Evaluation Context + SPARQL Update Commands + - + - Processes a Filter + Parser for SPARQL Results XML Format - Filter - SPARQL Evaluation Context - + - Processes a Graph + Loads a Result Set from an Input - Graph - SPARQL Evaluation Context + Result Set to load into + Input to read from - + - Processes a Group By + Loads a Result Set from an Input Stream - Group By - SPARQL Evaluation Context + Result Set to load into + Input Stream to read from - + - Processes a Having + Loads a Result Set from a File - Having - SPARQL Evaluation Context + Result Set to load into + File to load from - + - Processes a Join + Loads a Result Set from an Input using a Results Handler - Join - SPARQL Evaluation Context + Results Handler to use + Input to read from - + - Processes a LeftJoin + Loads a Result Set from an Input using a Results Handler - Left Join - SPARQL Evaluation Context + Results Handler to use + Input Stream to read from - + - Processes a Minus + Loads a 
Result Set from a file using a Results Handler - Minus - SPARQL Evaluation Context + Results Handler to use + File to read from - + - Processes a Negated Property Set + Initialises the XML Reader settings - Negated Property Set - SPARQL Evaluation Context - + - Processes a Null Operator + Parses the XML Result Set format into a set of SPARQLResult objects - Null Operator - SPARQL Evaluation Context - + Parser Context - + - Processes a One or More Path + Internal Helper method which parses the child element of a <binding> element into an INode - Path - SPARQL Evaluation Context + Parser Context - + - Processes an Order By + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being parsed is detected - - SPARQL Evaluation Context + Warning Message - + - Processes a Property Path + Event raised when a non-fatal issue with the SPARQL Results being parsed is detected + + + + + Gets the String representation of the Parser which is a description of the syntax it parses - Path - SPARQL Evaluation Context - + - Processes a Reduced modifier + Static Helper Class which allows raw strings of RDF/SPARQL Results to be parsed directly - Reduced modifier - SPARQL Evaluation Context + + The API structure for dotNetRDF means that our IRdfReader classes which are our Parsers only have to support parsing from a file or a stream. For most applications this is fine but there may be occassions when you wish to parse a small fragment of RDF and you don't want to have to put it into a file before you can parse it. 
+ - + - Processes a Select + Parses a raw RDF String using the given IRdfReader - Select - SPARQL Evaluation Context + Graph to load into + Raw RDF String + Parser to use to read the data + Use this when you have a raw RDF string and you know the syntax the RDF is in - + - Processes a Select Distinct Graphs + Parses a raw RDF String (attempts to auto-detect the format) - Select Distinct Graphs - SPARQL Evaluation Context + Graph to load into + Raw RDF String + +

+ Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate and it may choose a parser which is less expressive than the actual syntax e.g. TurtleParser instead of Notation3Parser as it tends to guess downwards. +

+

+ For example if you parsed a Notation 3 string that contained Graph Literals but didn't use any of the Notation 3 specific directives like @keywords it would be assumed to be Turtle but then would fail to parse +

+

+ The auto-detection rules used are as follows: +

+
    +
  1. If it contains <?xml and <rdf:RDF then it's most likely RDF/XML
  2. +
  3. If it contains <html then it's most likely HTML with possibly RDFa embedded
  4. +
  5. + If it contains @prefix or @base then its Turtle/Notation 3 +
      +
    1. If it contains @keywords, @forall or @forsome then it's Notation 3
    2. +
    3. Otherwise it's Turtle
    4. +
    +
  6. +
  7. If it contains all of a set of terms and symbols that occur in RDF/JSON then it's most likely RDF/JSON. These terms are "value","type",{,},[ and ]
  8. +
  9. Otherwise try it as NTriples, NTriples has no real distinctive syntax so hard to test if it's NTriples other than by parsing it
  10. +
+
- + - Processes a Service + Parses a raw RDF Dataset String using the given Parser - Service - SPARQL Evaluation Context + Store to load into + Raw RDF Dataset String + Parser to use - + - Processes a Slice modifier + Parses a raw RDF Dataset String (attempts to auto-detect the format) - Slice modifier - SPARQL Evaluation Context + Store to load into + Raw RDF Dataset String + +

+ Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate. +

+
- + - Processes a Subquery + Parses a raw SPARQL Results String (attempts to auto-detect the format) - Subquery - SPARQL Evaluation Context - + SPARQL Result Set to fill + Raw SPARQL Results String + +

+ Auto-detection is based on testing the string to see if it contains certain keyword constructs which might indicate a particular syntax has been used. This detection may not always be accurate. +

+
- + - Processes a Union + Parses a raw SPARQL Results String using the given Parser - Union - SPARQL Evaluation Context + SPARQL Result Set to fill + Raw SPARQL Results String + Parser to use - + - Processes a Unknown Operator + Uses the rules described in the remarks for the Parse() to return the most likely Parser - Unknown Operator - SPARQL Evaluation Context + Raw RDF String - + - Processes a Zero Length Path + Uses the format detection rules to determine the most likely RDF Dataset Parser - Path - SPARQL Evaluation Context + Raw RDF Dataset String - + - Processes a Zero or More Path + Uses the format detection rules to return the most likely SPARQL Results parser - Path - SPARQL Evaluation Context + Raw SPARQL Results String - + - A Class for connecting to multiple remote SPARQL Endpoints and federating queries over them with the data merging done locally + Parser for parsing TriG (Turtle with Named Graphs) RDF Syntax into a Triple Store - - - Queries are federated by executing multiple requesting simultaneously and asynchronously against the endpoints in question with the data then merged locally. The merging process does not attempt to remove duplicate data it just naively merges the data. 
- - + The Default Graph (if any) will be given the special Uri trig:default-graph - + - Creates a new Federated SPARQL Endpoint using a given Endpoint + Creates a TriG Parser than uses the default syntax - Endpoint - + - Creates a new Federated SPARQL Endpoint using the given Endpoints + Creates a TriG Parser which uses the specified syntax - Endpoints + Syntax - + - Creates a new Federated SPARQL Endpoint by creating a new SparqlRemoteEndpoint for the given URI + Gets/Sets whether Tokeniser Tracing is used - Endpoint URI - + - Creates a new Federated SPARQL Endpoint by creating a SparqlRemoteEndpoint for each of the given URI + Gets/Sets the TriG syntax used - Endpoint URIs - + - Adds a additional endpoint to be used by this endpoint + Gets/Sets the token queue mode used - Endpoint - + - Adds an additional endpoint to be used by this endpoint + Loads the named Graphs from the TriG input into the given Triple Store - Endpoint URI + Triple Store to load into + File to load from - + - Removes a given endpoint from this endpoint + Loads the named Graphs from the TriG input into the given Triple Store - Endpoint + Triple Store to load into + Input to load from - + - Removes all endpoints with the given URI from this endpoint + Loads the named Graphs from the TriG input using the given RDF Handler - Endpoint URI + RDF Handler to use + File to load from - + - Gets/Sets whether a failed request on one endpoint should cause the entire request to fail + Loads the named Graphs from the TriG input using the given RDF Handler + + RDF Handler to use + Input to load from + + + + Tries to parse a directive + + + + + Tries to parse directives + + + - - By default if a request on any of the endpoint fails or times out then the entire request will fail - + This overload is needed because in some cases we may dequeue a token before we know it is a directive - + - Gets/Sets the maximum number of endpoints this endpoint will issue queries to at any one time + Helper method used to 
raise the Warning event if there is an event handler registered + Warning message - + - Makes a Query to a Sparql Endpoint and returns the raw Response + Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed - Sparql Query String - - Thrown if more than one endpoint is in use since for any federated endpoint which used more than one endpoint there is no logical/sensible way to combine the result streams - + - Makes a Query to a Sparql Endpoint and returns the raw Response + Gets the String representation of the Parser which is a description of the syntax it parses - Sparql Query String - MIME Types to use for the Accept Header - Thrown if more than one endpoint is in use since for any federated endpoint which used more than one endpoint there is no logical/sensible way to combine the result streams - + - Makes a Query where the expected Result is an RDF Graph ie. CONSTRUCT and DESCRIBE Queries + Parser for parsing TriX (a named Graph XML format for RDF) - SPARQL Query String - RDF Graph - The query is executed by sending it federating it to all the endpoints this endpoint contains using simultaneous asychronous calls. Once these calls complete the results are naivley merged together (no duplicate data removal) and returned as a single result. + The Default Graph (if any) will be given the special Uri trix:default-graph - By default if any of the endpoints used return an error then the entire query will fail and an exception will be thrown, this behaviour can be overridden by setting the IgnoreFailedRequests property to be true in which case the result will be the merge of the results from all endpoints which successfully provided a result. 
+ TriX permits Graphs to be named with Blank Node IDs, since the library only supports Graphs named with URIs these are converted to URIs of the form trix:local:ID - Thrown if any of the requests to the endpoints fail - Thrown if not all the requests complete within the set timeout - + - Makes a Query where the expected result is a Graph i.e. a CONSTRUCT or DESCRIBE query + Current W3C Namespace Uri for TriX - RDF Handler to process the results - SPARQL Query - + - Makes a Query where the expected Result is a SparqlResultSet ie. SELECT and ASK Queries + Loads the RDF Dataset from the TriX input into the given Triple Store - Sparql Query String - A Sparql Result Set - - - The query is executed by sending it federating it to all the endpoints this endpoint contains using simultaneous asychronous calls. Once these calls complete the results are naivley merged together (no duplicate data removal) and returned as a single result. - - - By default if any of the endpoints used return an error then the entire query will fail and an exception will be thrown, this behaviour can be overridden by setting the IgnoreFailedRequests property to be true in which case the result will be the merge of the results from all endpoints which successfully provided a result. - - - Thrown if any of the requests to the endpoints fail - Thrown if not all the requests complete within the set timeout + Triple Store to load into + File to load from - + - Makes a Query where the expected Result is a SparqlResultSet ie. 
SELECT and ASK Queries + Loads the RDF Dataset from the TriX input into the given Triple Store - Results Handler to process the results - SPARQL Query String - Thrown if any of the requests to the endpoints fail - Thrown if not all the requests complete within the set timeout + Triple Store to load into + Input to load from - + - Serializes the Endpoint's Configuration + Loads the RDF Dataset from the TriX input using a RDF Handler - Configuration Serialization Context + RDF Handler to use + File to load from - + + + + - A SPARQL Query Processor which processes queries by parsing them to the SPARQL Query Service of a Knowledge Base on a Pellet Server + Helper method for raising informative standardised Parser Errors + The Error Message + The XML reader being used by the parser + - + - Creates a new Pellet Query Processor + Helper method used to raise the Warning event if there is an event handler registered - Pellet Server - Knowledge Base Name + Warning message - + - Creates a new Pellet Query Processor + Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed - Pellet Server URI - Knowledge Base Name - + - Processes a SPARQL Query + Gets the String representation of the Parser which is a description of the syntax it parses - SPARQL Query - + - Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + Parser for Turtle syntax - RDF Handler - Results Handler - SPARQL Query + Designed to be Thread Safe - should be able to call Load from multiple threads on different Graphs without issue - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Creates a new Turtle Parser - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set - State to pass to the callback - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the 
query completes + Creates a new Turtle Parser - RDF Handler - Results Handler - SPARQL Query - Callback - State to pass to the callback + Turtle Syntax - + - Interface for SPARQL Query Processors + Creates a new Turtle Parser which uses the given Token Queue Mode - - - A SPARQL Query Processor is a class that knows how to evaluate SPARQL queries against some data source to which the processor has access - - - The point of this interface is to allow for end users to implement custom query processors or to extend and modify the behaviour of the default Leviathan engine as required. - - + Queue Mode for Turtle - + - Processes a SPARQL Query returning a IGraph instance or a SparqlResultSet depending on the type of the query + Creates a new Turtle Parser which uses the given Token Queue Mode - SPARQL Query - - Either an IGraph instance of a SparqlResultSet depending on the type of the query - + Queue Mode for Turtle + Turtle Syntax - + - Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + Gets/Sets whether Parsing Trace is written to the Console - RDF Handler - Results Handler - SPARQL Query - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Gets/Sets whether Tokeniser Trace is written to the Console - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set - State to pass to the callback - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + Gets/Sets the token queue mode used - RDF Handler - Results Handler - SPARQL Query - Callback - State to pass to the callback - + - Interface for SPARQL Query Algebra Processors + Loads a Graph by reading Turtle syntax from the given input - - A SPARQL Query Algebra Processor is a class which knows how to evaluate the - - Type of intermediate results produced by processing an Algebra operator - Type of 
context object providing evaluation context + Graph to load into + Stream to read from - + - Processes SPARQL Algebra + Loads a Graph by reading Turtle syntax from the given input - Algebra - Evaluation Context + Graph to load into + Input to read from - + - Processes an Ask + Loads a Graph by reading Turtle syntax from the given file - Ask - Evaluation Context + Graph to load into + File to read from - + - Processes a BGP + Loads RDF by reading Turtle syntax from the given input using a RDF Handler - BGP - Evaluation Context + RDF Handle to use + Stream to read from - + - Processes a Bindings modifier + Loads RDF by reading Turtle syntax from the given input using a RDF Handler - Bindings - Evaluation Context + RDF Handle to use + Input to read from - + - Processes a Distinct modifier + Loads RDF by reading Turtle syntax from the given file using a RDF Handler - Distinct modifier - Evaluation Context + RDF Handle to use + File to read from - + - Processes an Exists Join + Internal method which does the parsing of the input - Exists Join - Evaluation Context + Parser Context - + - Processes an Extend + Tries to parse Base/Prefix declarations - Extend - Evaluation Context - + Parse Context + Whether declarations are Turtle style (if false SPARQL style is assumed) - + - Processes a Filter + Tries to parse Triples - Filter - Evaluation Context + Parser Context - + - Processes a Graph + Tries to parse Predicate Object lists - Graph - Evaluation Context + Parse Context + Subject of the Triples + Whether this is a Blank Node Predicate Object list - + - Processes a Group By + Tries to parse Object lists - Group By - Evaluation Context + Parse Context + Subject of the Triples + Predicate of the Triples + Whether this is a Blank Node Object list - + - Processes a Having + Tries to parse Collections - Having - Evaluation Context + Parser Context + Blank Node which is the head of the collection - + - Processes a Join + Tries to parse Literal Tokens into Literal Nodes - Join - 
Evaluation Context + Parser Context + Literal Token + - + - Processes a LeftJoin + Helper method which raises the Warning event if there is an event handler registered - Left Join - Evaluation Context + - + - Processes a Minus + Event which is raised when the parser detects issues with the input which are non-fatal - Minus - Evaluation Context - + - Processes a Negated Property Set + Gets the String representation of the Parser which is a description of the syntax it parses - Negated Property Set - Evaluation Context - + - Processes a Null Operator + Acceptable Turtle syntaxes - Null Operator - Evaluation Context - - + - Processes a One or More Path + Turtle as originally specified by the Turtle Team Submission - Path - Evaluation Context - - + - Processes an Order By + Turtle as standardised by the W3C RDF Working Group - - Evaluation Context - + - Processes a Property Path + Acceptable TriG syntaxes - Path - Evaluation Context - - + - Processes a Reduced modifier + TriG as originally specified - Reduced modifier - Evaluation Context + + @base is not permitted and @prefix may only occur outside of graphs + - + - Processes a Select + TriG as specified by the TriG Member Submission - Select - Evaluation Context + + @base is permitted and both @base and @prefix may occur both inside and outside graphs but the tokens use Turtle Team Submission rules i.e. newer escape sequences and other changes in the official W3C specification of Turtle do not apply. 
+ - + - Processes a Select Distinct Graphs + Helper function relating to the Turtle Specifications - Select Distinct Graphs - Evaluation Context + Not currently used in the actual TurtleTokeniser or TurtleParser but is used for the new TriGTokeniser - + - Processes a Service + Pattern for Valid Integers in Turtle - Service - Evaluation Context - + - Processes a Slice modifier + Pattern for Valid Decimals in Turtle - Slice modifier - Evaluation Context - + - Processes a subquery + Pattern for Valid Doubles in Turtle - Subquery - Evaluation Context - - + - Processes a Union + Pattern for determining whether a given String should be serialized as a Long Literal - Union - Evaluation Context - + - Processes an Unknown Operator + Determines whether a given String is a valid Plain Literal - Algebra - Evaluation Context + String to test + Turtle Syntax - + - Processes a Zero Length Path + Determines whether a given String is a valid Plain Literal for the given Datatype - Path - Evaluation Context + Value + Datatype + Turtle Syntax - + - Processes a Zero or More Path + Determines whether a given String is a valid Integer - Path - Evaluation Context + String to test - + - Default SPARQL Query Processor provided by the library's Leviathan SPARQL Engine + Determines whether a given String is a valid Decimal - - - The Leviathan Query Processor simply invokes the Evaluate method of the SPARQL Algebra it is asked to process - - - In future releases much of the Leviathan Query engine logic will be moved into this class to make it possible for implementors to override specific bits of the algebra processing but this is not possible at this time - - + String to test + - + - Creates a new Leviathan Query Processor + Determines whether a given String is a valid Double - Triple Store + String to test + - + - Creates a new Leviathan Query Processor + Gets whether a QName is valid in Turtle (assumes Turtle as originally specified by Dave Beckett) - SPARQL Dataset + QName + - + - 
Processes a SPARQL Query + Gets whether the given value is a valid prefix in Turtle - SPARQL Query + Value + Turtle Syntax - + - Processes a SPARQL Query sending the results to a RDF/SPARQL Results handler as appropriate + Gets whether the given value is the valid prefix portion of a prefixed name in Turtle - RDF Handler - Results Handler - SPARQL Query + Value + Turtle Syntax + - + - Delegate used for asychronous execution + Gets whether the given value is the valid local name portion of a prefixed name in Turtle - RDF Handler - Results Handler - SPARQL Query + Value + Turtle Syntax + - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Gets whether the given value matches the PN_LOCAL rule from the Turtle specification - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set - State to pass to the callback - - In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. - + Value + - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + Checks whether a given String matches the PLX rule from the Turtle W3C Specification - RDF Handler - Results Handler - SPARQL Query - Callback - State to pass to the callback - - In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. 
- + String as character array + Start Index + Resulting End Index + - + - Creates a new Evaluation Context + Gets whether a character is a Hex character + Character - + - Creates a new Evaluation Context for the given Query + Determines whether a given String is a valid QName - Query + String to test + Turtle Syntax - + - Gets the Query Processor for a Context + Unescapes local name escapes in a QName - + QName + Unescaped QName - + - Processes SPARQL Algebra + Determines whether a given String should be serialized as a Long Literal - Algebra - SPARQL Evaluation Context + String to test + - + - Processes an Ask + Infers the Type of a Plain Literal - Ask - SPARQL Evaluation Context + Plain Literal to infer the Type of + Turtle Syntax + A Uri representing the XML Scheme Data Type for the Plain Literal - + - Processes a BGP + Gets whether a character matches the PN_CHARS_BASE production from the Turtle specifications - BGP - SPARQL Evaluation Context + Character + - + - Processes a Bindings modifier + Gets whether a surrogate pair matches the PN_CHARS_BASE production from the Turtle specifications - Bindings - SPARQL Evaluation Context + High surrogate + Low surrogate + - + - Processes a Distinct modifier + Gets whether a character matches the PN_CHARS production from the Turtle specification - Distinct modifier - SPARQL Evaluation Context + Character + - + - Processes an Extend + Gets whether a surrogate pair matches the PN_CHARS production from the Turtle specification - Extend - SPARQL Evaluation Context + High surrogate + Low surrogate + - + - Processes an Exists Join + Gets whether a character matches the PN_CHARS_U production from the Turtle specification - Exists Join - SPARQL Evaluation Context + Character + - + - Processes a Filter + Gets whether a surrogate pair matches the PN_CHARS_U production from the Turtle specification - Filter - SPARQL Evaluation Context + High surrogate + Low surrogate + - + - Processes a Graph + Gets whether a character matches the 
nameStartChar production from the Turtle specification - Graph - SPARQL Evaluation Context + Character + - + - Processes a Group By + Gets whether a surrogate pair matches the nameStartChar production from the Turtle specification - Group By - SPARQL Evaluation Context + High surrogate + Low surrogate + - + - Processes a Having + Gets whether a character matches the nameChar production from the Turtle specification - Having - SPARQL Evaluation Context + Character + - + - Processes a Join + Gets whether a surrogate pair matches the nameChar production from the Turtle specification - Join - SPARQL Evaluation Context + High surrogate + Low surrogate + - + - Processes a LeftJoin + Helper Class which defines some Test Functions for testing the Unicode Category of Characters - Left Join - SPARQL Evaluation Context - + - Processes a Minus + Start of high surrogate range - Minus - SPARQL Evaluation Context - + - Processes a Negated Property Set + End of high surrogate range - Negated Property Set - SPARQL Evaluation Context - - + - Processes a Null Operator + Start of low surrogate range - Null Operator - SPARQL Evaluation Context - - + - Processes a One or More Path + End of low surrogate range - Path - SPARQL Evaluation Context - - + - Processes an Order By + Checks whether a given Character is considered a Letter - - SPARQL Evaluation Context + Character to Test + - + - Processes a Property Path + Checks whether a given Character is considered a Letter or Digit - Path - SPARQL Evaluation Context + Character to Test - + - Processes a Reduced modifier + Checks whether a given Character is considered a Letter Modifier - Reduced modifier - SPARQL Evaluation Context + Character to Test + - + - Processes a Select + Checks whether a given Character is considered a Digit - Select - SPARQL Evaluation Context + Character to Test + - + - Processes a Select Distinct Graphs + Indicates whether the specified object is a high surrogate. 
- Select Distinct Graphs - SPARQL Evaluation Context + + + true if the numeric value of the parameter ranges from U+D800 through U+DBFF; otherwise, false. + + The Unicode character to evaluate. 1 - + - Processes a Service + Indicates whether the specified object is a low surrogate. - Service - SPARQL Evaluation Context + + + true if the numeric value of the parameter ranges from U+DC00 through U+DFFF; otherwise, false. + + The character to evaluate. 1 - + - Processes a Slice modifier + Converts the value of a UTF-16 encoded surrogate pair into a Unicode code point. - Slice modifier - SPARQL Evaluation Context + + + The 21-bit Unicode code point represented by the and parameters. + + A high surrogate code point (that is, a code point ranging from U+D800 through U+DBFF). + A low surrogate code point (that is, a code point ranging from U+DC00 through U+DFFF). + is not in the range U+D800 through U+DBFF, or is not in the range U+DC00 through U+DFFF. + 1 - + - Processes a Subquery + Converts a Hex Escape into the relevant Unicode Character - Subquery - SPARQL Evaluation Context + Hex code - + - Processes a Union + Converts a Hex Escape into the relevant UTF-16 codepoints - Union - SPARQL Evaluation Context + + - + - Processes a Unknown Operator + Static Helper Class for dereferencing URIs and attempting to parse the results of a HTTP GET request to the URI into RDF - Unknown Operator - SPARQL Evaluation Context + +

Caching

+ + As of the 0.2.2 release the loader has support for caching retrieved data locally built into it (for Graphs only), caching is done using ETags where the remote server provides them or just by a user-defineable 'freshness' criteria (i.e. number of hours retrieved resources should be cached for). By default this caching happens in the system temporary directory which means it is non-persistent i.e. if you run your application using dotNetRDF it may cache stuff during the session but once the application is closed the operating system may freely delete the cached data. If you wish to have a persistent cache then you can use the CacheDirectory property to set your own cache directory. Even when you set your own cache directory dotNetRDF will delete obsolete data from it over time though this will only happen when a new request invalidates previously cached data. + + + If you wish to completely control the Cache you can implement your own IUriLoaderCache implementation and use it by setting the Cache property + +
- + - Processes a Zero Length Path + Gets/Sets the Directory used for caching Graphs loaded from URIs - Path - SPARQL Evaluation Context - - + - Processes a Zero or More Path + Gets/Sets the amount of time Graphs are cached for - Path - SPARQL Evaluation Context - + + This duration only applies to URIs which don't return an ETag as part of the HTTP response when they are deferenced + - + - Static Helper class containing extension methods related to queries + Gets/Sets the Cache that is in use + + Setting the Cache to null does not disable it, to disable caching use the Options.UriLoaderCaching property. + - + - Determines whether an Expresion uses the Default Dataset + Determines whether the RDF behind the given URI is cached - Expression + URI - Almost all Expressions use the Default Dataset. The only ones that does are EXISTS/NOT EXISTS expressions where the graph pattern does not use the default dataset + + Note: This does not guarantee that the cached content will be used if you load from the URI using the UriLoader. Whether the cached copy is used will depend on whether + - + - A SPARQL Query Processor where the query is processed by passing it to the ExecuteQuery() method of an INativelyQueryableStore + Gets/Sets an optional User Agent string that will be appended to HTTP Requests - + - Creates a new Simple Query Processor + Attempts to load a RDF Graph from the given URI into the given Graph - Triple Store + Graph to assert Triples in + URI to attempt to get RDF from + + + Attempts to select the relevant Parser based on the Content Type header returned in the HTTP Response. + + + If you know ahead of time the Content Type you can just open a HTTP Stream yourself and pass it to an instance of the correct Parser. + + + In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. 
+ + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + - + - Processes a SPARQL Query + Attempts to load a RDF Graph from the given URI into the given Graph - SPARQL Query - + Graph to assert Triples in + URI to attempt to get RDF from + Parser to use + + + Uses the supplied parser to attempt parsing regardless of the actual Content Type returned + + + In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. + + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + - + - Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + Attempts to load a RDF Graph from the given URI using a RDF Handler - RDF Handler - Results Handler - SPARQL Query + RDF Handler to use + URI to attempt to get RDF from + + + Attempts to select the relevant Parser based on the Content Type header returned in the HTTP Response. + + + If you know ahead of time the Content Type you can just open a HTTP Stream yourself and pass it to an instance of the correct Parser. 
+ + + In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. + + - + - Delegate used for asychronous execution + Attempts to load a RDF Graph from the given URI using a RDF Handler - RDF Handler - Results Handler - SPARQL Query + RDF Handler to use + URI to attempt to get RDF from + Parser to use + + + Uses the supplied parser to attempt parsing regardless of the actual Content Type returned + + + In the event that the URI is a File URI the FileLoader will be used instead + + + If the URI is a Data URI then the DataUriLoader will be used instead. + + - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Attempts to load a RDF dataset from the given URI into the given Triple Store - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set - State to pass to the callback + Triple Store to load into + URI to attempt to get a RDF dataset from + Parser to use to parse the RDF dataset - In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. + + If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + + + If you know ahead of time the Content Type you can explicitly pass in the parser to use. 
+ - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + Attempts to load a RDF dataset from the given URI into the given Triple Store - RDF Handler - Results Handler - SPARQL Query - Callback - State to pass to the callback + Triple Store to load into + URI to attempt to get a RDF dataset from - In the event of a success the callback will be invoked normally, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + + Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + - + - A SPARQL Query Processor where the query is processed by passing it to the Query() method of an IQueryableStorage + Attempts to load a RDF dataset from the given URI using a RDF Handler + RDF Handler to use + URI to attempt to get a RDF dataset from + Parser to use to parse the RDF dataset + + + If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + + + If you know ahead of time the Content Type you can explicitly pass in the parser to use. + + - + - Creates a new Generic Query Processor + Attempts to load a RDF dataset from the given URI using a RDF Handler - Generic IO Manager + RDF Handler to use + URI to attempt to get a RDF dataset from + + + Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. 
+ + - + - Processes a SPARQL Query + Raises warning messages - SPARQL Query - + Warning Message - + - Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + Raises store warning messages - RDF Handler - Results Handler - SPARQL Query + Warning Message - + - Delegate used for asychronous execution + Event which is raised when a parser that is invoked by the UriLoader notices a non-fatal issue with the RDF syntax - RDF Handler - Results Handler - SPARQL Query - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Event which is raised when a store parser that is invoked by the UriLoader notices a non-fatal issue with the RDF dataset syntax - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set - State to pass to the callback - - In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. - - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + Attempts to load a RDF Graph from a URI asynchronously - RDF Handler - Results Handler - SPARQL Query - Callback + Graph to assert triple in + URI to load from + Parser to use + Callback to invoke when the operation completes State to pass to the callback - In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + + Uses the supplied parser to attempt parsing regardless of the actual Content Type returned + + + In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. 
+ + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + - + - A SPARQL Query Processor where the query is processed by passing it to a remote SPARQL endpoint + Attempts to load a RDF Graph from a URI asynchronously + Graph to assert triple in + URI to load from + Callback to invoke when the operation completes + State to pass to the callback + + + Will attempt to autodetect the format of the RDF based on the Content-Type header of the HTTP response + + + In the event that the URI is a File URI the FileLoader will be used instead. If the URI is a Data URI then the DataUriLoader will be used instead. + + + Note: UriLoader will assign the Graph the source URI as it's Base URI unless the Graph already has a Base URI or is non-empty prior to attempting parsing. Note that any Base URI specified in the RDF contained in the file will override this initial Base URI. In some cases this may lead to invalid RDF being accepted and generating strange relative URIs, if you encounter this either set a Base URI prior to calling this method or create an instance of the relevant parser and invoke it directly. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
+ + - + - Creates a new Remote Query Processor + Attempts to load a RDF Graph from a URI asynchronously using an RDF Handler - SPARQL Endpoint + RDF Handler to use + URI to load from + Parser to use + Callback to invoke when the operation completes + State to pass to the callback + + + Uses the supplied parser to attempt parsing regardless of the actual Content Type returned + + + In the event that the URI is a File URI the FileLoader will be used instead + + + If the URI is a Data URI then the DataUriLoader will be used instead. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + + - + - Processes a SPARQL Query + Attempts to load a RDF Graph from a URI asynchronously using an RDF Handler - SPARQL Query - + RDF Handler to use + URI to load from + Callback to invoke when the operation completes + State to pass to the callback + + + Attempts to autodetect the RDF format based on the Content-Type header of the HTTP response + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + + - + - Processes a SPARQL Query passing the results to the RDF or Results handler as appropriate + Attempts to load a RDF dataset asynchronously from the given URI into the given Triple Store - RDF Handler - Results Handler - SPARQL Query + Triple Store to load into + URI to attempt to get a RDF dataset from + Parser to use to parse the RDF dataset + Callback to invoke when the operation completes + State to pass to the callback + + + If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. 
+ + + If you know ahead of time the Content Type you can explicitly pass in the parser to use. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + + - + - Processes a SPARQL Query asynchronously invoking the relevant callback when the query completes + Attempts to load a RDF dataset asynchronously from the given URI into the given Triple Store - SPARQL QUery - Callback for queries that return a Graph - Callback for queries that return a Result Set + Triple Store to load into + URI to attempt to get a RDF dataset from + Callback to invoke when the operation completes State to pass to the callback - In the event of a success the appropriate callback will be invoked, if there is an error both callbacks will be invoked and passed an instance of which contains details of the error and the original state information passed in. + + Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
+ - + - Processes a SPARQL Query asynchronously passing the results to the relevant handler and invoking the callback when the query completes + Attempts to load a RDF dataset asynchronously from the given URI using a RDF Handler - RDF Handler - Results Handler - SPARQL Query - Callback + RDF Handler to use + URI to attempt to get a RDF dataset from + Parser to use to parse the RDF dataset + Callback to invoke when the operation completes State to pass to the callback - In the event of a success the callback will be invoked, if there is an error the callback will be invoked and passed an instance of which contains details of the error and the original state information passed in. + + If the parameter is set to null then this method attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + + + If you know ahead of time the Content Type you can explicitly pass in the parser to use. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. + - + - Stores information about the Evaluation of a Query during it's evaluation + Attempts to load a RDF dataset asynchronously from the given URI using a RDF Handler + RDF Handler to use + URI to attempt to get a RDF dataset from + Callback to invoke when the operation completes + State to pass to the callback + + + Attempts to select the relevant Store Parser based on the Content Type header returned in the HTTP Response. + + + If the loading completes normally the callback will be invoked normally, if an error occurs it will be invoked and passed an instance of as the state which contains details of the error and the original state. 
+ + - + - Creates a new Evaluation Context for the given Query over the given Dataset + Provides caching services to the UriLoader class - Query - Dataset - + - Creates a new Evaluation Context for the given Query over the given Dataset using a specific processor + Creates a new Cache which uses the system temporary directory as the cache location - Query - Dataset - Query Processor - + - Creates a new Evaluation Context which is a Container for the given Result Binder + Creates a new Cache which uses the given directory as the cache location - + Directory - + - Gets the Query that is being evaluated + Gets/Sets how long results should be cached + + This only applies to downloaded URIs where an ETag is not available, where ETags are available proper ETag based caching is used + - + - Gets the Dataset the query is over + Gets/Sets the Cache Directory that is used - + - Gets the custom query processor that is in use (if any) + Initialises the Cache as required - + - Gets/Sets the Input Multiset + Clears the Cache - + - Gets/Sets the Output Multiset + Gets whether there is an ETag for the given URI + URI + - + - Gets/Sets the Results Binder + Gets the ETag for the given URI + URI + + Thrown if there is no ETag for the given URI - + - Gets/Sets whether BGPs should trim temporary variables + Remove the ETag record for the given URI + URI - + - Starts the Execution Timer + Removes a locally cached copy of a URIs results from the Cache + URI - + - Ends the Execution Timer + Is there a locally cached copy of the Graph from the given URI which is not expired + URI + Whether the local copy is required to meet the Cache Freshness (set by the Cache Duration) + - + - Checks whether Execution should Time out + Gets the path to the locally cached copy of the Graph from the given URI - Thrown if the Query has exceeded the Execution Timeout + URI + + + This method does not do any cache expiry calculations on the file. 
This is due to the fact that we'll store local copies of Graphs for which we have ETags and when using ETags we rely on the servers knowledge of whether the resource described by the URI has changed rather than some arbitrary caching duration that we/the user has set to use. + - + - Gets the Remaining Timeout i.e. the Timeout taking into account time already elapsed + Static Helper Class which contains a set of Functions which model Name and Character validations as laid + out in the W3C XML and XML Namespaces specification - If there is no timeout then this is always zero, if there is a timeout this is always >= 1 since any operation that wants to respect the timeout must have a non-zero timeout to actually timeout properly. + These are needed in the XML/RDF Parser + + Also contains the Date Time format string used to format .Net's DateTime type into a String whose format conforms to the XML Schema Datatypes specification + + + http://www.w3.org/TR/REC-xml/ + http://www.w3.org/TR/REC-xml-names/ + http://www.w3.org/TR/xmlschema-2/ - + - Gets the Query Timeout used for the Query + Namespace for XML - - - This is taken either from the Timeout property of the SparqlQuery to which this evaluation context pertains (if any) or from the global option Options.QueryExecutionTimeout. To set the Timeout to be used set whichever of those is appropriate prior to evaluating the query. If there is a Query present then it's timeout takes precedence unless it is set to zero (no timeout) in which case the global timeout setting is applied. You cannot set the Query Timeout to be higher than the global timeout unless the global timeout is set to zero (i.e. 
no global timeout) - - - + - Retrieves the Time in milliseconds the query took to evaluate + Namespace for XML Namespaces - + - Retrieves the Time in ticks the query took to evaluate + Namespace for XML Schema - + - Gets/Sets a Object that should be persisted over the entire Evaluation Context + Date Time Formatting string which meets the specified format for xsd:dateTime - Key - - May be used by parts of the Evaluation Process that need to ensure a persistent state across the entire Evaluation Query (e.g. the implementation of the BNODE() function) + Use with the DateTime.ToString() method to format a DateTime into appropriate string format - + - Evalutes an Algebra Operator in this Context using the current Query Processor (if any) or the default Evaluate() method + Date Time Formatting string which meets the specified format for xsd:dateTime, this formatting string is imprecise in the sense that it does not preserve the fractional seconds. - Algebra - + + Use with the DateTime.ToString() method to format a DateTime into appropriate string format + - + - Comparer class for implementing the SPARQL semantics for the relational operators + Date Time Formatting string which meets the specified format for xsd:date + + Use with the DateTime.ToString() method to format a DateTime into appropriate string format + - + - Compares two Nodes + Date Time Formatting string which meets the the specified format for xsd:time - Node - Node - + + Use with the DateTime.ToString() method to format a DateTime into appropriate string format + - + - Compares two valued Nodes + Date Time Formatting string which meets the the specified format for xsd:time, this formatting string is imprecise in the sense that it does not preserve the fractional seconds. 
- Node - Node - + + Use with the DateTime.ToString() method to format a DateTime into appropriate string format + - + - Compares two Nodes for Numeric Ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - Numeric Type - - + - Compares two Nodes for Numeric Ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - Numeric Type - - + - Compares two Date Times for Date Time ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Compares two Date Times for Date Time ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Compares two Dates for Date ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Compares two Dates for Date ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Comparer class for use in SPARQL ORDER BY - implements the Semantics broadly similar to the relational operator but instead of erroring using Node/Lexical ordering where an error would occur it makes an appropriate decision + Data Type Uri Constants for XML Schema Data Types - + - Compares two Nodes + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Compares two Nodes + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - Compares two Date Times for Date Time ordering + Data Type Uri Constants for XML Schema Data Types - Node - Node - - + - A SPARQL Parameterized String is a String that can contain parameters in the same fashion as a SQL command string + Data Type Uri Constants for XML Schema Data Types - - - This is intended for use in applications which may want to dynamically build SPARQL queries/updates where user input may comprise individual values in the triples patterns and the applications want to avoid SPARQL injection attacks which change the meaning of the query/update - - - It works broadly in the same way as a SqlCommand would in that you specify a string with paramters specified in 
the form @name and then use various set methods to set the actual values that should be used. The values are only substituted for parameters when you actually call the ToString() method to get the final string representation of the command. E.g. - - - SparqlParameterizedString queryString = new SparqlParameterizedString(); - queryString.CommandText = @"SELECT * WHERE - { - ?s a @type . - }"; - queryString.SetUri("type", new Uri("http://example.org/myType")); - Console.WriteLine(queryString.ToString()); - - - Would result in the following being printed to the Console: - - - SELECT * WHERE - { - ?s a <http://example.org/myType> - } - - - Calling a Set method to set a parameter that has already been set changes that value and the new value will be used next time you call ToString() - this may be useful if you plan to execute a series of queries/updates using a series of values since you need not instantiate a completely new parameterized string each time - - - This class was added to a library based on a suggestion by Alexander Sidorov and ideas from slides from Slideshare by Almedia et al - - - PERFORMANCE TIPS: if building the command text incrementaly, avoid using CommandText += and use the AppendSubQuery or Append methods instead - - - + - Creates a new empty parameterized String + Data Type Uri Constants for XML Schema Data Types - + - Creates a new parameterized String + Data Type Uri Constants for XML Schema Data Types - Command Text - + - Gets/Sets the Namespace Map that is used to prepend PREFIX declarations to the command + Data Type Uri Constants for XML Schema Data Types - + - Gets/Sets the Base URI which will be used to prepend BASE declarations to the command + Data Type Uri Constants for XML Schema Data Types - + - Gets/Sets the parameterized Command Text + Data Type Uri Constants for XML Schema Data Types - + - Appends the given query as a sub-query to the existing command text, any prefixes in the sub-query are moved to the parent query + Data Type 
Uri Constants for XML Schema Data Types - Query - + - Appends the given query as a sub-query to the existing command text, any prefixes in the sub-query are moved to the parent query but any parameter/variable assignments will be lost + Data Type Uri Constants for XML Schema Data Types - Query - + - Appends the given text to the existing command text, any prefixes in the sub-query are moved to the parent query but any parameter/variable assignments will be lost + Data Type Uri Constants for XML Schema Data Types - Text - + - Appends the given text to the existing command text, any prefixes in the command are moved to the parent query + Data Type Uri Constants for XML Schema Data Types - Text - + - Gets/Sets the Query processor which is used when you call the ExecuteQuery() method + Data Type Uri Constants for XML Schema Data Types - + - Gets/Sets the Query processor which is used when you call the ExecuteUpdate() method + Data Type Uri Constants for XML Schema Data Types - + - Gets an enumeration of the Variables for which Values have been set + Data Type Uri Constants for XML Schema Data Types - + - Gets an enumeration of the Parameters for which Values have been set + Data Type Uri Constants for XML Schema Data Types - + - Clears all set Parameters and Variables + Data Type Uri Constants for XML Schema Data Types - + - Clears all set Parameters + Data Type Uri Constants for XML Schema Data Types - + - Clears all set Variables + Array of Constants for Data Types that are supported by the Literal Node CompareTo method - + - Sets the Value of a Parameter + Returns whether a String is a Name as defined by the W3C XML Specification - Parameter Name - Value - - Can be used in derived classes to set the value of parameters if the derived class defines additional methods for adding values for parameters - + String to test + - + - Removes a previously set value for a Parameter + Returns whether a String is a NCName as defined by the W3C XML Namespaces Specification - 
Parameter Name - - There is generally no reason to do this since you can just set a parameters value to change it - + String to test + + http://www.w3.org/TR/REC-xml-names/#NT-NCName - + - Removes a previously set value for a Variable + Returns whether a Character is a NameChar as defined by the W3C XML Specification - Variable Name - - May be useful if you have a skeleton query/update into which you sometimes substitute values for variables but don't always do so - + Character to Test + + http://www.w3.org/TR/REC-xml/#NT-NameChar - + - Sets the Value of a Variable + Returns whether a Character is a NameChar as defined by the W3C XML Specification - Variable Name - Value + Character to test + + http://www.w3.org/TR/REC-xml/#NT-NameChar - + - Sets the Parameter to an Integer Literal + Returns whether the given Type refers to one of the types supported by the LiteralNode CompareTo method - Parameter - Integer + Data Type Uri + - + - Sets the Parameter to an Integer Literal + Returns whether the given Type refers to one of the types supported by the LiteralNode CompareTo method - Parameter - Integer + Data Type Uri + - + - Sets the Parameter to an Integer Literal + Gets the Data Type Uri of the given Node if it has a supported type - Parameter - Integer + Node + + + + Only ILiteralNode's can have a Data Type + + + The function only returns the Data Type Uri (as a String) if the Data Type of the Literal is one of the supported Data Types + + - + - Sets the Parameter to a Decimal Literal + Gets the Compatible Supported Data Type assuming the two Nodes are Literals with support types and that those types are compatible - Parameter - Integer + A Node + A Node + Whether the compatible type should be the wider type + + + + Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics + + - + - Sets the Parameter to a Float Literal + Gets the Compatible Supported Data Type assuming the 
two Nodes are Literals with support types and that those types are compatible - Parameter - Integer + A Node + A Node + + + + Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics + + - + - Sets the Parameter to a Double Literal + Gets the Compatible Supported Data Type for the two Data Types - Parameter - Integer + A Data Type + A Data Type + + + + Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics + + - + - Sets the Parameter to a Date Time Literal + Gets the Compatible Supported Data Type for the two Data Types - Parameter - Integer + A Data Type + A Data Type + Whether the compatible type should be the wider type + + + + Currently this is only immplemented sufficiently for the types it needs to know are compatible for implementing SPARQL equality and ordering semantics + + - + - Sets the Parameter to a Date Time Literal + Internal class which parses SPARQL Paths into path expressions - Parameter - Integer - Whether to preserve precisely i.e. include fractional seconds - + - Sets the Parameter to a Date Time Literal + Interface to be implemented by RDF Writers which generate RDF Concrete Syntax - Parameter - Integer - + - Sets the Parameter to a Date Time Literal + Method for Saving a Graph to a Concrete RDF Syntax in a file based format - Parameter - Integer - Whether to preserve precisely i.e. 
include fractional seconds + The Graph to Save + The filename to save the Graph in + Thrown if the RDF in the Graph is not representable by the Writer + Thrown if the Writer is unable to write to the File - + - Sets the Parameter to a Duration Literal + Method for Saving a Graph to a Concrete RDF Syntax via some arbitrary TextWriter - Parameter - Integer + The Graph to Save + The TextWriter to save the Graph to + Thrown if the RDF in the Graph is not representable by the Writer + Thrown if the Writer is unable to write to the underlying storage of the TextWriter specified in the - + - Sets the Parameter to a Boolean Literal + Method for saving a graph to a concrete RDF syntax via some arbitray . - Parameter - Integer + The graph to save + The to save the graph to + true to leave the stream open when the method completes; false otherwise + Thrown if the RDF in the graph is not representable by the writer + Thrown if the writer is unable to write to the underlying storage of the TextWriter specified in the - + - Sets the Parameter to an Untyped Literal + Event which writers can raise to indicate possible ambiguities or issues in the syntax they are producing - Parameter - Integer - + - Sets the Parameter to a Typed Literal + Interface for Writers that Support Pretty Printing - Parameter - Integer - Datatype URI - + - Sets the Parameter to a Literal with a Language Specifier + Gets/Sets whether Pretty Printing Mode should be used - Parameter - Integer - Language Specifier - + - Sets the Parameter to a URI + Interface for Writers that Support engaging High Speed Write Mode for some Graphs - Parameter - URI - + - Sets the Parameter to be a Blank Node with the given ID + Gets/Sets whether the Writer can use High Speed Write Mode if the Graph is deemed suitable for this - Parameter - Node ID - - Only guarantees that the Blank Node ID will not clash with any other Blank Nodes added by other calls to this method or it's overload which generates anonymous Blank Nodes. 
If the base query text into which you are inserting parameters contains Blank Nodes then the IDs generated here may clash with those IDs. - - + - Sets the Parameter to be a new anonymous Blank Node + Interface for Writers that support varying levels of Syntax Compression - Parameter - - Only guarantees that the Blank Node ID will not clash with any other Blank Nodes added by other calls to this method or it's overload which takes an explicit Node ID. If the base query text into which you are inserting parameters contains Blank Nodes then the IDs generated here may clash with those IDs. - - + - Executes this command as a query + Gets/Sets the Compression Level that the Writer is using - + Compression Level is an arbitrary figure that the Writer can interpret as it wants, implementations of this interface should state in the XML Comments for this property what the different values mean. The Standard Compression levels provided by the WriterCompressionLevel enumeration are intended as guides and Writers may interpret these as they desire. - + - Executes this command as a query + Interface for Writers that support use of DTDs to compress output - RDF Handler - Results Handler - + - Executes this command as an update + Gets/Sets whether DTDs can be used - + - Clears the preprocessing structures + Interface for Writers that can use attributes (e.g. 
XML or HTML based writers) which allows you to control whether the writer will choose to use attributes to encode data which could otherwise be expressed as elements - + - Trims out the SPARQL preamble (BASE and PREFIX definitions) from the command text + Gets/Sets whether literal objects can be compressed as attributes - - This is done so the instance can be directly merged into another SparqlParameterizedString through the Append methods - - + - Provides some fast string exploration to determine valid parameter/variable placeholders and leave out any constant SPARQL ambiguous patterns (language tags, parameter- or variable-like patterns in IRIs or in string literals...) + Interface for Writers that support the use of Namespaces and allows a set of Default Namespaces to be defined - + - Returns the actual Query/Update String with parameter and variable values inserted + Gets/Sets the Default Namespaces used for writing - - + - Abstract Base class for SPARQL Views which are Graphs which are generated from SPARQL Queries and get automatically updated when the Store they are attached to changes + Interface for Writers that support multi-threaded writing - - - CONSTRUCT, DESCRIBE or SELECT queries can be used to generate a Graph. 
If you use a SELECT query the returned variables must contain ?s, ?p and ?o in order to generate a view correctly - - - + - SPARQL Query + Gets/Sets whether multi-threading is used - + - Graphs that are mentioned in the Query + Interface for Writers that generate HTML - + - Triple Store the query operates over + Gets/Sets a Stylesheet file used to format the HTML - + - Creates a new SPARQL View + Gets/Sets the CSS class used for the anchor tags used to display the URIs of URI Nodes - SPARQL Query - Triple Store to query - + - Creates a new SPARQL View + Gets/Sets the CSS class used for the span tags used to display Blank Node IDs - SPARQL Query - Triple Store to query - + - Creates a new SPARQL View + Gets/Sets the CSS class used for the span tags used to display Literals - SPARQL Query - Triple Store to query - + - Initialises the SPARQL View + Gets/Sets the CSS class used for the anchor tags used to display Literal datatypes - + - Invalidates the View causing it to be updated + Gets/Sets the CSS class used for the span tags used to display Literal language specifiers - + - Callback for when asychronous invalidation completes + Gets/Sets the CSS class used for the div tags used to group chunks of markup into a box - Async call results - + - Forces the view to be updated + Gets/Sets a Prefix that is applied to all href attributes - + - Abstract method that derived classes should implement to update the view + Interface for writers which use formatters from the Formatting namespace - + - Gets the error that occurred during the last update (if any) + Gets the Type for the Triple Formatter this writer uses + + This should be the type descriptor for a type that implements ITripleFormatter + - + - Represents a SPARQL View over an in-memory store + Class of exceptions that may occur when outputting RDF - + - Creates a new SPARQL View + Creates a new RDF Output Exception - SPARQL Query - Triple Store to query + Error Message - + - Creates a new SPARQL View + Creates a new 
RDF Output Exception - SPARQL Query - Triple Store to query + Error Message + Exception that caused this Exception - + - Creates a new SPARQL View + Class of exceptions that may occur when doing multi-threaded output of RDF - SPARQL Query - Triple Store to query + + + Used when a process may result in multiple errors from different threads + + - + - Updates the view by making the SPARQL Query in-memory over the relevant Triple Store + Creates a new Threaded RDF Output Exception + Error Message - + - Represents a SPARQL View over an arbitrary native Triple Store + Adds an Exception to the list of Inner Exceptions + Exception - + - Creates a new SPARQL View + Gets the enumeration of Exceptions - SPARQL Query - Triple Store to query - + - Creates a new SPARQL View + Class for errors in selecting an appropriate Writer to output RDF with - SPARQL Query - Triple Store to query - + - Creates a new SPARQL View + Creates a new RDF Writer Selection Exception with the given Message - SPARQL Query - Triple Store to query + Error Message - + - Updates the view by making the query over the Native Store (i.e. the query is handled by the stores SPARQL implementation) + Creates a new RDF Writer Selection Exception with the given Message and Inner Exception + Error Message + Inner Exception - + - Represents an Group of Bindings which is used when executing Queries with GROUP BY clauses + + Namespace for Writing Classes which provide the means to Serialize RDF Graphs as concrete RDF syntaxes or graphical representations. + + + Also contains classes that can be used to save Graphs and Triple Stores to arbitrary database backed storage using classes from the Storage namespace. + - + - Creates a new Binding Group + + Namespace for Writer Context classes, these are classes that are used internally by writers to store their state. This allows writers to be safely used in a multi-threaded scenario since the writing of one Graph/Store cannot affect the writing of another. 
+ - + - Creates a new Binding Group which is a sub-group of the given Parent Group + Interface for Store Writer Contexts - Parent Group - + - Creates a new Binding Group from the specified IDs + Gets the Store being written - IDs - + - Adds a Binding ID to the Group + Base Class for Store Writer Context Objects - ID - + - Gets the Enumerator for the Binding IDs in the Group + Pretty Print Mode setting - - + - Gets the Enumerator for the Binding IDs in the Group + High Speed Mode setting - - + - Gets the Binding IDs in the Group + Creates a new Base Store Writer Context with default settings + Store to write + TextWriter being written to - + - Adds a Variable Assignment to the Group + Creates a new Base Store Writer Context with custom settings - Variable - Value + Store to write + TextWriter being written to + Pretty Print Mode + High Speed Mode - + - Gets the Variable Assignments for the Group + Gets/Sets the Pretty Printing Mode used - + - Gets a String summarising the group + Gets/Sets the High Speed Mode used - - + - Class for representing errors that occur in RDF Storage + Gets the Store being written - + - Creates a new RDF Storage Exception + Gets the TextWriter being written to - Error Message - + - Creates a new RDF Storage Exception + Formats a URI as a String for full Output - Error Message - Exception which caused this Exception + URI + - + - Static Helper class containing internal extensions methods used to support the BaseAsyncSafeConnector class + Formats a URI as a String for full Output + URI + - + - Loads a Graph asynchronously + Interface for Writer Contexts - Storage Provider - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph asynchronously + Gets the Graph being written - Storage Provider - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback - + - Saves a Graph aynchronously + Gets the TextWriter being written to - Storage Provider - Graph to save 
- Callback - State to pass to the callback - + - Updates a Graph asynchronously + Gets/Sets the Pretty Printing Mode used - Storage Provider - URI of the Graph to update - Triples to add - Triples to remove - Callback - State to pass to the callback - + - Deletes a Graph asynchronously + Gets/Sets the High Speed Mode used - Storage Provider - URI of the Graph to delete - Callback - State to pass to the callback - + - Lists Graphs in the store asynchronously + Gets/Sets the Compression Level used - Storage Provider - Callback - State to pass to the callback - + - Queries a store asynchronously + Gets/Sets the Node Formatter used - Storage Provider - SPARQL Query - Callback - State to pass to the callback - + - Queries a store asynchronously + Gets/Sets the URI Formatter used - Storage Provider - SPARQL Query - RDF Handler - Results Handler - Callback - State to pass to the callback - + - Updates a store asynchronously + Interface for Writer Contexts which store collection compression data - Storage Provider - SPARQL Update - Callback - State to pass to the callback - + - Abstract Base Class for IStorageProvider implementations for which it is safe to do the IAsyncStorageProvider implementation simply by farming out calls to the synchronous methods onto background threads (i.e. 
non-HTTP based connectors) + Gets the mapping from Blank Nodes to Collections - + - Gets the parent server (if any) + Gets the Triples that should be excluded from standard output as they are part of collections - + - Gets the parent server (if any) + Base Class for Writer Context Objects + + This is not an abstract class since some writers will require only this information or possibly less + - + - Loads a Graph from the Store + Compression Level to be used - Graph to load into - URI of the Graph to load - + - Loads a Graph from the Store + Pretty Printing Mode setting - Graph to load into - URI of the Graph to load - + - Loads a Graph from the Store + High Speed Mode setting - Handler to load with - URI of the Graph to load - + - Loads a Graph from the Store + Graph being written - Handler to load with - URI of the Graph to load - + - Saves a Graph to the Store + TextWriter being written to - Graph to save - + - Updates a Graph in the Store + QName Output Mapper - URI of the Graph to update - Triples to be added - Triples to be removed - + - Updates a Graph in the Store + Node Formatter - URI of the Graph to update - Triples to be added - Triples to be removed - + - Deletes a Graph from the Store + URI Formatter - URI of the Graph to delete - + - Deletes a Graph from the Store + Creates a new Base Writer Context with default settings - URI of the Graph to delete + Graph being written + TextWriter being written to - + - Lists the Graphs in the Store + Creates a new Base Writer Context with custom settings - + Graph being written + TextWriter being written to + Compression Level - + - Indicates whether the Store is ready to accept requests + Creates a new Base Writer Context with custom settings + Graph being written + TextWriter being written to + Compression Level + Pretty Print Mode + High Speed Mode - + - Gets whether the Store is read only + Gets the Graph being written - + - Gets the IO Behaviour of the Store + Gets the TextWriter being written to - + - Gets 
whether the Store supports Triple level updates via the UpdateGraph() method + Gets the QName Output Mapper in use - + - Gets whether the Store supports Graph deletion via the DeleteGraph() method + Gets/Sets the Compression Level used - + - Gets whether the Store supports listing graphs via the ListGraphs() method + Gets/Sets the Pretty Printing Mode used - + - Diposes of the Store + Gets/Sets the High Speed Mode used - + - Loads a Graph from the Store asynchronously + Gets/Sets the Node Formatter in use - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Gets/Sets the URI Formatter in use - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Writer Context for XHTML+RDFa Writers - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Creates a new HTML Writer Context - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback + Graph + Text Writer - + - Saves a Graph to the Store asynchronously + HTML Writer to use - Graph to save - Callback - State to pass to the callback - + - Updates a Graph in the Store asychronously + Writer Context for NTriples Writers - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback - + - Updates a Graph in the Store asychronously + Creates a new NTriples Writer Context with default settings - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + Graph to write + TextWriter to write to + NTriples Syntax mode - + - Deletes a Graph from the Store + Creates a new NTriples Writer Context with custom settings - URI of the Graph to delete - Callback - State to pass to the callback + Graph to write + TextWriter to write 
to + NTriples Syntax mode + Pretty Print Mode + High Speed Mode - + - Deletes a Graph from the Store + Gets the NTriples syntax mode - URI of the Graph to delete - Callback - State to pass to the callback - + - Lists the Graphs in the Store asynchronously + Writer Context for RDF/XML Writers - Callback - State to pass to the callback - + - Interface which describes the capabilities of some storage provider + Pretty Printing Mode setting - + - Gets whether the connection with the underlying Store is ready for use + Graph being written - + - Gets whether the connection with the underlying Store is read-only + TextWriter being written to - - Any Manager which indicates it is read-only should also return false for the UpdatedSupported property and should throw a RdfStorageException if the SaveGraph() or UpdateGraph() methods are called - - + - Gets the Save Behaviour the Store uses + XmlWriter being written to - + - Gets whether the triple level updates are supported + Nested Namespace Mapper - - Some Stores do not support updates at the Triple level and may as designated in the interface defintion throw a NotSupportedException if the UpdateGraph() method is called. This property allows for calling code to check in advance whether Updates are supported - - + - Gets whether the deletion of graphs is supported + Creates a new RDF/XML Writer Context - - Some Stores do not support the deletion of Graphs and may as designated in the interface definition throw a NotSupportedException if the DeleteGraph() method is called. This property allows for calling code to check in advance whether Deletion of Graphs is supported. 
- + Graph + Output destination - + - Gets whether the Store supports Listing Graphs + Generates the required settings for the XmlWriter + - + - Interface for storage providers which provide the read/write functionality to some arbitrary storage layer + Gets the Graph being written - - Designed to allow for arbitrary Triple Stores to be plugged into the library as required by the end user - - + - Gets the Parent Server on which this store is hosted (if any) + Gets the TextWriter being written to - - - For storage backends which support multiple stores this is useful because it provides a way to access all the stores on that backend. For stores which are standalone they should simply return null - - - + - Loads a Graph from the Store + Gets the XML Writer in use - Graph to load into - Uri of the Graph to load - - - If the Graph being loaded into is Empty then it's Base Uri should become the Uri of the Graph being loaded, otherwise it should be merged into the existing non-empty Graph whose Base Uri should be unaffected. - - - Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. - - - + - Loads a Graph from the Store + Gets/Sets the Pretty Printing Mode used - Graph to load into - URI of the Graph to load - - - If the Graph being loaded into is Empty then it's Base Uri should become the Uri of the Graph being loaded, otherwise it should be merged into the existing non-empty Graph whose Base Uri should be unaffected. - - - Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. 
- - - + - Loads a Graph from the Store using the RDF Handler + Gets/Sets the Node Formatter - RDF Handler - URI of the Graph to load - - Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. - + Node Formatters are not used for RDF/XML output - + - Loads a Graph from the Store using the RDF Handler + Gets/Sets the URI Formatter - RDF Handler - URI of the Graph to load - - Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. - + URI Formatters are not used for RDF/XML output - + - Saves a Graph to the Store + Gets the Namespace Map in use - Graph to Save - - Uri of the Graph should be taken from the BaseUri property -

- Behaviour of this method with regards to whether it overwrites/updates/merges with existing Graphs of the same Uri is up to the implementor and may be dependent on the underlying store. Implementors should state in the XML comments for their implementations what behaviour is implemented. -
- + - Updates a Graph in the Store + Gets the Blank Node map in use - Uri of the Graph to update - Triples to add to the Graph - Triples to remove from the Graph - - - Note: Not all Stores are capable of supporting update at the individual Triple level and as such it is acceptable for such a Store to throw a NotSupportedException if the Store cannot provide this functionality - - - Behaviour of this method with regards to non-existent Graph is up to the implementor, it may create a new empty Graph and apply the updates to that or it may throw an error. Implementors should state in the XML comments for their implementation what behaviour is implemented. - - - Implementers MUST allow for either the additions or removals argument to be null - - - May be thrown if the underlying Store is not capable of doing Updates at the Triple level - May be thrown if the underlying Store is not capable of doing Updates at the Triple level or if some error occurs while attempting the Update - + - Updates a Graph in the Store + Gets/Sets whether High Speed Mode is permitted - Uri of the Graph to update - Triples to add to the Graph - Triples to remove from the Graph - - Note: Not all Stores are capable of supporting update at the individual Triple level and as such it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality - - - Behaviour of this method with regards to non-existent Graph is up to the implementor, it may create a new empty Graph and apply the updates to that or it may throw an error. Implementors should state in the XML comments for their implementation what behaviour is implemented. 
- - - Implementers MUST allow for either the additions or removals argument to be null - + Not currently supported - May be thrown if the underlying Store is not capable of doing Updates at the Triple level - May be thrown if the underlying Store is not capable of doing Updates at the Triple level or if some error occurs while attempting the Update - + - Deletes a Graph from the Store + Gets/Sets the Compression Level used - URI of the Graph to be deleted - May be thrown if the underlying Store is not capable of doing Deleting a Graph - May be thrown if the underlying Store is not capable of Deleting a Graph or an error occurs while performing the delete - - Note: Not all Stores are capable of Deleting a Graph so it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality - + Not currently supported - + - Deletes a Graph from the Store + Gets/Sets the next ID to use for issuing Temporary Namespaces - URI of the Graph to be deleted - May be thrown if the underlying Store is not capable of doing Deleting a Graph - May be thrown if the underlying Store is not capable of Deleting a Graph or an error occurs while performing the delete - - - Note: Not all Stores are capable of Deleting a Graph so it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality - - - + - Gets a List of Graph URIs for the graphs in the store + Gets/Sets whether a DTD is used + + + + + Gets/Sets whether attributes are used to encode the predicates and objects of triples with simple literal properties + + + + + Represents the mapping from Blank Nodes to Collections + + + + + Stores the Triples that should be excluded from standard output as they are part of collections + + + + + Writer Context for Store Writers which do multi-threaded writing - - - Implementations should implement this method only if they need to provide a custom way of 
listing Graphs. If the Store for which you are providing a manager can efficiently return the Graphs using a SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } query then there should be no need to implement this function. - + Provides a queue for queuing the URIs of Graphs from the Store that need writing and a thread safe way of retrieving the next Uri to be written from the Queue - + - Interface for storage providers which allow SPARQL Queries to be made against them + Creates a new Threaded Store Writer Context with default settings + Store to be written + TextWriter to write to - + - Makes a SPARQL Query against the underlying store + Creates a new Threaded Store Writer Context with custom settings - SPARQL Query - SparqlResultSet or a Graph depending on the Sparql Query - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand + Store to be written + TextWriter to write to + Pretty Print Mode + High Speed Mode - + - Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice + Gets the NamespaceMap used for reducing URIs to QNames since there may only be one shared map written to the output - RDF Handler - SPARQL Results Handler - SPARQL Query - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand - + - Interface for storage providers which allow SPARQL Queries to be made against them with reasoning set by query + 
Gets the QName Mapper + + Must be manually initialised by the user + - + - Makes a SPARQL Query against the underlying store + Adds a Uri to the list of URIs for Graphs that are waiting to be written - SPARQL Query - rReasoning On demand by query - SparqlResultSet or a Graph depending on the Sparql Query - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand + - + - Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice + Gets the next Uri for a Graph that is waiting to be written - RDF Handler - SPARQL Results Handler - SPARQL Query - rReasoning On demand by query - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand + Uri of next Graph to be written - + - Interface for storage providers which allow SPARQL Updates to be made against them + Writer Context for TriG - + - Processes a SPARQL Update command against the underlying Store + Creates a new TriG Writer context - SPARQL Update + Triple Store to save + TextWriter to output to + Whether to use pretty printing + Whether high speed mode is permitted + Compression Level to use + Whether to enable N3 compatability mode - + - Interface for storage providers which provide asynchronous read/write functionality to some arbitrary storage layer + Gets/Sets the Compression Level - - Designed to allow for arbitrary Triple Stores to be plugged into the library as 
required by the end user - - + - Gets the Parent Server on which this store is hosted (if any) + Gets/Sets N3 Compatability Mode - - - For storage backends which support multiple stores this is useful because it provides a way to access all the stores on that backend. For stores which are standalone they should simply return null - - - + - Loads a Graph from the Store asynchronously + Writer Context for Turtle Writers - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Creates a new Turtle Writer Context with default settings - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Turtle Syntax - + - Loads a Graph from the Store asynchronously + Creates a new Turtle Writer Context with default settings - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback + Graph to write + TextWriter to write to - + - Loads a Graph from the Store asynchronously + Creates a new Turtle Writer Context with custom settings - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Pretty Print Mode + High Speed Mode + Turtle Syntax - + - Saves a Graph to the Store asynchronously + Creates a new Turtle Writer Context with custom settings - Graph to save - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Pretty Print Mode + High Speed Mode - + - Updates a Graph in the Store asychronously + Creates a new Turtle Writer Context with custom settings - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Compression Level to use + Pretty Print Mode + High Speed Mode + Turtle Syntax - + - Updates a Graph in the Store asychronously + Writer Context for Compressing Turtle 
Writers - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback - + - Deletes a Graph from the Store + Creates a new Turtle Writer Context with default settings - URI of the Graph to delete - Callback - State to pass to the callback + Graph to write + TextWriter to write to - + - Deletes a Graph from the Store + Creates a new Turtle Writer Context with default settings - URI of the Graph to delete - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Turtle Syntax - + - Lists the Graphs in the Store asynchronously + Creates a new Turtle Writer Context with custom settings - Callback - State to pass to the callback + Graph to write + TextWriter to write to + Pretty Print Mode + High Speed Mode - + - Interface for storage providers which allow SPARQL Queries to be made against them asynchronously + Creates a new Turtle Writer Context with custom settings + Graph to write + TextWriter to write to + Pretty Print Mode + High Speed Mode + Turtle Syntax - + - Queries the store asynchronously + Creates a new Turtle Writer Context with custom settings - SPARQL Query - Callback - State to pass to the callback - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand + Graph to write + TextWriter to write to + Compression Level to use + Pretty Print Mode + High Speed Mode - + - Queries the store asynchronously + Creates a new Turtle Writer Context with custom settings - SPARQL Query - RDF Handler - Results Handler - Callback - State to pass to the callback - Thrown if an error occurs performing the query - Thrown if an error occurs performing the query - Thrown if the query is invalid when 
validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results - Thrown if the store returns results in a format dotNetRDF does not understand + Graph to write + TextWriter to write to + Compression Level to use + Pretty Print Mode + High Speed Mode + Turtle Syntax - + - Interface for storage providers which allow SPARQL Updates to be made against them asynchronously + Represents the mapping from Blank Nodes to Collections - + - Updates the store asynchronously + Stores the Triples that should be excluded from standard output as they are part of collections - SPARQL Update - Callback - State to pass to the callback - + - Interface for storage providers which have controllable transactions - - - It is up to the implementation whether transactions are per-thread or global and how transactions interact with operations performed on the storage provider. Please see individual implementations for notes on how transactions are implemented. + Namespace for Formatter Classes which can be used to format Triples, Nodes and URIs among other types. 
- + - + - Begins a transaction + Abstract Base Class for Formatters - + - Commits a transaction + Creates a new Formatter + Format Name - + - Rolls back a transaction + Gets the Format Name - + - Interface for storage providers which have controllable transactions which can be managed asynchronously + Formats a Node as a String + Node + Triple Segment + - + - Begins a transaction asynchronously + Formats a Node as a String - Callback - State to pass to the callback + Node + - + - Commits a transaction asynchronously + Formats a Triple as a String - Callback - State to pass to the callback + Triple + - + - Rolls back a transaction asynchronously + Formats a URI Node as a String for the given Format - Callback - State to pass to the callback + URI Node + Triple Segment + - + - Represents an AllegroGraph server, may be used to access and manage stores within a catalog on the server + Formats a URI as a String for full Output + URI + - + - Creates a new Connection to an AllegroGraph store + Formats a URI as a String for full Output - Base URI for the Store - Catalog ID + URI + - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Formats a Literal Node as a String for the given Format - Base Uri for the Store + Literal Node + Triple Segment + - + - Creates a new Connection to an AllegroGraph store + Formats a Blank Node as a String for the given Format - Base Uri for the Store - Catalog ID - Username for connecting to the Store - Password for connecting to the Store + Blank Node + Triple Segment + - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Formats a Variable Node as a String for the given Format - Base Uri for the Store - Username for connecting to the Store - Password for connecting to the Store + Variable Name + Triple Segment + - + - Creates a new Connection to an AllegroGraph store + Formats a Graph Literal Node as a String for the given Format - 
Base Uri for the Store - Catalog ID - Proxy Server + Graph Literal + Triple Segment + - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Formats a Character for the given Format - Base Uri for the Store - Proxy Server + Character + - + - Creates a new Connection to an AllegroGraph store + Formats a sequence of characters as a String - Base Uri for the Store - Catalog ID - Username for connecting to the Store - Password for connecting to the Store - Proxy Server + Characters + String - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Formats a SPARQL Result for the given format - Base Uri for the Store - Username for connecting to the Store - Password for connecting to the Store - Proxy Server + SPARQL Result + + + + + Formats a SPARQL Boolean Result for the given format + + Boolean Result + - + - Gets a default template for creating a new Store + Applies escapes to the given value - Store ID - + Value + Escapes + Escaped string - + - Gets all available templates for creating a new Store + Gets the Name of the Format this Format uses - Store ID - + - Creates a new Store (if it doesn't already exist) + Formatter for generating CSV - Template for creating the new Store - + - Requests that AllegroGraph deletes a Store + Creates a new CSV Formatter - Store ID - + - Get the lists of stores available on the Server + Formats URIs for CSV output + URI + Triple Segment - + - Gets a Store within the current catalog + Formats Literals for CSV output - Store ID + Literal + Triple Segment - - AllegroGraph groups stores by catalogue, you may only use this method to obtain stores within your current catalogue - - + - Gets the List of Stores available on the server within the current catalog asynchronously + Abstract Base Class for formatters where things are formatted as lines of plain text deliminated by specific characters - Callback - State to pass to callback - + - 
Gets a default template for creating a new Store + Creates a new Deliminated Line Formatter - Store ID - Callback - State to pass to callback - + Format Name + Item Deliminator Character + Escape Character + Character to start URIs (may be null) + Character to end URIs (may be null) + Character to wrap Literals in (may be null) + Character to wrap Long Literals in (may be null) + Character to add at end of line (may be null) + Whether Literals are output with Language/Datatype information - + - Gets all available templates for creating a new Store + Formats a Triple - Store ID - Callback - State to pass to callback + Triple - + - Creates a new Store on the server within the current catalog asynchronously + Formats a URI Node - Template to create the store from - Callback - State to pass to callback + URI Node + Triple Segment + - + - Deletes a Store from the server within the current catalog asynchronously + Formats a Literal Node - Store ID - Callback - State to pass to callback + Literal Node + Triple Segment + - + - Gets a Store within the current catalog asynchronously + Formats URIs - Store ID - Callback - State to pass to call back + - - AllegroGraph groups stores by catalog, you may only use this method to obtain stores within your current catalogue - - + - Helper method for creating HTTP Requests to the Store + Formatter for formatting as HTML - Path to the Service requested - Acceptable Content Types - HTTP Method - Querystring Parameters - - + - Serializes the connection's configuration + Formats URIs using HTML encoding - Configuration Serialization Context + URI + - + - Interface for storage servers which are systems capable of managing multiple stores which are exposed as instances + Formats URIs using HTML encoding - - This interface may be implemented either separately or alongside . 
It is quite acceptable for an implementation of that provides a connection to a store sitting on a server that manages multiple stores to also provide an implementation of this interface in order to allow access to other stores on the server. - + URI + - + - Returns information on the IO behaviour of a Server + Interface for Character Formatters - + - Gets the list of available stores + Formats a Character as a String + Character - + - Gets a default template for creating a store with the given ID + Formats a sequence of characters as a String - ID - + Characters + String - + - Gets all possible templates for creating a store with the given ID + Interface for formatters designed to format entire RDF Graphs - ID - - + - Creates a new Store with the given ID + Generates the header section for the Graph - Template for the new store - Whether creation succeeded + Graph + - + - Deletes the Store with the given ID + Generates the header section for the Graph - Store ID - - Whether attempting to delete the Store that you are accessing is permissible is up to the implementation - + Namespaces + - + - Gets the Store with the given ID + Generates a generic header section - Store ID - + - Interface for storage providers which are capable of managing multiple stores asynchronously + Generates the footer section + - + - Gets information on the IO Behaviour of the Server + Interface for Formatters which can format Namespace Information - + - Lists the available stores asynchronously + Formats Namespace Information as a String - Callback - State to pass to the callback + Namespae Prefix + Namespace URI + - + - Gets a default template for creating a store with the given ID + Interface for Formatters which can format Base URI Information - ID - Callback - State to pass to the callback - - + - Gets all available templates for creating a store with the given ID + Formats Base URI Information as a String - ID - Callback - State to pass to the callback + Base URI + - + - Creates a 
store asynchronously + Interface for classes which can format Nodes into Strings - Template for the store to be created - Callback - State to pass to the callback - - Behaviour with regards to whether creating a store overwrites an existing store with the same ID is at the discretion of the implementation and SHOULD be documented in an implementations comments - - + - Deletes a store asynchronously + Formats a Node as a String - ID of the store to delete - Callback - State to pass to the callback + Node + - + - Gets a store asynchronously + Formats a Node as a String for a specific segment of a Triple - Store ID - Callback - State to pass to the callback + Node + Segment + - + - Abstract base class for templates for creating new stores in Sesame + Interface for classes which can format SPARQL Queries into Strings - - - Sesame templates generate a configuration graph like the one mentioned in the Sesame documentation, this graph is POSTed to the SYSTEM repository causing a new store to be created. 
- - - + - Constants for Sesame repository configuration namespaces + Formats a SPARQL Query into a String + SPARQL Query + - + - Constants for Sesame repository configuration namespaces + Formats a Graph Pattern into a String + Graph Pattern + - + - Constants for Sesame repository configuration namespaces + Formats a Triple Pattern into a String + Triple Pattern + - + - Constants for Sesame repository configuration namespaces + Formats a Triple Pattern item into a String + Pattern Item + Segment of the Triple Pattern in which the Item appears + - + - Constants for Sesame repository configuration namespaces + Interface for Formatters which Format SPARQL Results - + - Constants for Sesame repository configuration namespaces + Formats a SPARQL Result as a String + SPARQL Result + - + - Creates a new Sesame template + Formats a Boolean Result as a String - ID - Template name - Template description + Boolean Result + - + - Gets/Sets the descriptive label for a Sesame store + Interface for formatters designed to format entire SPARQL Result Sets - + - Gets a Graph representing the RDF that must be inserted into Sesame's SYSTEM repository in order to create the desired store + Generates a header section using the given variables + Variables - + - Gets the basic template graph which is a graph with all the required namespaces registered and the ID and label filled in + Generates a header section assuming no variables - + - Gets the Node used to refer to the store configuration context + Generates a footer section + - + - Templates for creating remote Sesame stores + Interface for Triple Formatters - - - This template generates a Sesame repository config graph like the following, depending on exact options the graph may differ: - - - @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. - @prefix rep: <http://www.openrdf.org/config/repository#>. - @prefix hr: <http://www.openrdf.org/config/repository/http#>. 
- - [] a rep:Repository ; - rep:repositoryImpl [ - rep:repositoryType "openrdf:HTTPRepository" ; - hr:repositoryURL <{%Sesame server location|http://localhost:8080/openrdf-sesame%}/repositories/{%Remote repository ID|SYSTEM%}> - ]; - rep:repositoryID "{this.ID}" ; - rdfs:label "{this.Label}" . - - - The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. - - - + - Creates a new Template + Formats a Triple as a String - Store ID + Triple + - + - Gets/Sets the remote Sesame server to connect to + Interface for URI Formatters - + - Gets/Sets the ID of the remote repository to connect to + Formats a URI as a String + URI + - + - Gets the template graph + Formats a URI as a String + URI - + - Template for creating Sesame memory stores + Formatter for formatting as Notation 3 without any compression - - - This template generates a Sesame repository config graph like the following, depending on exact options the graph may differ: - - - @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. - @prefix rep: <http://www.openrdf.org/config/repository#>. - @prefix sr: <http://www.openrdf.org/config/repository/sail#>. - @prefix sail: <http://www.openrdf.org/config/sail#>. - @prefix ms: <http://www.openrdf.org/config/sail/memory#>. - - [] a rep:Repository ; - rep:repositoryID "{this.ID}" ; - rdfs:label "{this.Label}" ; - rep:repositoryImpl [ - rep:repositoryType "openrdf:SailRepository" ; - sr:sailImpl [ - sail:sailType "openrdf:MemoryStore" ; - ms:persist {this.Persist} ; - ms:syncDelay {this.SyncDelay} - ] - ]. - - - The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. 
- - - + - Creates a new memory store template + Creates a new Uncompressed Notation 3 Formatter - Store ID - + - Gets the template graph used to create the store + Formats a Variable Node for Notation 3 + Variable + Triple Segment - + - Gets/Sets whether to persist the store + Formats a Graph Literal Node for Notation 3 + Graph Literal + Triple Segment + - + - Gets/Sets the sync delay + Formatter for formatting as Notation 3 - + - Gets/Sets whether to enable direct type hierarchy inferencing + Creates a new Notation 3 Formatter - + - Gets/Sets whether to enable RDF Schema Inferencing + Creates a new Notation 3 Formatter using the given Graph + Graph - + - Sesame Native index modes + Creates a new Notation 3 Formatter using the given Namespace Map + Namespace Map - + - SPOC indexes + Formats a Variable Node for Notation 3 + Variable + Triple Segment + - + - POSC indexes + Formats a Graph Literal Node for Notation 3 + Graph Literal + Triple Segment + - + - Template for creating Sesame Native stores + Formatter which formats Triples as NQuads adding an additional URI at the end of the Triple if there is a Graph URI associated with the Triple - - - This template generates a Sesame repository config graph like the following, depending on exact options the graph may differ: - - - @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. - @prefix rep: <http://www.openrdf.org/config/repository#>. - @prefix sr: <http://www.openrdf.org/config/repository/sail#>. - @prefix sail: <http://www.openrdf.org/config/sail#>. - @prefix ns: <http://www.openrdf.org/config/sail/native#>. - - [] a rep:Repository ; - rep:repositoryID "{this.ID}" ; - rdfs:label "{this.Label}" ; - rep:repositoryImpl [ - rep:repositoryType "openrdf:SailRepository" ; - sr:sailImpl [ - sail:sailType "openrdf:NativeStore" ; - ns:tripleIndexes "{this.IndexMode}" - ] - ]. 
- - - The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. - - - + - Creates a Sesame Native store template + Creates a new NQuads Formatter - Store ID - + - Gets the template graph used to specify the configuration of a Sesame repository + Creates a new NQuads formatter - Template Graph + NQuads syntax to output - + - Gets/Sets the Indexing Mode + Creates a new NQuads formatter + NQuads syntax to output + Format Name - + - Gets/Sets whether to enable direct type hierarchy inferencing + Formats a Triple as a String + Triple + - + - Gets/Sets whether to enable RDF Schema Inferencing + Formatter which formats Triples as NQuads according to the RDF 1.1 NQuads specification - + - - Namespace containing implementations of which provide templates for creating new stores on Sesame servers - + Creates a new formatter - - - Interface for templates for the provisioning of new stores - - - - This interface is intentionally very limited, the generic type constraints on the interface allow for specific implementations of that interface to futher constrain their implementation to accept only relevant implementations of this interface when provisioning new stores. - - - Specific implementations will likely add various properties that allow end users to configure implementation specific parameters. It is suggested that implementors include System.ComponentModel attributes on their implementations. 
- - + + + Formatter for formatting as NTriples + - + - Gets/Sets the ID for the Store + Set of characters which must be escaped in Literals - + - Gets the name of the type of store the template will create + Creates a new NTriples formatter + NTriples syntax to output + Format Name - + - Gets the description of the type of store the template will create + Creates a new NTriples Formatter - + - Validates the template returning an enumeration of error messages + Creates a new NTriples Formatter - - + - Abstract base implementation of a Store Template for creating Stardog Stores + Creates a new NTriples Formatter + Format Name - + - Creates a new Stardog Template + Gets the NTriples syntax being used - Store ID - Template Name - Template Description - Stardog Database Type - + - Gets the Database Type + Formats a URI Node + URI Node + Triple Segment + - + - Gets/Sets the minimum differential index limit + Formats a Literal Node + Literal Node + Triple Segment + - + - Gets/Sets the maximum differential merge limit + Formats a Character + Character + - + - Gets/Sets whether the database should canonicalise literals + Formats a sequence of characters as a String + Characters + String - + - Gets/Sets whether to optimize indexes for named graph queries + Formats a Blank Node + Blank Node + Triple Segment + - + + + + + + + - Gets/Sets whether to persist indexes + Formatter for formatting as NTriples according to the RDF 1.1 specification - + - Gets/Sets whether to persist indexes synchronously + Creaates a new formatter - + - Gets/Sets whether to automatically update statistics + Abstract Base Class for Formatters that can compress URIs to QNames - + - Gets/Sets the active graphs for ICV + QName Map used for compressing URIs to QNames - + - Enables/Disables ICV + Creates a new QName Formatter + Format Name + QName Map - + - Gets/Sets the reasoning mode for ICV + Creates a new QName Formatter + Format Name + QName Map + Whether the 'a' keyword can be used for the RDF type 
predicate - + - Gets/Sets whether to perform automatic consistency checking on transactions + Determines whether a QName is valid + Value + - + - Enables/Disables punning + Formats a URI Node using QName compression if possible + URI + Triple Segment + - + - Gets/Sets the graphs that contain the schema (TBox) that are used for reasoning + Formats a Literal Node using QName compression for the datatype if possible + Literal Node + Triple Segment + - + - Enables/Disables Full Text search + Formats a Namespace as a String + Namespace Prefix + Namespace URI + - + - Gets/Sets the Search re-indexing mode + A formatter which formats triples for RDF/XML output - + - Gets/Sets whether to use durable transactions + Formats a Graph Header by creating an <rdf:RDF> element and adding namespace definitions + Graph + - + - Validates that the template is filled out such that a store can be created from it + Formats a Graph Header by creating an <rdf:RDF> element and adding namespace definitions - Enumeration of errors that occurred + Namespaces + - + - Does any additional validation a derived template may require + Formats a Graph Header by creating an <rdf:RDF> element - Error collection to add to - + - Gets the JSON Template for creating a store + Formats a Graph Footer by closing the <rdf:RDF> element - + - Template for creating Stardog Disk stores + Formats a Triple as a <rdf:Description> element + Triple + - + - Creates a new template + Gets the String description of this formatter - Store ID + - + - Template for creating Stardog in-memory stores + Formatter for formatting Nodes for use in SPARQL and for formatting SPARQL Queries - + - Creates a new template + Creates a new SPARQL Formatter - Store ID - + - - Namespace containing implementations of which provide templates for creating new stores on Stardog servers - + Creates a new SPARQL Formatter using the given Graph + Graph - + - A basic store template where the only parameter is the Store ID + Creates a new SPARQL 
Formatter using the given Namespace Map + Namespace Map - + - Creates a new template + Determines whether a QName is valid - Store ID + Value + - + - Creates a new template + Formats a Variable Node in SPARQL Syntax - Store ID - Template Name - Template Description + Variable Node + Triple Segment + - + - Gets/Sets the Store ID + Formats a Namespace Declaration + Namespace Prefix + Namespace URI + - + - Gets the name of the type of store the template will create + Formats a Base URI Declaration + Base URI + - + - Gets the description of the type of store the template will create + Formats a Query in nicely formatted SPARQL syntax + SPARQL Query + - + - Validates the template + Formats a Graph Pattern in nicely formatted SPARQL syntax + Graph Pattern - - This default implementation does no validation, derived classes must override this to add their required validation - - + - Gets the string representation of the template which is the Template Name + Formats a Triple Pattern in nicely formatted SPARQL syntax + Triple Pattern - + - - Namespace for storage classes which provide support for creating new stores in conjunction with a - - - In order for an to create a new store it requires an instance of the interface from this namespace. The basic interface provides only a Store ID, specific implementations may provide many more customizable properties to allow new stores to be created that take advantage of the capabilties of the server the store is being created on. A provides methods to generate the basic templates that it accepts and should be used in preference to creating any of the implementations directly. 
- + Formats a Pattern Item in nicely formatted SPARQL syntax + Pattern Item + Triple Pattern Segment + - + - Represents a connection to a Sesame Server + Formats the Variable List for a SPARQL Query + Variables + - + - System Repository ID + Formats the Variable/QName/URI for a SPARQL DESCRIBE Query + SPARQL Query + - + - Base Uri for the Server + Formats a SPARQL Expression + SPARQL Expression + - + - Username for accessing the Server + Formats a SPARQL Aggregate + SPARQL Aggregate + - + - Password for accessing the Server + Formats a SPARQL Property Path + SPARQL Property Path + - + - Whether the User has provided credentials for accessing the Server using authentication + Formats a SPARQL GROUP BY Clause + GROUP BY Clause + - + - Repositories Prefix + Formats a SPARQL ORDER BY Clause + ORDER BY Clause + - + - Available Sesame template types + Formats the Inline Data portion of a Query + Inline Data + - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Formats a SPARQL Result using this Formatter to format the Node values for each Variable - Base Uri of the Store + SPARQL Result + - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Formats a Boolean Result - Base Uri of the Store - Username to use for requests that require authentication - Password to use for requests that require authentication + Boolean Result + - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + A Result Format that formats using the official SPARQL XML Results format - Base Uri of the Store - Proxy Server - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Formats the Header for a SPARQL Result Set - Base Uri of the Store - Username to use for requests that require authentication - Password to use for requests that require authentication - Proxy Server + Variables + - + - Gets the IO Behaviour of the server + Formats the Header for a SPARQL Result Set + - + - Gets a default template for creating a 
store + Formats the Footer for a SPARQL Result Set - Store ID - + - Gets all available templates for creating a store + Formats a SPARQL Result - Store ID + SPARQL Result - + - Creates a new Store based on the given template + Formats a Boolean Result - Template + Boolean Result - - - Templates must inherit from - - - + - Gets the Store with the given ID + Gets the string representation of the formatter - Store ID - - If the Store ID requested represents the current instance then it is acceptable for an implementation to return itself. Consumers of this method should be aware of this and if necessary use other means to create a connection to a store if they want a unique instance of the provider. - - + - Deletes the Store with the given ID + Formatter for formatting as TSV - Store ID - - Whether attempting to delete the Store that you are accessing is permissible is up to the implementation - - + - Gets the list of available stores + Creates a new TSV Formatter - - + - Gets a default template for creating a store + Formatter which formats Turtle without any compression - Store ID - Callback - State to pass to the callback - - + - Gets all available templates for creating a store + Creates a new Uncompressed Turtle Formatter - Store ID - Callback - State to pass to the callback - - + - Creates a new store based on the given template + Creates a new Uncompressed Formatter - Template - Callback - State to pass to the callback - - - Template must inherit from - - + Format Name - + - Gets a store asynchronously + Formats characters - Store ID - Callback - State to pass to the callback - - If the store ID requested matches the current instance an instance MAY invoke the callback immediately returning a reference to itself - + Character + - + - Deletes a store asynchronously + Formats a sequence of characters as a String - ID of the store to delete - Callback - State to pass to the callback + Characters + String - + - Lists the available stores asynchronously + Formatter 
which formats Turtle with QName compression - Callback - State to pass to the callback - + - Helper method for creating HTTP Requests to the Store + Set of characters that must be escaped for Long Literals - Path to the Service requested - Acceptable Content Types - HTTP Method - Querystring Parameters - - + - Ensures the connection to the Sesame SYSTEM repository is prepared if it isn't already + Set of characters that must be escaped for Literals - + - Disposes of the server + Creates a new Turtle Formatter - + - Serializes the connection's configuration + Creates a new Turtle Formatter that uses the given QName mapper - Configuration Serialization Context + QName Mapper - + - Abstract implementation of a management connection to a Stardog server using the HTTP protocol + Creates a new Turtle Formatter for the given Graph + Graph - + - Available Stardog template types + Creates a new Turtle Formatter for the given Namespace Map + Namespace Map - + - Creates a new connection to a Stardog Server + Creates a new Turtle Formatter - Base Uri of the Server + Format Name - + - Creates a new connection to a Stardog Server + Creates a new Turtle Formatter - Base Uri of the Server - Username - Password + Format Name + Graph - + - Creates a new connection to a Stardog Server + Creates a new Turtle Formatter - Base Uri of the Server - Proxy Server + Format Name + Namespace Map - + - Creates a new connection to a Stardog Server + Creates a new Turtle Formatter - Base Uri of the Server - Username - Password - Proxy Server + Format Name + QName Map - + - Gets the IO Behaviour of the server + Formats a Literal Node as a String + Literal Node + Triple Segment + - + - Lists the database available on the server + Formats a Blank Node as a String + + Blank Node + Triple Segment + + + + + Formats a Namespace Decalaration as a @prefix declaration + Namespace Prefix + Namespace URI - + - Gets a default template for creating a new Store + Formats a Base URI declaration as a @base 
declaration - Store ID + Base URI - + - Gets all available templates for creating a new Store + Formatter which formats Turtle with QName compression using the newer W3C syntax which permits a wider range of valid QNames - Store ID - - + - Creates a new Store based off the given template + Creates a new Turtle Formatter - Template - - - - Templates must inherit from - - - Uses some code based off on answers here to help do the multipart form data request. - - - + - Deletes a Store with the given ID + Creates a new Turtle Formatter that uses the given QName mapper - Store ID + QName Mapper - + - Gets a provider for the Store with the given ID + Creates a new Turtle Formatter for the given Graph - Store ID - + Graph - + - Lists all databases available on the server + Creates a new Turtle Formatter for the given Namespace Map - Callback - State to pass to the callback + Namespace Map - + - Gets a default template for creating a new Store + Creates a new Turtle Formatter - Store ID - Callback - State to pass to the callback - + Format Name - + - Gets all available templates for creating a new Store + Creates a new Turtle Formatter - Store ID - Callback - State to pass to the callback - + Format Name + Graph - + - Creates a new store based on the given template + Creates a new Turtle Formatter - Template - Callback - State to pass to the callback - - - Template must inherit from - - + Format Name + Namespace Map - + - Deletes a database from the server + Creates a new Turtle Formatter - Store ID - Callback - State to pass to the callback + Format Name + QName Map - + - Gets a database from the server + Gets whether a QName is valid in Turtle as specified by the W3C - Store ID - Callback - State to pass to the callback + QName + - + - Disposes of the server + + Namespace for classes related to .Net serialization integration in the library + - + - Serializes the connection's configuration + Helper Class for use in serialization and deserialization - Configuration 
Serialization Context - + - Static Class containing constants relevant to provisioning new Stardog stores + Abstract Base class for Dataset writers that produce GZipped Output + + + While the normal witers can be used with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such + + - + - Constants for valid Stardog Options + Creates a new GZiped Writer + Underlying writer - + - Constants for valid Stardog Options + Saves a RDF Dataset as GZipped output + Store to save + File to save to - + - Constants for valid Stardog Options + Saves a RDF Dataset as GZipped output + Store to save + Writer to save to - + - Constants for valid Stardog Options + Saves a RDF Dataset as GZipped output + Store to save + Writer to save to. Must be an instance of + Boolean flag indicating if the output stream should remain open after the output is written - + - Constants for valid Stardog Options + Helper method for raising warning events + Warning Message - + - Constants for valid Stardog Options + Event raised when non-fatal output errors - + - Constants for valid Stardog Options + Gets the description of the writer + - + - Constants for valid Stardog Options + Writer for creating GZipped NQuads output - + - Constants for valid Stardog Options + Creates a new GZipped NQuads output - + - Constants for valid Stardog Options + Writer for creating GZipped TriG outptut - + - Constants for valid Stardog Options + Creates a new GZipped TriG output - + - Constants for valid Stardog Options + Writer for creating GZipped TriX output - + - Constants for valid Stardog Options + Creates a new GZipped TriX output - + - Constants for valid Stardog Options + Abstract Base class for Results writers which generate GZipped output + + + While the normal witers can be used with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such + + - + - 
Constants for valid Stardog Options + Creates a new GZipped Results writer + Underlying writer - + - Constants for valid Stardog Options + Saves a Result Set as GZipped output + Result Set to save + File to save to - + - Constants for valid Stardog Options + Saves a Result Set as GZipped output + Result Set to save + Writer to save to - + - Constants for valid Stardog Options + Helper method for raising warning events + Warning message - + - Constants for valid Stardog Database types + Event which is raised if non-fatal errors occur writing results - + - Constants for valid Stardog Database types + Gets the description of the writer + - + - Constanst for valid Search Re-Index Modes + Writer for GZipped SPARQL XML - + - Constanst for valid Search Re-Index Modes + Creates a new GZipped SPARQL XML writer - + - Constants for special named graph URIs + Writer for GZipped SPARQL JSON - + - Constants for special named graph URIs + Creates a new GZipped SPARQL JSON writer - + - Constants for various Stardog reasoning settings + Writer for GZipped SPARQL CSV - + - Constant for various Stardog integer settings + Creates a new GZipped SPARQL CSV writer - + - Constant for various Stardog integer settings + Writer for GZipped SPARQL TSV - + - Constants for various Stardog boolean flags + Creates a new GZipped SPARQL TSV writer - + - Constants for various Stardog boolean flags + Abstract base class for RDF writers that generate GZipped output + + + While the normal witers can be used with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such + + - + - Constants for various Stardog boolean flags + Creates a new GZipped writer + Underlying writer + raised if is null - + - Constants for various Stardog boolean flags + Saves a Graph as GZipped output + Graph to save + File to save to - + - Constants for various Stardog boolean flags + Saves a Graph as GZipped output + Graph to save + Writer to save 
to - + - Constants for various Stardog boolean flags + Helper method for raising warning events + Warning message - + - Constants for various Stardog boolean flags + Event which is raised if non-fatal errors occur writing RDF output - + - Constants for various Stardog boolean flags + Gets the description of the writer + - + - Constants for various Stardog boolean flags + Writer for GZipped NTriples - + - Constants for various Stardog boolean flags + Creates a new GZipped NTriples writer - + - Pattern for valid Stardog database names + Writer for GZipped Turtle - + - Validates whether a Database Name is valid + Creates a new GZipped Turtle writer - Database Name - - + - Validates whether a Database Type is valid + Writer for GZipped Notation 3 - Database Type - - + - Validates whether a Search Re-Index Mode is valid + Creates a new GZipped Notation 3 writer - Mode - - + - Validates whether a Named Graph URI is valid + Writer for GZipped RDF/XML - URI - - + - Management connection for Stardog 1.* servers + Creates a new GZipped RDF/XML writer - + - Creates a new connection to a Stardog Server + Writer for GZipped RDF/JSON - Base Uri of the Server - + - Creates a new connection to a Stardog Server + Creates a new GZipped RDF/JSON writer - Base Uri of the Server - Username - Password - + - Creates a new connection to a Stardog Server + Writer for GZipped RDFa - Base Uri of the Server - Proxy Server - + - Creates a new connection to a Stardog Server + Creates a new GZipped RDFa writer - Base Uri of the Server - Username - Password - Proxy Server - + - Gets a provider for the Store with the given ID + Writer for GZipped JSON-LD - Store ID - - + - Gets a database from the server + Create a new GZippedJsonLdWriter - Store ID - Callback - State to pass to the callback - + - Management connection for Stardog 2.* servers + Create a new GZippedJsonLdWriter with a specific set of + + The writer options to pass through + to the underlying - + - Creates a new connection to a 
Stardog Server + Abstract Base Class for HTML Writers which provides basic implementation of the IHtmlWriter interface - Base Uri of the Server - + - Creates a new connection to a Stardog Server + Gets/Sets a path to a Stylesheet which is used to format the Graph output - Base Uri of the Server - Username - Password - + - Creates a new connection to a Stardog Server + Gets/Sets the CSS class used for the anchor tags used to display the URIs of URI Nodes - Base Uri of the Server - Proxy Server - + - Creates a new connection to a Stardog Server + Gets/Sets the CSS class used for the span tags used to display Blank Node IDs - Base Uri of the Server - Username - Password - Proxy Server - + - Gets a provider for the Store with the given ID + Gets/Sets the CSS class used for the span tags used to display Literals - Store ID - - + - Gets a database from the server + Gets/Sets the CSS class used for the anchor tags used to display Literal datatypes - Store ID - Callback - State to pass to the callback - + - Management connection for Stardog 3.* servers + Gets/Sets the CSS class used for the span tags used to display Literal language specifiers - + - Creates a new connection to a Stardog Server + Gets/Sets the CSS class used for div tags used to group chunks of markup into a box - Base Uri of the Server - + - Creates a new connection to a Stardog Server + Gets/Sets the Prefix applied to href attributes - Base Uri of the Server - Username - Password - + - Creates a new connection to a Stardog Server + Base implementation of that simply handles the logic of optionally closing a text writer stream. - Base Uri of the Server - Proxy Server - + + + + + + + + + + + + + - Creates a new connection to a Stardog Server + Method to be implemented in derived classes to perform the actual writing to a TextWriter - Base Uri of the Server - Username - Password - Proxy Server + The graph to be saved + The to save the graph to. 
- + - Gets a provider for the Store with the given ID + Class for generating Turtle Concrete RDF Syntax which provides varying levels of Syntax Compression - Store ID - + + Similar in speed to the standard TurtleWriter but capable of using more syntax compressions depending on the Compression level set + + Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Gets a database from the server + Creates a new Compressing Turtle Writer which uses the Default Compression Level - Store ID - Callback - State to pass to the callback - + - Management connection for Stardog servers running the latest version, current this is 3.* + Creates a new Compressing Turtle Writer which uses the given Compression Level + Desired Compression Level + See Remarks for this classes CompressionLevel property to see what effect different compression levels have - + - Creates a new connection to a Stardog Server + Creates a new compressing Turtle writer using the given syntax level - Base Uri of the Server + Syntax Level - + - Creates a new connection to a Stardog Server + Creates a new Compressing Turtle Writer which uses the given Compression Level and Syntax Level - Base Uri of the Server - Username - Password + Desired Compression Level + Syntax Level + See Remarks for this classes CompressionLevel property to see what effect different compression levels have - + - Creates a new connection to a Stardog Server + Gets/Sets whether Pretty Printing is used - Base Uri of the Server - Proxy Server - + - Creates a new connection to a Stardog Server + Gets/Sets whether High Speed Write Mode should be allowed - Base Uri of the Server - Username - Password - Proxy Server - + + Gets/Sets the Compression Level to be used + + - Namespace for storage classes which provide support for managing servers that provide multiple backing Stores + If the Compression Level is set to None then High Speed mode will always be used 
regardless of the input Graph and the HighSpeedMorePermitted property. - Servers are managed via the interface, a server can provide lists of available stores, retrieve a reference to a store, create new stores and delete existing stores. The exact capabilites may depend on the implementation and may be inspected via the property. + If the Compression Level is set to Minimal or above then full Predicate Object lists will be used for Triples. - - - - - Possible Async Storage API Actions - + + If the Compression Level is set to More or above then Blank Node Collections and Collection syntax will be used if the Graph contains Triples that can be compressed in that way. + - + - Loaded a Graph + Gets/Sets the Default Namespaces that are always available - + - Loaded data with a RDF Handler + Gets the type of the Triple Formatter used by the writer - + - Saved a Graph + Saves a Graph to a file using Turtle Syntax + Graph to save + File to save to - + - Updates a Graph + Saves a Graph to the given Stream using Turtle Syntax + Graph to save + Stream to save to - + - Deleted a Graph + Generates the Turtle Syntax for the Graph - + - Listed Graphs + Generates Output for Triples as a single "s p o." 
Triple + Writer Context + Triple to output + + Used only in High Speed Write Mode - + - Made a SPARQL Query + Generates Output for Nodes in Turtle syntax + Writer Context + Node to generate output for + Segment of the Triple being written + Indentation + - + - Made a SPARQL Query with a handler + Internal Helper method which converts a Collection into Turtle Syntax + Writer Context + Collection to convert + Indentation + - + - Made a SPARQL Update + Helper method for generating Parser Warning Events + Warning Message - + - Began a Transaction + Event which is raised when there is a non-fatal issue with the Graph being written - + - Committed a Transaction + Gets the String representation of the writer which is a description of the syntax it produces + - + - Rolled back a Transaction + Class for generating CSV output from RDF Datasets - + - Gettting a new store template + Gets the type of the Triple Formatter used by the writer - + - Getting all available templates + Saves a Triple Store to CSV Format + Triple Store to save + File to save to - + - Created a Store + Saves a Triple Store to CSV Format + Triple Store to save + Writer to save to - + - Deleted a Store + Saves a Triple Store to CSV Format + Triple Store to save + Writer to save to + Boolean flag indicating if should be left open after the writer completes - + - Retrieved a reference to a Store + Delegate for the SaveGraphs method + Context for writing the Store - + - Got the list of Stores + Thread Worker method which writes Graphs to the output + Context for writing the Store - + - Unknown Action + Generates the Output for a Graph as a String in CSV syntax + Context for writing the Store + Context for writing the Graph + - + - Represents arguments passed to callbacks on success/failure of a async storage API call + Generates Output for the given Node - - - Primarily used to provide simple method signatures on the async storage API callbacks - - + Writer Context + Node + Triple Segment - + - Creates new 
callback arguments + Event which is raised when a non-fatal error occurs while outputting CSV - Operation - + - Creates new callback arguments + Gets the String representation of the writer which is a description of the syntax it produces - Operation - Error that occurred + - + - Creates new callback arguments + Class for generating CSV output from RDF Graphs - Operation - Graph to return - + - Creates new callback arguments + Gets the type of the Triple Formatter used by the writer - Operation - Graph to return - Error that occurred - + - Creates new callback arguments + Saves a Graph to CSV format - Operation - URI of the affected Graph + Graph + File to save to - + - Creates new callback arguments + Saves a Graph to CSV format - Operation - URI of the affected Graph - Error that occurred + Graph + Writer to save to - + - Creates new callback arguments + Generates Node Output for the given Node - Operation - Enumeration of Graph URIs + Text Writer + Node + Triple Segment - + - Creates new callback arguments + Event which is raised if the Writer detects a non-fatal error while outputting CSV - Operation - Handler to return - + - Creates new callback arguments + Gets the String representation of the writer which is a description of the syntax it produces - Operation - Handler to return - Error that occurred + - + - Creates new callback arguments + A helper class containing GraphML name and URI constants - Operation - SPARQL Query - RDF Handler to return - Results Handler to return - + - Creates new callback arguments + The namespace URI for GraphML XML elements - Operation - SPARQL Query - RDF Handler - Results Handler - Error that occurred - + - Creates new callback arguments + The URL of the GraphML XML schema - Operation - SPARQL Query - Results to return - + - Creates new callback arguments + The name of the GraphML XML root element - Operation - SPARQL Query - Results to return - Error that occurred - + - Creates new callback arguments + The name of the 
GraphML XML element representing a graph - Operation - Data to return - + - Creates new callback arguments + The name of the GraphML XML attribute representing the default directedness of and edge - Operation - Data to return - Error that occurred - + - Creates new callback arguments + The value representing a directed edge - Operation - Enumeration of Store IDs - + - Creates new callback arguments + The name of the GraphML XML element representing an edge - Operation - Enumeration of Store IDs - Error that occurred - + - Creates new callback arguments + The name of the GraphML attribute representing the source of an edge - Operation - Store ID - Storage Provider - Error that occurred - + - Creates new callback arguments + The name of the GraphML attribute representing the target of an edge - Operation - Store ID - Template - + - Creates new callback arguments + The name of the GraphML element representing the source of a node - Operation - Store ID - Templates - + - Sets the Data to the appropriate property based on the operation type + The name of the GraphML element representing custom attributes for nodes and edges - Data - + - Gets whether the async operation succeeded (no error occurred) + The name of the GraphML attribute representing the domain of a key - + - Gets the Graph that was saved/loaded (if applicable) + The name of the GraphML attribute representing the type of an attribute - + - Gets the error that occurred (for failed operations) + The value representing the string type - + - Gets the URI of the Graph affected by the operation + The value representing a node label attribute id - + - Gets the list of Graph URIs (if applicable) + The value representing an edge label attribute id - + - Gets the RDF Handler used (if applicable) + The name of the GraphML attribute representing the id of a node or edge - + - Gets the Results Handler used (if applicable) + The name of the GraphML element representing a key - + - Gets the Query Results (if applicable) + 
Class for serializing a graph in GraphML format - + - Gets the SPARQL Query (if applicable) + Event raised when there is ambiguity in the syntax being producing + This class doesn't raise this event - + - Gets the SPARQL Update (if applicable) + Saves a triple store to a file in GraphML format + The source triple store + The name of the target file - + - Gets the Store ID (if applicable) + Saves a triple store to a text writer in GraphML format + The source triple store + The target text writer - + - Gets the list of Store IDs (if applicable) + Saves a triple store to a text writer in GraphML format + The source triple store + The target text writer + Boolean flag indicating if the output writer should be left open by the writer when it completes - + - Gets the Storage Provider (if applicable) + Saves a triple store to an XML writer in GraphML format - - - For the operation this will be the reference to the newly returned store instance - - + The source triple store + The target XML writer - + - Gets the operation that was performed + A Class which creates GraphViz Graphs entirely dynamically - + - Gets the template that was created (if any) + Creates a new GraphVizGenerator + Format for the Output (svg is default) + Only use this form if you're certain that dot.exe is in your PATH otherwise the code will throw an error - + - Gets the templates that were created (if any) + Creates a new GraphVizGenerator + Format for the Output + Directory in which GraphViz is installed - + - Generic callback for async storage API operations + Gets/Sets the Format for the Output - Originator of the callback - Callback Arguments - State object originally passed to the async call - + - Static Helper for the Storage API + Generates GraphViz Output for the given Graph + Graph to generated GraphViz Output for + File you wish to save the Output to + Whether you want to open the Output in the default application (according to OS settings) for the filetype after it is Created - + - 
Template for posting form data as part of a HTTP multipart request + Internal Helper Method for locating the GraphViz Directory using the PATH Environment Variable - + - Gets a new unique boundary for HTTP mutlipart requests + A Writer which generates GraphViz DOT Format files from an RDF Graph - + - Handles HTTP Query Errors obtaining additional information from the HTTP response if possible + Saves a Graph into GraphViz DOT Format - HTTP Error - + Graph to save + File to save to - + - Handles HTTP Errors obtaining additional information from the HTTP response if possible + Saves a Graph into GraphViz DOT Format - HTTP Error - Action being performed - + Graph to save + Stream to save to - + - Handles HTTP Errors obtaining additional information from the HTTP response if possible + Internal Helper Method for converting a Triple into DOT notation - HTTP Error - Action being performed - Function that generates the actual errors - - Adapted from Ron Michael's Zettlemoyer's original patch for this in Stardog to use it across all operations as far as possible - + Triple to convert + Writer Context + - + - Tries to get the status line for inclusion in the HTTP error message + Internal Helper method for converting a Node into DOT notation - Web exception - Status line if available, empty string otherwise + Node to Convert + Writer Context + + Currently Graphs containing Graph Literal Nodes cannot be converted - + - Handles Query Errors + Internal Helper method for converting Uri Nodes to DOT Notation - Error + Uri Node to convert + Writer Context - + - Handles Errors + Internal Helper Method for converting Blank Nodes to DOT notation - Error - Action being performed + Blank Node to Convert - + - Handles Errors + Internal Helper Method for converting Literal Nodes to DOT notation - Error Type - Error - Action being performed - Function that generates the actual errors + Literal Node to convert - + - Base class for update operations on virtualized graphs. 
Implementors have to provide a method to - convert standard Nodes to their virtual form according to the IVirtualRdfProvider which is in use. + Internal Helper method which handles raising the Warning event if an Event Handler is registered to it - Node ID Type - Graph ID Type + Warning Message - + - Converts a standard INode to a virtualized node with a pre-materialized value. + Event that is raised if there is a potential problem with the RDF being output - Virtual RDF Provider, the object, e.g. a storage manger, that provides virtualization of nodes - Node that has to be converted to it's virtualized form with itself as materialized value. Usually a parsed Literal or Uri. + Not used by this Writer - + - Virtual RDF Provider + Gets the String representation of the writer which is a description of the syntax it produces + - + - Creates a new Store Graph Persistence Wrapper for Virtualized Nodes + HTML Schema Writer is a HTML Writer which writes a human readable description of a Schema/Ontology - Generic IO Manager - Virtual RDF Provider - Graph with virtualized Nodes to wrap - Graph URI (the URI the Graph will be persisted as) - Whether to operate in write-only mode - - - Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped must be an empty Graph - - - + - Asserts a Triple after virtualization in the Graph + Saves the Graph to the given File as an XHTML Table with embedded RDFa - Triple + Graph to save + File to save to - + - Retracts a Triple after virtualization from the Graph + Saves the Result Set to the given Stream as an XHTML Table with embedded RDFa - Triple + Graph to save + Stream to save to - + + + + - Gets whether the virtualized form of a given Triple exists in this Graph + Internal method which generates the HTML Output for the Graph - Triple to test - Triple is known to the Graph + Writer Context - + - Converts 
subject, predicate and object of a given Triple to their respective virtualized forms + Helper method for raising the Warning event - Triple to virtualize - The virtualized Triple. Itself, if it was already virtual. + Warning Message - + - Virtualizes a Node + Event which is raised if there is a non-fatal error with the RDF being output - Node to be virtualized - The Node in its virtual form. Itself, if it was already virtual. - + - Interface for Virtual Nodes + Gets the String representation of the writer which is a description of the syntax it produces - Node ID Type - Graph ID Type + - + - Gets the Node ID + Class for saving RDF Graphs to a XHTML Table format with the actual Triples embedded as RDFa + + + Since not all Triples can be embedded into XHTML those Triples will not have RDFa generated for them but all Triples will be shown in a human readable format. Triples that can be serialized are roughly equivalent to anything that can be serialized in Turtle i.e. URI/BNode subject, URI predicates and URI/BNode/Literal object. 
+ + + If you encode Triples which have values datatyped as XML Literals with this writer then round-trip Graph equality is not guaranteed as the RDFa parser will add appropriate Namespace declarations to elements as required by the specification + + - + - Gets the Virtual Node provider + Gets/Sets the Default Namespaces to use for writers - + - Gets whether the Nodes value has been materialised + Saves the Graph to the given File as an XHTML Table with embedded RDFa + Graph to save + File to save to - + - Gets the materialised value forcing it to be materialised if necessary + Saves the Result Set to the given Stream as an XHTML Table with embedded RDFa + Graph to save + Stream to save to - + - Interface for comparing nodes on their VirtualID property + Save the Graph to the given Stream as an XHTML Table with embedded RDFa + Graph to save + Stream to save to + Whether to leave open after writing the graph - + - Attempt to compare the VirtualID of this node with the VirtualID of the other node + Internal method which generates the HTML Output for the Graph - The other node to try to compare against - The result of the comparison if it could be performed - True if a comparison could be performed, false otherwise. + Writer Context - + - Interface for nodes that know for themseves how to create a copy of themselves to a different graph + Generates Output for a given Node - - Especially virtual nodes need to copy themselves during query algebra processing, - because the standard copy tools might destroy their virtual state by duplicating it's virtualized - values. In consequence all indices in the various triple stores fail to match such value-copied nodes - + Writer Context + Node - + - Copies the Node into another Graph, currently only used by virtual nodes + Generates Output for a given Node - Target Graph - + Writer Context + Node + Triple being written - + - A Virtual RDF Provider is a provider that transforms materialised values into virtual ID values. 
These virtual values can be used to do much faster term equality checking and to minimise memory usage when accessing out of memory data. + Helper method for raising the Warning event - Node ID Type - Graph ID Type - - - An implementation of this is typically in addition to a more general RDF store implementation (such as an IStorageProvider) and was originally designed and intended for use in creating ISparqlDataset instances which allow out of memory data to be queried more efficiently. - - - It is expected that most implementations will use a cache to ensure that repeated transformations are as fast as possible - -

Important Note re: Blank Nodes

- - In order for code that uses this class to function correctly it must be ensured that IDs issued for Blank Nodes are graph scoped, as such a specific method for converting Blank Nodes into Virtual Node IDs is given - -
+ Warning Message
- + - Given a Node ID returns the materialised value in the given Graph + Event which is raised if there is a non-fatal error with the RDF being output - Graph to create the Node in - Node ID - - + - Given a Graph ID returns the value of the Graph URI + Gets the String representation of the writer which is a description of the syntax it produces - Graph ID - + - Given a non-blank Node returns the Node ID + Class for serializing a Triple Store in JSON-LD syntax - Node - - Should function as equivalent to the two argument version with the createIfNotExists parameter set to false - - + - Gets the Graph ID for a Graph + Create a new serializer with default serialization options - Graph - - - Should function as equivalent to the two argument version with the createIfNotExists parameter set to false - - + - Gets the Graph ID for a Graph creating it if necessary + Create a new serializer with the specified serialization options - Graph - Determines whether to create a new Graph ID if there is not already one for the given Graph - + - + + + + + + + + + + - Gets the Graph ID for a Graph URI + Serialize a Triple Store to an expanded JSON-LD document - Graph URI + - - Should function as equivalent to the two argument version with the createIfNotExists parameter set to false - - + + + + - Gets the Graph ID for a Graph URI + Class representing the options that can be passed to the via its constructor. - Graph URI - Determines whether to create a new Graph ID if there is not already one for the given Graph URI - - + - Given a non-blank Node returns the Node ID + If the this flag is set to true, RDF literals with a datatype IRI that equals xsd:integer or xsd:double are converted + to a JSON numbers and RDF literals with a datatype IRI that equals xsd:boolean are converted to true or false based + on their lexical form. 
- Node - Determines whether to create a new Node ID if there is not already one for the given value - - + - Given a Blank Node returns a Graph scoped Node ID + Unless this flag is set to true, rdf:type predicates will be serialized as @type as long as the + associated object is either an IRI or blank node identifier. - Blank Node - Determines whether to create a new Node ID if there is not already one for the given value - - + - Given a Blank Node returns a Graph scoped Node ID + Get or set the formatting to apply to the JSON generated by the writer - Blank Node - - - Should function as equivalent to the two argument version with the createIfNotExists parameter set to false - + Defaults to - + - Gets the Node ID that is used to indicate that a Node does not exist in the underlying storage + Class for generating Notation 3 Concrete RDF Syntax which provides varying levels of Syntax Compression + Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Loads a Graph creating all the Triples with virtual node values + Creates a new Notation 3 Writer which uses the Default Compression Level - Graph to load into - URI of the Graph to load - + - Simple implementation of a Virtual Blank Node where the virtual IDs are integers + Creates a new Notation 3 Writer which uses the given Compression Level + Desired Compression Level + See Remarks for this classes CompressionLevel property to see what effect different compression levels have - + - Creates a new Virtual Blank Node + Gets/Sets whether Pretty Printing is used - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Blank Node + Gets/Sets whether High Speed Write Mode should be allowed - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value - + - Determines whether this Node is equal to another virtual Blank node + Gets/Sets the Compression Level to be used - Other Blank Node - + + 
+ If the Compression Level is set to None then High Speed mode will always be used regardless of the input Graph and the HighSpeedMorePermitted property. + + + If the Compression Level is set to Minimal or above then full Predicate Object lists will be used for Triples. + + + If the Compression Level is set to More or above then Blank Node Collections and Collection syntax will be used if the Graph contains Triples that can be compressed in that way. + - + - Compares this Node to another virtual Blank node + Gets/Sets the Default Namespaces that are always available - Other Blank Node - - + - Copies the Node to another Graph + Gets the type of the Triple Formatter used by this writer - Target Graph - - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Saves a Graph to a file using Notation 3 Syntax - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. + Graph to save + File to save to - + - Simple implementation of a Virtual Graph Literal Node where the virtual IDs are integers + Saves a Graph to the given Stream using Notation 3 Syntax + Graph to save + Stream to save to - + - Creates a new Virtual Graph Literal Node + Generates the Notation 3 Syntax for the Graph - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Graph Literal Node + Generates Output for Triples as a single "s p o." 
Triple - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Values + Writer Context + Triple to output + + Used only in High Speed Write Mode - + - Determines whether this Node is equal to another virtual Graph Literal node + Generates Output for Nodes in Notation 3 syntax - Other Graph Literal Node + Writer Context + Node to generate output for + Segment of the Triple being output + Indent to use for pretty printing - + - Compares this Node to another virtual Graph Literal node + Internal Helper method which converts a Collection into Notation 3 Syntax - Other Graph Literal Node + Writer Context + Collection to convert + Indent to use for pretty printing - + - Copies the Node to another Graph including the materialised value if present + Helper method for generating Parser Warning Events - Target Graph - + Warning Message - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Event which is raised when there is a non-fatal issue with the Graph being written - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. 
- + - Simple implementation of a Virtual Literal Node where the virtual IDs are integers + Gets the String representation of the writer which is a description of the syntax it produces + - + - Creates a new Virtual Literal Node + Class for serializing a Triple Store in the NQuads (NTriples plus context) syntax - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Literal Node + Creates a new writer - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value - + - Determines whether this Node is equal to another virtual Literal node + Creates a new writer - Other Literal Node - + NQuads Syntax mode to use - + - Compares this Node to another virtual Literal node + Controls whether Pretty Printing is used - Other Literal Node - + + For NQuads this simply means that Graphs in the output are separated with Whitespace and comments used before each Graph + - + - Copies the Node to another Graph including the materialised value if present + Gets/Sets whether Multi-Threaded Writing - Target Graph - - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Gets the type of the Triple Formatter used by this writer - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. 
- + - Simple implementation of a Virtual URI Node where the virtual IDs are integers + Gets/Sets the NQuads syntax mode - + - Creates a new Virtual URI Node + Saves a Store in NQuads format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider + Store to save + File to save to - + - Creates a new Virtual URI Node + Saves a Store in NQuads format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Store to save + Writer to save to - + - Determines whether this Node is equal to another virtual URI node + Saves a Store in NQuads format - Other URI Node - + Store to save + Writer to save to + Boolean flag indicating if should be left open after the store is written - + - Compares this Node to another virtual URI node + Converts a Triple into relevant NQuads Syntax - Other URI Node + Writer Context + Triple to convert + Graph URI - + - Copies the Node to another Graph including the materialised value if present + Converts a Node into relevant NTriples Syntax - Target Graph + Node to convert + Writer Context + Triple Segment being written - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Delegate for the SaveGraphs method - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. 
+ Context for writing the Store - + - Simple implementation of a Virtual URI Node where the virtual IDs are integers + Thread Worker method which writes Graphs to the output + Context for writing the Store - + - Creates a new Virtual Variable Node + Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Variable Node + Internal Helper method which raises the Warning event only if there is an Event Handler registered - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Warning Message - + - Determines whether this Node is equal to another virtual variable node + Gets the String representation of the writer which is a description of the syntax it produces - Other Variable Node - + - Compares this Node to another virtual Variable node + Class for generating RDF in NTriples Concrete Syntax - Other Variable Node - + Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Copies the Node to another Graph including the materialised value if present + Creates a new writer - Target Graph - + NTriples Syntax Mode - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Creates a new writer - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. - + - Abstract Base implementation of a Virtual Node which is a Node that is represented only by some ID until such time as its value actually needs materialising + Gets/Sets whether Triples are sorted before being Output - Node ID Type - Graph ID Type - - - As far as possible equality checks are carried out using these IDs and limited comparisons may also be done this way. 
More specific implementations may wish to derive from this class in order to override the default comparison implementation to further reduce the number of places where value materialisation is done. - - - Note that this class does not implement any of the specialised Node interfaces and instead relies on the casting of its materialised value to an appropriately typed node to provide the true values to code that needs it - - - + - The materialised value of the Virtual Node + Gets the type of the Triple Formatter used by this writer - + - Creates a new Base Virtual Node + Gets/Sets the NTriples syntax mode - Graph the Node belongs to - Type of the node - Virtual ID - Virtual RDF Provider - + - Creates a new Base Virtual Node + Saves the Graph in NTriples Syntax to the given stream - Graph the Node belongs to - Type of the node - Virtual ID - Virtual RDF Provider - Materialised Value + Graph to save + File to save to - + - Materialises the Value if it is not already materialised + Saves the Graph in NTriples Syntax to the given stream + Graph to save + Stream to save to - + - Called after the value is materialised for the first time + Converts a Triple into relevant NTriples Syntax + Writer Context + Triple to convert + - + - Gets the Virtual ID of the Node + Converts a Node into relevant NTriples Syntax + Writer Context + Node to convert + Segment of the Triple being written + - + - Gets the Virtual RDF Provider of the Node + Event which is raised when there is an issue with the Graph being serialized that doesn't prevent serialization but the user should be aware of - + - Gets whether the Nodes value has been materialised + Internal Helper method which raises the Warning event only if there is an Event Handler registered + Warning Message - + - Gets the materialised value of the Node forcing it to be materialised if it hasn't already + Gets the String representation of the writer which is a description of the syntax it produces + - + - Gets the Type of the Node + 
Class for generating RDF/XML Concrete Syntax + + + This is a fast writer based on the fast writing technique used in the other non-RDF/XML Writers. + + + Note: If the Graph to be serialized makes heavy use of collections it may result in a StackOverflowException. To address this set the CompressionLevel property to < 5 + + - + - Gets the Graph the Node belongs to + Creates a new RDF/XML Writer - + - Gets/Sets the Graph URI of the Node + Creates a new RDF/XML Writer + Compression Level - + - Gets the String representation of the Node formatted with the given Node formatter + Creates a new RDF/XML Writer - Formatter - + Compression Level + Whether to use DTDs to further compress output - + - Gets the String representation of the Node formatted with the given Node formatter + Creates a new RDF/XML Writer - Formatter - Triple Segment - + Compression Level + Whether to use DTDs to further compress output + Whether to use attributes to encode triples with simple literal objects where possible - + - Compares this Node to another Virtual Node + Gets/Sets Pretty Print Mode for the Writer - Other Virtual Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Compares this Node to another Virtual Node + Gets/Sets the Compression Level in use - Other Virtual Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + + Compression Level defaults to High - if Compression Level is set to below More i.e. 
< 5 then Collections will not be compressed into more compact syntax + - + - Compares this Node to another Node + Gets/Sets whether DTDs are used in the output - Other Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Compares this Node to another Blank Node + Gets/Sets whether triples which have a literal object will be expressed as attributes rather than elements where possible (defaults to true) - Other Blank Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Compares this Node to another Graph LiteralNode + Gets/Sets the Default Namespaces that are always available + + + + + Gets the type of the Triple Formatter used by the writer + + + + + Saves a Graph in RDF/XML syntax to the given File + + Graph to save + Filename to save to + + + + Saves a Graph to an arbitrary output stream + + Graph to save + Stream to save to + + + + Internal method which generates the RDF/Json Output for a Graph + + Graph to save + Stream to save to + + + + Internal Helper method for raising the Warning event - Other Graph Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + Warning Message - + - Compares this Node to another Literal Node + Event which is raised when there is a non-fatal issue with the RDF being output - Other Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Compares this Node to another URI Node + Gets the String representation of the writer which is a description of the syntax it produces - Other URI Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Compares this Node to another Variable Node + Class for generating RDF/Json Concrete Syntax - Other Variable Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. +

+ Uses the Json.Net library by James Newton-King to output RDF/Json according to the specification located on the Talis n2 Wiki +

+ Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue
- + - Checks this Node for equality against another Object + Gets/Sets Pretty Print Mode for the Writer - Other Object - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Checks this Node for equality against another Virtual Node + Saves a Graph in RDF/Json syntax to the given File - Other Virtual Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Graph to save + Filename to save to - + - Checks this Node for equality against another Virtual Node + Saves a Graph to an arbitrary output stream - Other Virtual Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Graph to save + Stream to save to - + - Checks this Node for equality against another Node + Internal method which generates the RDF/Json Output for a Graph - Other Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- + Graph to save + Stream to save to - + - Checks the Node Types and if they are equal invokes the INode based comparison + Internal Helper method for raising the Warning event - Node to compare with for equality - + Warning Message - + - Tries to check for equality using virtual node IDs + Event which is raised when there is a non-fatal issue with the RDF being output - Node to test against - Whether the virtual nodes are equal - - Whether the virtual equality test was valid, if false then other means must be used to determine equality - - + - Checks this Node for equality against another Blank Node + Gets the String representation of the writer which is a description of the syntax it produces - Other Blank Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Checks this Node for equality against another Graph Literal Node + Class for generating RDF/XML Concrete Syntax - Other Graph Literal Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + + This is a fast writer based on the fast writing technique used in the other non-RDF/XML Writers. + + + Note: If the Graph to be serialized makes heavy use of collections it may result in a StackOverflowException. To address this set the CompressionLevel property to < 5 + - + - Checks this Node for equality against another Literal Node + Creates a new RDF/XML Writer - Other Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- - + - Checks this Node for equality against another URI Node + Creates a new RDF/XML Writer - Other URI Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Compression Level - + - Checks this Node for equality against another Variable Node + Creates a new RDF/XML Writer - Other Variable Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Compression Level + Whether to use DTDs to further compress output - + - Copies the Virtual Node into another Graph + Gets/Sets Pretty Print Mode for the Writer - Target Graph - - + - Gets the Hash Code of the Virtual Node + Gets/Sets the Compression Level in use - + + + Compression Level defaults to High - if Compression Level is set to below More i.e. < 5 then Collections will not be compressed into more compact syntax + + - + - Method to be implemented in derived classes to provide comparison of VirtualId values + Gets/Sets whether DTDs are used in the output - The other virtual ID value to be compared with this node's virtual ID value. - The comparison result. 
- + - Gets the String representation of the Node + Gets/Sets the Default Namespaces that are always available - - + - Gets the data for serialization + Gets the type of the Triple Formatter used by this writer - Serialization Information - Streaming Context - Thrown because serializing a Virtual Node would be lossy - + - Gets the schema for XML serialization + Saves a Graph in RDF/XML syntax to the given File - + Graph to save + Filename to save to - + - Reads the data for XML deserialization + Saves a Graph to an arbitrary output stream - XML Reader - Thrown because serializing a Virtual Node would be lossy + Graph to save + Stream to save to - + - Writes the data for XML deserialization + Internal method which generates the RDF/Json Output for a Graph - XML Writer - Thrown because serializing a Virtual Node would be lossy + Graph to save + Stream to save to - + - Abstract Base implementation of a Virtual Blank Node + Internal Helper method for raising the Warning event - Node ID Type - Graph ID Type + Warning Message - + - Creates a new Virtual Blank Node + Event which is raised when there is a non-fatal issue with the RDF being output - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Blank Node + Gets the String representation of the writer which is a description of the syntax it produces - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + - + - Takes post materialisation actions + A convenience wrapper that allows a single graph to be written as the default + graph using a store writer. - + - Gets the Internal ID of the Blank Node + Create a new writer instance that wraps the specified instance. 
+ The instance that will do the writing - + + + + + + + + + + + + + - Compares this Node to another Blank Node + Class for saving SPARQL Result Sets to CSV format (not a standardised format) - Other Blank Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Checks this Node for equality against another Blank Node + Saves a SPARQL Result Set to CSV format - Other Blank Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Result Set + File to save to - + - Checks this Node for equality against another Blank Node + Saves a SPARQL Result Set to CSV format - Other Blank Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Result Set + Writer to save to - + - Compares this Node to another Blank Node + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected - Other Blank Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + Warning Message - + - Throws an error as blank nodes cannot be cast to types + Event raised when a non-fatal issue with the SPARQL Results being written is detected - - + - Throws an error as blank nodes cannot be cast to types + Gets the String representation of the writer which is a description of the syntax it produces - + - Throws an error as blank nodes cannot be cast to types + Class for saving SPARQL Result Sets to a HTML Table format (this is not a standardised format) - - + - Throws an error as blank nodes cannot be cast to types + Gets/Sets the Default Namespaces used to pretty print URIs in the output - - + - Throws an error as blank nodes cannot be cast to types + Saves the Result Set to the given File as a HTML Table - + Result Set to save + File to save to - + - Throws an error as blank nodes cannot be cast to types + Saves the Result Set to the given Stream as a HTML Table - + Result Set to save + Stream to save to - + - Throws an error as blank nodes cannot be cast to types + Internal method which generates the HTML Output for the Sparql Results - + + - + - Throws an error as blank nodes cannot be cast to types + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected - + Warning Message - + - Throws an error as blank nodes cannot be cast to a time span + Event raised when a non-fatal issue with the SPARQL Results being written is detected - - + - Gets the URI of the datatype this valued node represents as a String + Gets the String representation of the writer which is a description of the syntax it produces + - + - Gets the numeric type of the node + Class for saving Sparql Result Sets to the SPARQL Results JSON Format - + - Abstract Base implementation of a Virtual Graph Literal Node + Saves the Result Set to the given File in the SPARQL Results JSON Format - Node ID Type - Graph ID Type + Result Set to save + File to save to - + - Creates a new Virtual Graph Literal Node + Saves 
the Result Set to the given Stream in the SPARQL Results JSON Format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider + Result Set to save + Stream to save to - + - Creates a new Virtual Graph Literal Node + Internal method which generates the SPARQL Query Results JSON output - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Result Set to save + Stream to save to - + - Takes post materialisation actions + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Warning Message - + - Gets the subgraph this Graph Literal represents + Event raised when a non-fatal issue with the SPARQL Results being written is detected - + - Compares this Node to another Graph Literal Node + Gets the String representation of the writer which is a description of the syntax it produces - Other Graph Literal Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Checks this Node for equality against another Graph Literal Node + Class for saving SPARQL Result Sets in the RDF serialization in the RDF format of your choice (default Turtle) - Other Graph Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- - + - Checks this Node for equality against another Graph Literal Node + Creates a new SPARQL RDF Writer which will save Result Sets in the RDF serialization using Turtle syntax - Other Graph Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Compares this Node to another Graph Literal Node + Creates a new SPARQL RDF Writer which will save Result Sets in the RDF serialization in your chosen RDF Syntax - Other Graph Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - + RDF Writer to use - + - Throws an error as graph literal nodes cannot be cast to types + Saves the SPARQL Result Set to the given File - + Result Set to save + File to save to - + - Throws an error as graph literal nodes cannot be cast to types + Saves the SPARQL Result Set to the given Stream - + Result Set to save + Stream to save to - + - Throws an error as graph literal nodes cannot be cast to types + Method which generates the RDF Graph of a SPARQL Result Set + Result Set - + - Throws an error as graph literal nodes cannot be cast to types + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected - + Warning Message - + - Throws an error as graph literal nodes cannot be cast to types + Event raised when a non-fatal issue with the SPARQL Results being written is detected - - + - Throws an error as graph literal nodes cannot be cast to types + Gets the String representation of the writer which is a description of the syntax it produces - + - Throws an error as graph literal nodes cannot be cast to types + Class for saving SPARQL Result Sets to TSV 
format (not a standardised format) - - + - Throws an error as graph literal nodes cannot be cast to types + Saves a SPARQL Result Set to TSV format - + Result Set + File to save to - + - Throws an error as graph literals cannot be cast to a time span + Saves a SPARQL Result Set to TSV format - + Result Set + Writer to save to - + - Gets the URI of the datatype this valued node represents as a String + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Warning Message - + - Gets the numeric type of the node + Event raised when a non-fatal issue with the SPARQL Results being written is detected - + - Abstract Base implementation of a Virtual Literal Node + Gets the String representation of the writer which is a description of the syntax it produces - Node ID Type - Graph ID Type + - + - Creates a new Virtual Literal Node + Class for saving Sparql Result Sets to the Sparql Results XML Format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - + - Creates a new Virtual Literal Node + Saves the Result Set to the given File in the Sparql Results XML Format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Result Set to save + File to save to - + - Takes post materialisation actions + Saves the Result Set to the given Stream in the Sparql Results XML Format + + - + - Gets the lexical value of the Literal + Method which generates the Sparql Query Results XML Format serialization of the Result Set + - + - Gets the language specifier (if any) of the Literal + Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Warning Message - + - Gets the Datatype (if any) of the Literal + Event raised when a non-fatal issue with the SPARQL Results being written is detected - + - Compares this Node to another Literal Node + Gets the String representation of the writer which is a description of the syntax 
it produces - Other Literal Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Checks this Node for equality against another Literal Node + Class for generating arbitrary XML Output from SPARQL Result Sets by transforming the XML Results Format via an XSLT stylesheet - Other Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Checks this Node for equality against another Literal Node + Creates a new SPARQL XSLT Writer - Other Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Stylesheet URI - + - Compares this Node to another Literal Node + Creates a new SPARQL XSLT Writer - Other Literal Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + Stylesheet URI - + - Ensures that a strong value has been determined for this node + Saves a SPARQL Result Set to the given File + Result Set + File to save to - + - Gets the value as a string + Saves a SPARQL Result Set to the given Text Writer - - - Forces a materialisation of the value - + Result Set + Text Writer to write to - + - Gets the value as an integer + Gets the String representation of the writer which is a description of the syntax it produces - - Forces a materialisation of the value - - + - Gets the value as a decimal + Static Helper class for the writing of RDF Graphs and SPARQL Result Sets to Strings rather than Streams/Files - - - Forces a materialisation of the value - - + - Gets the value as a float + Writes the Graph to a String and returns the output in your chosen concrete RDF Syntax + Graph to save + Writer to use to generate the concrete RDF Syntax - Forces a materialisation of the value + Since the API allows for any TextWriter to be passed to the Save() method of a IRdfWriter you can just pass in a StringWriter to the Save() method to get the output as a String. This method simply provides a wrapper to doing just that. - + - Gets the value as a double + Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax + Triple Store + Writer to use to generate conrete RDF Syntax - - Forces a materialisation of the value - - + - Gets the value as a boolean + Writes the SPARQL Result Set to a String and returns the Output in your chosen format + SPARQL Result Set + Writer to use to generate the SPARQL Results output - - Forces a materialisation of the value - - + - Gets the value as a date time + Class for writing a Triple Store in named Graph TriG syntax to a file/stream - - Forces a materialisation of the value + + For efficiency the TriG Writer splits it's writing over several threads (currently 4), these threads share a reference to a Context object which gives Global writing context eg. 
the target TextWriter being written to. Each thread generates temporary local writing contexts as it goes along, each of these is scoped to writing a specific Graph. Graphs are written to a StringWriter so the output for each Graph is built completely and then written in one go to the TextWriter specified as the target of the writing in the global context. + + Designed to be Thread Safe - should be able to call Save() from several threads with no issue. See Remarks for potential performance impact of this. - + - Gets the value as a date time + Gets/Sets whether High Speed Write Mode is permitted - - - Forces a materialisation of the value - - + - Gets the value as a time span + Gets/Sets whether Pretty Printing is used - - - Forces a materialisation of the value - - + - Gets the URI of the datatype this valued node represents as a String + Gets/Sets the Compression Level for the writer - + - Gets the numeric type of the node + Gets/Sets whether N3 Compatability Mode is used, in this mode an = is written after Graph Names so an N3 parser can read the TriG file correctly + + Defaults to false from the 0.4.1 release onwards + - + - Abstract Base implementation of a Virtual URI Node + Gets/Sets whether multi-threaded writing will be used to generate output faster - Node ID Type - Graph ID Type - + - Creates a new Virtual URI Node + Saves a Store in TriG (Turtle with Named Graphs) format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider + Store to save + File to save to - + - Creates a new Virtual URI Node + Saves a Store in TriG (Turtle with Named Graphs) format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Store to save + Writer to save to - + - Takes post materialisation actions + Saves a Store in TriG (Turtle with Named Graphs) format + Store to save + Writer to save to + Boolean flag indicating if should be left open after the store is saved - + - Gets the URI + Generates the Output for a Graph as a String in 
TriG syntax + Context for writing the Store + Context for writing the Graph + - + - Compares this Node to another URI Node + Generates the Output for a Triple as a String in Turtle syntax - Other URI Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - + Context for writing the Store + Context for writing the Graph - + - Checks this Node for equality against another URI Node + Generates Output for Nodes in Turtle syntax - Other URI Node + Context for writing the Store + Context for writing the Graph + Node to generate output for + Segment of the Triple being written - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Checks this Node for equality against another URI Node + Delegate for the SaveGraphs method - Other URI Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Context for writing the Store - + - Compares this Node to another URI Node + Thread Worker method which writes Graphs to the output - Other URI Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + Context for writing the Store - + - Gets the string value of the node + Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of - + - Throws an error as URI nodes cannot be cast to numerics + Internal Helper method which raises the Warning event only if there is an Event Handler registered - + Warning Message - + - Throws an error as URI nodes cannot be cast to numerics + Gets the String representation of the writer which is a description of the syntax it produces - + - Throws an error as URI nodes cannot be cast to numerics + Class for serialzing Triple Stores in the TriX format - - + - Throws an error as URI nodes cannot be cast to numerics + Saves a Store in TriX format - + Store to save + File to save to - + - Throws an error as URI nodes cannot be cast to a boolean + Saves a Store in TriX format - + Store to save + Writer to save to - + - Throws an error as URI nodes cannot be cast to a date time + Saves a Store in TriX format - + Store to save + Writer to save to + Boolean flag indicating if should be closed after the store is saved - + - Throws an error as URI nodes cannot be cast to a date time + Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of - - + - Throws an error as URIs cannot be cast to a time span + Internal Helper method which raises the Warning event only if there is an Event Handler registered + + Warning Message + + + + Gets the String representation of the writer which is a description of the syntax it produces - + - Gets the URI of the datatype this valued node represents as a String + Class for generating TSV output from RDF Datasets - + - Gets the numeric type of the expression + Gets the type of the Triple Formatter used by this writer - + - Abstract Base implementation of a Virtual Variable Node + Saves a Triple Store to TSV format - Node ID Type - Graph 
ID Type + Triple Store to save + File to save to - + - Creates a new Virtual Variable Node + Saves a Triple Store to TSV format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider + Triple Store to save + Writer to save to - + - Creates a new Virtual Variable Node + Saves a Triple Store to TSV format - Graph the Node belongs to - Virtual ID - Virtual RDF Provider - Materialised Value + Triple Store to save + Writer to save to + Boolean flag indicating if should be left open after the store is saved. - + - Takes post materialisation actions + Delegate for the SaveGraphs method + Context for writing the Store - + - Gets the Variable Name + Thread Worker method which writes Graphs to the output + Context for writing the Store - + - Compares this Node to another Variable Node + Generates the Output for a Graph as a String in TSV syntax - Other Variable Node + Context for writing the Store + Context for writing the Graph - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Checks this Node for equality against another Variable Node + Generates Output for the given Node - Other Variable Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + Writer Context + Node + Triple Context - + - Checks this Node for equality against another Variable Node + Event which is raised if the Writer detects a non-fatal error with the RDF being output - Other Variable Node - - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- - + - Compares this Node to another Variable Node + Gets the String representation of the writer which is a description of the syntax it produces - Other Variable Node - - Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - - + - Throws an error as variables nodes cannot be cast to types + Class for generating TSV files from RDF Graphs - - + - Throws an error as variables nodes cannot be cast to types + Gets the type of the Triple Formatter used by this writer - - + - Throws an error as variables nodes cannot be cast to types + Saves a Graph to TSV format - + Graph + File to save to - + - Throws an error as variables nodes cannot be cast to types + Saves a Graph to TSV format - + Graph + Writer to save to - + - Throws an error as variables nodes cannot be cast to types + Event which is raised if the Writer detects a non-fatal error with the RDF being output - - + - Throws an error as variables nodes cannot be cast to types + Gets the String representation of the writer which is a description of the syntax it produces - + - Throws an error as variables nodes cannot be cast to types + Class for generating RDF in Turtle Syntax - + + Similar in speed to the CompressingTurtleWriter but doesn't use the full Blank Node and Collection syntax compressions + + Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Throws an error as variables nodes cannot be cast to types + Creates a new Turtle Writer - - + - Throws an error as variables cannot be cast to a time span + Creates a new Turtle Writer - + Turtle Syntax - + - Gets the URI of the datatype this valued node represents as a String + Gets/Sets whether Pretty Printing is used - + - Gets the numeric type of the node + Gets/Sets whether the Writer is allowed to use High Speed 
write mode + High Speed Write Mode is engaged when the Writer determines that the contents of the Graph are not well suited to Turtle syntax compressions. Usually the writer compresses triples into groups by Subject using Predicate-Object lists to output the Triples relating to each Subject. If the number of distinct Subjects is greater than 75% of the Triples in the Graph then High Speed write mode will be used, in High Speed mode all Triples are written fully and no grouping of any sort is done. - + - A Cache that maps from Virtual IDs to Materialised Values + Gets the type of the Triple Formatter used by this writer - + - Creates a new Virtual ID cache + Saves a Graph to a File - Function that maps Node IDs to dictionary keys + Graph to save + Filename to save to - + - Gets/Sets the materialised value for a particular Virtual ID + Saves a Graph using an arbitrary TextWriter - Virtual ID - + Graph to save + Writer to save using - + - A Cache that maps from Virtual IDs to Materialised Values where the IDs map directly to dictionary keys + Generates the Output for a Graph - Node ID Type + Context for writing the Graph - + - Creates a new Simple Virtual Node Cache + Generates the Output for a Node in Turtle Syntax + Context for writing the Graph + Node to generate Output for + Segment of the Triple being written + - + - - Namespace for ctorage classes which provide support for using arbitrary backing Stores - - - Storage is managed via the IStorageProvider interface, see the Triple Store Integration documentation on the main website for more detail. - -

Data Provider Libraries

- - From the 0.5.0 release onwards any triple store integration that requires additional dependencies are provided with their own library to reduce dependencies in the Core library and allow that functionality to be optional. The following stores are currently provided in separate libraries: - -
    -
  • Virtuoso - Virtuoso support can be found in the dotNetRDF.Data.Virtuoso.dll library and requires one additional dependency.
  • -
+ Helper method for raising the Warning event
+ Warning Message
- + - Abstract Base Class for HTTP based Storage API implementations + Event which is raised when a non-fatal issue with the Graph being serialized is encountered - - - Does not actually implement any interface rather it provides common functionality around HTTP Proxying - - - If the library is compiled with the NO_PROXY symbol then this code adds no functionality - - - + - Creates a new connector + Gets the String representation of the writer which is a description of the syntax it produces + - + - Whether the User has provided credentials for accessing the Store using authentication + Possible URI Reference Types - + - Sets a Proxy Server to be used + Must be a QName - Proxy Address - + - Sets a Proxy Server to be used + May be a QName or a URI - Proxy Address - + - Gets/Sets a Proxy Server to be used + URI Reference - + - Clears any in-use credentials so subsequent requests will not use a proxy server + URI - + - Sets Credentials to be used for Proxy Server + Class containing constants for possible Compression Levels - Username - Password + These are intended as guidance only, Writer implementations are free to interpret these levels as they desire or to ignore them entirely and use their own levels - + - Sets Credentials to be used for Proxy Server + No Compression should be used (-1) - Username - Password - Domain - + - Gets/Sets Credentials to be used for Proxy Server + Minimal Compression should be used (0) - + - Clears the in-use proxy credentials so subsequent requests still use the proxy server but without credentials + Default Compression should be used (1) - + - Gets/Sets the HTTP Timeouts used specified in milliseconds + Medium Compression should be used (3) - - - Defaults to 30 seconds (i.e. 
the default value is 30,000) - - - It is important to understand that this timeout only applies to the HTTP request portions of any operation performed and that the timeout may apply more than once if a POST operation is used since the timeout applies separately to obtaining the request stream to POST the request and obtaining the response stream. Also the timeout does not in any way apply to subsequent work that may be carried out before the operation can return so if you need a hard timeout on an operation you should manage that yourself. - - - When set to a zero/negative value then the standard .Net timeout of 100 seconds will apply, use if you want the maximum possible timeout i.e. if you expect to launch extremely long running operations. - - - Not supported under Silverlight, Windows Phone and Portable Class Library builds - - - + - Password for accessing the Store + More Compression should be used (5) - + - Password for accessing the Store + High Compression should be used (10) - + - Helper method which applies standard request options to the request, these currently include proxy settings and HTTP timeout + Class containing constants for standardised Writer Error Messages - HTTP Web Request - HTTP Web Request with standard options applied - + - Helper method which adds standard configuration information (proxy and timeout settings) to serialized configuration + Error message produced when a User attempts to serialize a Graph containing Graph Literals - Object Node representing the IStorageProvider whose configuration is being serialized - Serialization Context - + - Sets the credentials to be used with basic authentication + Error message produced when a User attempts to serialize a Graph containing Unknown Node Types - + - Abstract Base Class for HTTP Based IAsyncStorageProvider implementations + Error message produced when a User attempts to serialize a Graph containing Triples with Literal Subjects - - - It is expected that most classes extending from 
this will also then implement separately for their synchronous communication, this class purely provides partial helper implementations for the asynchronous communication - - - + - Creates a new Base Async HTTP Connector + Error message produced when a User attempts to serialize a Graph containing Triples with Literal Predicates - + - Gets the parent server (if any) + Error message produced when a User attempts to serialized a Graph containing Triples with Graph Literal Predicates - + - Gets the parent server (if any) + Error message produced when a User attempts to serialize a Graph containing Triples with Blank Node Predicates - + - Loads a Graph from the Store asynchronously + Error message produced when a User attempts to serialize a Graph containing URIs which cannot be reduced to a URIRef or QName as required by the serialization - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Error message produced when a User attempts to serialize a Graph containing collections where a collection item has more than one rdf:first triple - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Error messages produced when errors occur in a multi-threaded writing process - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Error message produced when a User attempts to serialize a Variable Node in a format which does not support it - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback - + - Helper method for doing async load operations, callers just need to provide an appropriately prepared HTTP request + Gets an Error message indicating that Graph Literals are not serializable with the appropriate RDF format name inserted in the error - HTTP Request - Handler to load 
with - Callback - State to pass to the callback + RDF format (syntax) + - + - Saves a Graph to the Store asynchronously + Gets an Error message indicating that Unknown Node Types are not serializable with the appropriate RDF format name inserted in the error - Graph to save - Callback - State to pass to the callback + RDF format (syntax) + - + - Helper method for doing async save operations, callers just need to provide an appropriately perpared HTTP requests and a RDF writer which will be used to write the data to the request body + Gets an Error message indicating that Variable Nodes are not serializable with the appropriate RDF format name inserted in the error - HTTP request - RDF Writer - Graph to save - Callback - State to pass to the callback + RDF format (syntax) + - + - Updates a Graph in the Store asychronously + Gets an Error message indicating that Literal Subjects are not serializable with the appropriate RDF format name inserted in the error - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + RDF format (syntax) + - + - Updates a Graph in the Store asychronously + Gets an Error message indicating that Literal Predicates are not serializable with the appropriate RDF format name inserted in the error - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + RDF format (syntax) + - + - Helper method for doing async update operations, callers just need to provide an appropriately prepared HTTP request and a RDF writer which will be used to write the data to the request body + Gets an Error message indicating that Graph Literal Predicates are not serializable with the appropriate RDF format name inserted in the error - HTTP Request - RDF writer - URI of the Graph to update - Triples - Callback - State to pass to the callback + RDF format (syntax) + - + - Deletes a Graph from the Store + Gets an Error message indicating that 
Blank Node Predicates are not serializable with the appropriate RDF format name inserted in the error - URI of the Graph to delete - Callback - State to pass to the callback + RDF format (syntax) + - + - Deletes a Graph from the Store + Gets an Error message indicating that a multi-threading writer process failed - URI of the Graph to delete - Callback - State to pass to the callback + RDF format (syntax) + - + - Helper method for doing async delete operations, callers just need to provide an appropriately prepared HTTP request + Indicates which Segment of a Triple Node Output is being generated for - HTTP request - Whether a 404 response counts as success - URI of the Graph to delete - Callback - State to pass to the callback + + Used by Writers and Formatters to ensure restrictions on which Nodes can appear where in the syntax are enforced + - + - Lists the Graphs in the Store asynchronously + Subject of the Triple - Callback - State to pass to the callback - + - Indicates whether the Store is ready to accept requests + Predicate of the Triple - + - Gets whether the Store is read only + Object of the Triple - + - Gets the IO Behaviour of the Store + Controls what type of collections - + - Gets whether the Store supports Triple level updates via the UpdateGraph() method + Find all collections - + - Gets whether the Store supports Graph deletion via the DeleteGraph() method + Find explicit collections only (those specified with Blank Node syntax) - + - Gets whether the Store supports listing graphs via the ListGraphs() method + Find implicit collections only (those using rdf:first and rdf:rest) - + - Diposes of the Store + Class used to store Collections as part of the writing process for Compressing Writers - + - Helper method for doing async operations where a sequence of HTTP requests must be run + Creates a new Instance of a Collection - HTTP requests - Callback - State to pass to the callback + Whether the collection is explicit (specified using square bracket 
notation) or implicit (specified using normal parentheses) - + - Flags Enumeration which is used to express the IO Behaviours of a specific Store + Gets whether this is an Explicit collection (specified using square bracket notation) - + - Indicates whether the Store is Read Only i.e. Saving is not supported + Gets/Sets whether the Collection has been written - + - Indicates that the Store is a Triple Store + Gets the Triples that make up the Collection - + - Indicates that the Store is a Quad (Graph) Store + Possible Output Formats for Nodes - + - Indicates whether the Store has an explicit unnamed default graph + Format for NTriples - + - Indicates whether the Store has named graphs + Format for Turtle - + - Indicates that a Triple Store appends Triples when the SaveGraph() method is used + Format for Notation 3 - + - Indicates that a Triple Store overwrites Triples when the SaveGraph() method is used + Format for Uncompressed Turtle - + - Indicates that Graph data written to the Default Graph is always appended when the SaveGraph() method is used + Format for Uncompressed Notation 3 - + - Indicates that Graph data written to the Default Graph overwrites existing data when the SaveGraph() method is used + Helper methods for writers - + - Indicates that Graph data written to Named Graphs is always appended when the SaveGraph() method is used + Determines whether a Blank Node ID is valid as-is when serialised in NTriple like syntaxes (Turtle/N3/SPARQL) + ID to test + + If false is returned then the writer will alter the ID in some way - + - Indicates that Graph data written to Named Graphs overwrites existing data when the SaveGraph() method is used + Determines whether a Blank Node ID is valid as-is when serialised as NTriples + + - + - Indicates a Store that can do Triple Level additions on existing Graphs using the UpdateGraph() method + Determines whether a given Uri refers to one of the Default Graph URIs assigned to the default Graph when parsing from some 
RDF dataset syntax + Uri to test + - + - Indicates a Store that can do Triple Level removals on existing Graphs using the UpdateGraph() method + Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes + Graph to find collections in + Triple Collection in which Triples that have been output are to be listed - + - Indicates that a Store has a notion of explicit empty graphs + Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes - - For some quad stores the existence of a graph may only be defined in terms of one/more quads being stored in that graph - + Writer Context + Collection Search Mode - + - Indicates that the Store is from a system which provides access to multiple stores (such an implementation will usually implement the IStorageServer interface) - at a minimum this usually means the store will allow you to list other available stores. More complex abilities like creating and deleting stores are indicated by other flags. 
+ Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes + Writer Context - + - Indicates that the Store provides the means to create additional Stores + Encodes values for use in XML + Value to encode + + The value with any ampersands escaped to & + - + - Indicates that the Store provides the means to delete Stores + Interface for Handlers which handle the SPARQL Results produced by parsers - + - Indicates a Store that can do Triple Level additions and removals on existing Graphs using the UpdateGraph() method + Starts the Handling of Results - + - Default Behaviour for Read Only Triple Stores + Ends the Handling of Results + Indicates whether parsing completed without error - + - Default Behaviour for Read Only Quad (Graph) Stores + Handles a Boolean Result + Result - + - Default Behaviour for Triple Stores + Handles a Variable Declaration - - Default Behaviour is considered to be a Triple Store where data is appended - + Variable Name + - + - Default Behaviour for Quad (Graph) Stores + Handles a SPARQL Result - - Default Behaviour is considered to be Quad Store with Default and Named Graphs, data is appended to the default graph and overwrites named graphs - + Result + - + - Behaviour for fully fledged storage servers i.e. 
multiple stores are supported and can be created and deleted as desired + Interface for Reader Classes which parser Sparql Result Set syntaxes into Result Set objects - + - Reasoning modes supported by Stardog + Loads a Result Set from the given Stream + Stream to read from + Result Set to load into + + Should throw an error if the Result Set is not empty - + - No Reasoning (default) + Loads a Result Set from the given File + File containing a Result Set + Result Set to load into + + Should throw an error if the Result Set is not empty - + - OWL-QL Reasoning + Loads a Result Set from the given Input + Input to read from + Result Set to load into + + Should throw an error if the Result Set is not empty - + - OWL-EL Reasoning + Loads a Result Set using a Results Handler from the given Stream + Results Handler + Stream to read from - + - OWL-RL Reasoning + Loads a Result Set using a Results Handler from the given file + Results Handler + File to read results from - + - OWL-DL Reasoning + Loads a Result Set using a Results Handler from the given Input + Results Handler + Input to read from - + - RDFS Reasoning + Event raised when a non-fatal issue with the SPARQL Results being parsed is detected - + - RDFS, QL, RL, and EL axioms, plus SWRL rules + Interface for Writer classes which serialize Sparql Result Sets into concrete results set syntaxes - + - As of Stardog 3.x the reasoning mode is no longer a connection property and is instead managed at the database level + Saves the Result Set to the given File + Result Set to save + File to save to - + - Abstract implementation of a connector for Stardog that connects using the HTTP protocol + Saves the Result Set to the given Stream - - - Has full support for Stardog Transactions, connection is in auto-commit mode by default i.e. all write operations (Delete/Save/Update) will create and use a dedicated transaction for their operation, if the operation fails the transaction will automatically be rolled back. 
You can manage Transactions using the Begin(), Commit() and Rollback() methods. - - - The connector maintains a single transaction which is shared across all threads since Stardog is currently provides only MRSW (Multiple Reader Single Writer) concurrency and does not permit multiple transactions to occur simultaneously. - - + Result Set to save + Stream to save to - + - Constant for the default Anonymous user account and password used by Stardog if you have not supplied a shiro.ini file or otherwise disabled security + Event raised when a non-fatal issue with the SPARQL Results being written is detected - + - Creates a new connection to a Stardog Store + Interface to be implemented by Triple Store Readers - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode - + - Creates a new connection to a Stardog Store + Loads a RDF dataset into a Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID + Triple Store + File to load from - + - Creates a new connection to a Stardog Store + Loads a RDF dataset into a Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password + Triple Store + Input to load from - + - Creates a new connection to a Stardog Store + Loads a RDF dataset using a RDF Handler - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode + RDF Handler to use + File to load from - + - Creates a new connection to a Stardog Store + Loads a RDF dataset using a RDF Handler - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode - Proxy Server + RDF Handler to use + Input to load from - + - Creates a new connection to a Stardog Store + Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID - Username - Password - Reasoning Mode - Proxy Server - + - Creates a new connection to a Stardog Store + Interface to be implemented by Triple Store Writers - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Proxy Server - + - Creates a new connection to a Stardog Store + Method for saving data to a Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Proxy Server + Triple Store + File to save to - + - Gets the Base URI of the Stardog server + Method for saving data to a Triple Store + Triple Store + Write to save to - + - Gets/Sets the reasoning mode to use for queries + Method for saving data to a Triple Store + Triple Store + Write to save to + Boolean flag indicating if the output writer should be left open by the writer when it completes - + - Gets the IO Behaviour of Stardog + Event which writers can raise to indicate possible ambiguities or issues in the syntax they are producing - + - Returns that listing Graphs is supported + An Interface for classes which provide Context Information for Triples thus allowing you to create Quads with arbitrary extra information attached to Triples via your Context Objects + + A Triple Context is simply a name-value pair collection of arbitrary data that can be attached to a Triple. Internal representation of this is left to the implementor. 
+ - + - Returns that the Connection is ready + A Method which will indicate whether the Context contains some arbitrary property - + - Returns that the Connection is not read-only + A Property which exposes the arbitrary properties of the Context as an Key Based Index + Name of the Property + - + - Returns that Updates are supported on Stardog Stores + Class which implements a very basic Triple Context + + The Name Value collection is represented internally as a Dictionary + - + - Returns that deleting graphs from the Stardog store is not yet supported (due to a .Net specific issue) + Creates a new Basic Triple Context without a Source - + - Gets the parent server + Checks whether a given property is defined in this Context object + Name of the Property + - + - Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use + Gets/Sets the value of a Property - Sparql Query + Name of the Property - + - Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use, the reasoning can be set by query + Interface for Triple Stores - Sparql Query - - + A Triple Store may be a representation of some storage backed actual store or just a temporary collection of Graphs created for working with. Note that an implementation is not required to provide a definitive view of a Triple Store and may only provide a limited/partial snapshot of the underlying store. Check the documentation for the various implementations to see what type of view of a Triple Store they actually provide. 
- + - Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use processing the results using an appropriate handler from those provided + Gets whether a TripleStore is Empty - RDF Handler - Results Handler - SPARQL Query - - + - Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use processing the results using an appropriate handler from those provided, the reasoning can be set by query + Gets the Graph Collection of Graphs in this Triple Store - RDF Handler - Results Handler - SPARQL Query - - - + - Loads a Graph from the Store + Gets all the Triples in the Triple Store which are currently loaded in memory (see remarks) - Graph to load into - URI of the Graph to load - - If an empty/null URI is specified then the Default Graph of the Store will be loaded - + Since a Triple Store object may represent only a snapshot of the underlying Store evaluating this enumerator may only return some of the Triples in the Store and may depending on specific Triple Store return nothing. 
- + - Loads a Graph from the Store + Adds a Graph into the Triple Store - RDF Handler - URI of the Graph to load - - If an empty/null URI is specified then the Default Graph of the Store will be loaded - + Graph to add - + - Loads a Graph from the Store + Adds a Graph into the Triple Store - Graph to load into - Uri of the Graph to load - - If an empty/null Uri is specified then the Default Graph of the Store will be loaded - + Graph to add + Controls whether the Graph should be merged with an existing Graph of the same Uri if it already exists in the Triple Store - + - Loads a Graph from the Store + Adds a Graph into the Triple Store by dereferencing the Graph Uri to get the RDF and then load the resulting Graph into the Triple Store - RDF Handler - URI of the Graph to load - - If an empty/null URI is specified then the Default Graph of the Store will be loaded - + Uri of the Graph to be added - + - Saves a Graph into the Store (see remarks for notes on merge/overwrite behaviour) + Adds a Graph into the Triple Store by dereferencing the Graph Uri to get the RDF and then load the resulting Graph into the Triple Store - Graph to save - - - If the Graph has no URI then the contents will be appended to the Store's Default Graph. If the Graph has a URI then existing Graph associated with that URI will be replaced. 
To append to a named Graph use the UpdateGraph() method instead - - + Uri of the Graph to be added + Controls whether the Graph should be merged with an existing Graph of the same Uri if it already exists in the Triple Store - + - Updates a Graph in the Stardog Store + Removes a Graph from the Triple Store - Uri of the Graph to update - Triples to be added - Triples to be removed - - Removals happen before additions - + Graph Uri of the Graph to remove - + - Updates a Graph in the Stardog store + Checks whether the Graph with the given Uri is in this Triple Store - Uri of the Graph to update - Triples to be added - Triples to be removed + Graph Uri + - + - Deletes a Graph from the Stardog store + Gets a Graph from the Triple Store; - URI of the Graph to delete + Graph URI + - + - Deletes a Graph from the Stardog store + Event which is raised when a Graph is added - URI of the Graph to delete - + - Gets the list of Graphs in the Stardog store + Event which is raised when a Graph is removed - - + - Gets the parent server + Event which is raised when a Graphs contents changes - + - Saves a Graph to the Store asynchronously + Event which is raised when a Graph is cleared - Graph to save - Callback - State to pass to the callback - + - Loads a Graph from the Store asynchronously + Event which is raised when a Graph has a merge operation performed on it - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback - + - Updates a Graph in the Store asychronously + Interface for Triple Stores which can be queried in memory using method calls or the SPARQL implementation contained in this library - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + + + An in memory Triple Store will typically load most of the Graphs and consequently Triples contained within it into Memory as the in memory SPARQL implementation only operates over the part of the Triple Store loaded in memory. 
This being said there is no reason why an in memory store can't provide a Snapshot view of an underlying store to allow only the relevant parts of Store to be loaded and queried. + + + All the Selection Methods which do not specify a subset of Graphs on such a Triple Store should operate over the entire store + + - + - Deletes a Graph from the Store + Returns whether a given Triple is contained anywhere in the Query Triples - URI of the Graph to delete - Callback - State to pass to the callback + Triple to check for existence of + - + - Queries the store asynchronously + Selects all Triples which have a Uri Node with the given Uri from all the Query Triples - SPARQL Query - Callback - State to pass to the callback + Uri + - + - Queries the store asynchronously + Selects all Triples which contain the given Node from all the Query Triples - SPARQL Query - RDF Handler - Results Handler - Callback - State to pass to the callback + Node + - + - Helper method for creating HTTP Requests to the Store + Selects all Triples where the Object is a Uri Node with the given Uri from all the Query Triples - Path to the Service requested - Acceptable Content Types - HTTP Method - Querystring Parameters + Uri - - + - Begins a new Transaction + Selects all Triples where the Object is a given Node from all the Query Triples - - A single transaction - + Node + - + - Commits the active Transaction + Selects all Triples where the Predicate is a given Node from all the Query Triples - Thrown if there is not an active Transaction on the current Thread - - Transactions are scoped to Managed Threads - + Node + - + - Rolls back the active Transaction + Selects all Triples where the Predicate is a Uri Node with the given Uri from all the Query Triples - Thrown if there is not an active Transaction on the current Thread - - Transactions are scoped to Managed Threads - + Uri + - + - Begins a transaction asynchronously + Selects all Triples where the Subject is a given Node from all the Query 
Triples - Callback - State to pass to the callback + Node + - + - Commits a transaction asynchronously + Selects all Triples where the Subject is a Uri Node with the given Uri from all the Query Triples - Callback - State to pass to the callback + Uri + - + - Rolls back a transaction asynchronously + Selects all the Triples with the given Subject-Predicate pair from all the Query Triples - Callback - State to pass to the callback + Subject + Predicate + - + - Disposes of the Connector + Selects all the Triples with the given Predicate-Object pair from all the Query Triples + Predicate + Object + - + - Gets a String which gives details of the Connection + Selects all the Triples with the given Subject-Object pair from all the Query Triples + Subject + Object - + - Serializes the connection's configuration + Selects all Triples which have a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Configuration Serialization Context + List of the Graph URIs of Graphs you want to select over + Uri + - + - A Stardog Connector for connecting to Stardog version 1.* servers + Selects all Triples which contain the given Node from a Subset of Graphs in the Triple Store + List of the Graph URIs of Graphs you want to select over + Node + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Object is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode + List of the Graph URIs of Graphs you want to select over + Uri + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Object is a given Node from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID + List of the Graph URIs of Graphs you want to select over + Node + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Predicate is a given Node from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password + List of the Graph URIs of Graphs you want to select over + Node + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Predicate is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode + List of the Graph URIs of Graphs you want to select over + Uri + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Subject is a given Node from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode - Proxy Server + List of the Graph URIs of Graphs you want to select over + Node + - + - Creates a new connection to a Stardog Store + Selects all Triples where the Subject is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode - Proxy Server + List of the Graph URIs of Graphs you want to select over + Uri + - + - Creates a new connection to a Stardog Store + Executes a SPARQL Query on the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Proxy Server + SPARQL Query as an unparsed string + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. 
Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - Creates a new connection to a Stardog Store + Executes a SPARQL Query on the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Proxy Server + SPARQL Query as a SparqlQuery instance + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - A Stardog Connector for connecting to Stardog version 2.* servers + Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query as an unparsed string + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. 
dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - Creates a new connection to a Stardog Store + Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode + RDF Handler + Results Handler + Parsed SPARQL Query + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - Creates a new connection to a Stardog Store + Interface for Triple Stores which can be queried natively i.e. the Stores provide their own SPARQL implementations - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID + + A Natively Queryable store will typically not load its Graphs and Triples into memory as this is generally unecessary. + - + - Creates a new connection to a Stardog Store + Executes a SPARQL Query on the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password + Sparql Query as unparsed String + + + This assumes that the Store has access to some native SPARQL query processor on/at the Store which will be used to return the results. Implementations should parse the returned result into a SparqlResultSet or Graph. + - + - Creates a new connection to a Stardog Store + Executes a SPARQL Query on the Triple Store processing the results using an appropriate handler from those provided - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode + RDF Handler + Results Handler + SPARQL Query as unparsed String - + - Creates a new connection to a Stardog Store + Interface for Triple Stores which support SPARQL Update as per the SPARQL 1.1 specifications - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode - Proxy Server + + + A Store which supports this may implement various access control mechanisms which limit what operations are actually permitted + + + It is the responsibility of the Store class to ensure that commands are permissible before invoking them + + - + - Creates a new connection to a Stardog Store + Executes an Update against the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode - Proxy Server + SPARQL Update Command(s) + + As per the SPARQL 1.1 Update specification the command string may be a sequence of commands + - + - Creates a new connection to a Stardog Store + Executes a single Update Command against the Triple Store - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID - Proxy Server + SPARQL Update Command - + - Creates a new connection to a Stardog Store + Executes a set of Update Commands against the Triple Store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Proxy Server + SPARQL Update Command Set - + - Adds Stardog specific request headers + Interface for Triple Stores which can have a IInferenceEngine attached to them - - + - Executes a SPARQL Update against the Stardog store + Adds an Inference Engine to the Triple Store - SPARQL Update - - Stardog executes SPARQL update requests in their own self contained transactions which do not interact with normal Stardog transactions that may be managed via this API. In some cases this can lead to unexpected behaviour, for example if you call , make an update and then call the updates will not be rolled back. - + Reasoner to add - + - Executes a SPARQL Update against the Stardog store + Removes an Inference Engine from the Triple Store - SPARQL Update - Callback - State to pass to callback - - Stardog executes SPARQL update requests in their own self contained transactions which do not interact with normal Stardog transactions that may be managed via this API. In some cases this can lead to unexpected behaviour, for example if you call , make an update and then call the updates will not be rolled back. - + Reasoner to remove - + - A Stardog Connector for connecting to Stardog version 3.* servers + Clears all Inference Engines from the Triple Store - + - Creates a new connection to a Stardog Store + Applies Inference to the given Graph - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID + Graph to apply inference to + + Allows you to apply Inference to a Graph even if you're not putting that Graph into the Store + - + - Creates a new connection to a Stardog Store + Interface for Triple Stores which are backed by some storage layer that may delay persistence and thus require flushing to ensure changes are persisted to the backing store, as a by product such stores will typically have some notion of transactionality - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Reasoning Mode - + - Creates a new connection to a Stardog Store + Flushes any outstanding changes to the underlying store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Proxy Server - + - Creates a new connection to a Stardog Store + Discards any outstanding changes to the underlying store - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - Proxy Server - + - Adds Stardog specific request headers + Abstract Base Class for Literal Nodes - - + - A Stardog connector for connecting to Stardog servers running the latest version, currently this is version 3.* + Constants used to add salt to the hashes of different Literal Nodes - + - Creates a new connection to a Stardog Store + Constants used to add salt to the hashes of different Literal Nodes - Base Uri of the Server - Knowledge Base (i.e. Database) ID - + - Creates a new connection to a Stardog Store + Constants used to add salt to the hashes of different Literal Nodes - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Username - Password - + - Creates a new connection to a Stardog Store + Internal Only Constructor for Literal Nodes - Base Uri of the Server - Knowledge Base (i.e. Database) ID - Reasoning Mode - Proxy Server + Graph this Node is in + String value of the Literal - + - Creates a new connection to a Stardog Store + Internal Only Constructor for Literal Nodes - Base Uri of the Server - Knowledge Base (i.e. 
Database) ID - Username - Password - Reasoning Mode - Proxy Server + Graph this Node is in + String value of the Literal + Whether to Normalize the Literal Value - + - Allows you to treat an RDF Dataset File - NQuads, TriG or TriX - as a read-only generic store + Internal Only Constructor for Literal Nodes + Graph this Node is in + String value of the Literal + String value for the Language Specifier for the Literal - + - Creates a new Dataset File Manager + Internal Only Constructor for Literal Nodes - File to load from - Whether to load asynchronously + Graph this Node is in + String value of the Literal + String value for the Language Specifier for the Literal + Whether to Normalize the Literal Value - + - Internal helper method for loading the data + Internal Only Constructor for Literal Nodes - File to load from + Graph this Node is in + String value of the Literal + Uri for the Literals Data Type - + - Makes a query against the in-memory copy of the Stores data + Internal Only Constructor for Literal Nodes - SPARQL Query - + Graph this Node is in + String value of the Literal + Uri for the Literals Data Type + Whether to Normalize the Literal Value - + - Makes a query against the in-memory copy of the Stores data processing the results with one of the given handlers + Deserialization Only Constructor - RDF Handler - Results Handler - SPARQL Query - + - Loads a Graph from the Dataset + Deserialization Constructor - Graph to load into - URI of the Graph to load + Serialization Information + Streaming Context - + - Loads a Graph from the Dataset with the given Handler + Gives the String Value of the Literal - RDF Handler - URI of the Graph to load - + - Loads a Graph from the Dataset + Gives the Language Specifier for the Literal (if it exists) or the Empty String - Graph to load into - URI of the Graph to load - + - Loads a Graph from the Dataset with the given Handler + Gives the Data Type Uri for the Literal (if it exists) or a null - RDF Handler - URI of the 
Graph to load - + - Throws an error since this Manager is read-only + Implementation of the Equals method for Literal Nodes - Graph to save - Always thrown since this Manager provides a read-only connection + Object to compare the Node with + + + The default behaviour is for Literal Nodes to be considered equal IFF +
    +
  1. Their Language Specifiers are identical (or neither has a Language Specifier)
  2. +
  3. Their Data Types are identical (or neither has a Data Type)
  4. +
  5. Their String values are identical
  6. +
+ This behaviour can be overridden to use value equality by setting the LiteralEqualityMode option to be Loose if this is more suited to your application. +
- + - Gets the Save Behaviour of the Store + Implementation of the Equals method for Literal Nodes + Object to compare the Node with + + + The default behaviour is for Literal Nodes to be considered equal IFF +
    +
  1. Their Language Specifiers are identical (or neither has a Language Specifier)
  2. +
  3. Their Data Types are identical (or neither has a Data Type)
  4. +
  5. Their String values are identical
  6. +
+ This behaviour can be overridden to use value equality by setting the LiteralEqualityMode option to be Loose if this is more suited to your application. +
- + - Throws an error since this Manager is read-only + Determines whether this Node is equal to a Blank Node (should always be false) - Graph URI - Triples to be added - Triples to be removed + Blank Node + - + - Throws an error since this Manager is read-only + Determines whether this Node is equal to a Graph Literal Node (should always be false) - Graph URI - Triples to be added - Triples to be removed + Graph Literal Node + - + - Returns that Updates are not supported since this is a read-only connection + Determines whether this Node is equal to a Literal Node + Literal Node + - + - Throws an error since this connection is read-only + Determines whether this Node is equal to a URI Node (should always be false) - URI of the Graph to delete - Thrown since you cannot delete a Graph from a read-only connection + URI Node + - + - Throws an error since this connection is read-only + Determines whether this Node is equal to a Variable Node (should always be false) - URI of the Graph to delete - Thrown since you cannot delete a Graph from a read-only connection + Variable Node + - + - Returns that deleting graphs is not supported + Determines whether this Node is equal to a Literal Node + Literal Node + - + - Returns that the Manager is ready if the underlying file has been loaded + Gets a String representation of a Literal Node + + Gives a value without quotes (as some syntaxes use) with the Data Type/Language Specifier appended using Notation 3 syntax - + - Returns that the Manager is read-only + Implementation of CompareTo for Literal Nodes + Node to Compare To + + + Literal Nodes are greater than Blank Nodes, Uri Nodes and Nulls, they are less than Graph Literal Nodes. +

+ Two Literal Nodes are initially compared based upon Data Type, untyped literals are less than typed literals. Two untyped literals are compared purely on lexical value, Language Specifier has no effect on the ordering. This means Literal Nodes are only partially ordered, for example "hello"@en and "hello"@en-us are considered to be the same for ordering purposes though they are different for equality purposes. Datatyped Literals can only be properly ordered if they are one of a small subset of types (Integers, Booleans, Date Times, Strings and URIs). If the datatypes for two Literals are non-matching they are ordered on Datatype Uri, this ensures that each range of Literal Nodes is sorted to some degree. Again this also means that Literals are partially ordered since unknown datatypes will only be sorted based on lexical value and not on actual value. +
- + - Gets the list of URIs of Graphs in the Store + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against - + - Returns that listing graphs is supported + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the Source File this manager represents a read-only view of + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the String representation of the Connection + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against - + - Disposes of the Manager + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Serializes the connection's configuration + Returns an Integer indicating the Ordering of this Node compared to another Node - Configuration Serialization Context + Node to test against + - + - Class for connecting to any dataset that can be exposed via Fuseki + Gets the serialization information - - - Uses all three Services provided by a Fuseki instance - Query, Update and HTTP Update - - + Serialization Information + Streaming Context - + - Creates a new connection to a Fuseki Server + Reads the data for XML deserialization - The /data URI of the Fuseki Server + XML Reader - + - Creates a new connection to a Fuseki Server + Writes the data for XML serialization - The /data URI of the Fuseki Server + XML Writer - + - Creates a new connection to a Fuseki Server + Class for representing Literal Nodes - The /data URI of the Fuseki Server - Proxy Server - + - Creates a new connection to a Fuseki Server + Constants used to add salt to the hashes of different Literal Nodes - The /data URI of the Fuseki Server - Proxy Server - + - Returns that Listing Graphs is supported + Constants used to add salt to the hashes of different Literal Nodes - + - Gets the IO Behaviour of the Store + 
Constants used to add salt to the hashes of different Literal Nodes - + - Returns that Triple level updates are supported using Fuseki + Internal Only Constructor for Literal Nodes + Graph this Node is in + String value of the Literal - + - Gets the List of Graphs from the store + Internal Only Constructor for Literal Nodes - + Graph this Node is in + String value of the Literal + Whether to Normalize the Literal Value - + - Updates a Graph in the Fuseki store + Internal Only Constructor for Literal Nodes - URI of the Graph to update - Triples to be added - Triples to be removed + Graph this Node is in + String value of the Literal + String value for the Language Specifier for the Literal - + - Updates a Graph in the Fuseki store + Internal Only Constructor for Literal Nodes - URI of the Graph to update - Triples to be added - Triples to be removed + Graph this Node is in + String value of the Literal + String value for the Language Specifier for the Literal + Whether to Normalize the Literal Value - + - Executes a SPARQL Query on the Fuseki store + Internal Only Constructor for Literal Nodes - SPARQL Query - + Graph this Node is in + String value of the Literal + Uri for the Literals Data Type - + - Executes a SPARQL Query on the Fuseki store processing the results using an appropriate handler from those provided + Internal Only Constructor for Literal Nodes - RDF Handler - Results Handler - SPARQL Query - + Graph this Node is in + String value of the Literal + Uri for the Literals Data Type + Whether to Normalize the Literal Value - + - Executes SPARQL Updates against the Fuseki store + Deserialization Only Constructor - SPARQL Update - + - Makes a SPARQL Query against the underlying store + Deserialization Constructor - SPARQL Query - Callback - State to pass to the callback - SparqlResultSet or a Graph depending on the Sparql Query + Serialization Information + Streaming Context - + - Executes a SPARQL Query on the Fuseki store processing the results using an 
appropriate handler from those provided + Implementation of Compare To for Literal Nodes - RDF Handler - Results Handler - SPARQL Query - Callback - State to pass to the callback + Literal Node to Compare To + + Simply invokes the more general implementation of this method + - + - Executes SPARQL Updates against the Fuseki store + Determines whether this Node is equal to a Literal Node - SPARQL Update - Callback - State to pass to the callback + Literal Node + - + - Lists the graph sin the Store asynchronously + Class for representing Literal Nodes where the Literal values are not normalized - Callback - State to pass to the callback - + - Updates a Graph on the Fuseki Server + Internal Only Constructor for Literal Nodes - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + Graph this Node is in + String value of the Literal - + - Gets a String which gives details of the Connection + Internal Only Constructor for Literal Nodes - + Graph this Node is in + String value of the Literal + Lanaguage Specifier for the Literal - + - Serializes the connection's configuration + Internal Only Constructor for Literal Nodes - Configuration Serialization Context + Graph this Node is in + String value of the Literal + Uri for the Literals Data Type - + - Provides a wrapper around an in-memory store + Deserialization Only Constructor - - - Useful if you want to test out some code using temporary in-memory data before you run the code against a real store or if you are using some code that requires an IStorageProvider interface but you need the results of that code to be available directly in-memory. 
- - - + - Creates a new In-Memory Manager which is a wrapper around a new empty in-memory store + Deserialization Constructor + Serialization Information + Streaming Context - + - Creates a new In-Memory Manager which is a wrapper around an in-memory store + Implementation of Compare To for Literal Nodes - Triple Store + Literal Node to Compare To + + + Simply invokes the more general implementation of this method + - + - Creates a new In-Memory Manager which is a wrapper around a SPARQL Dataset + Represents the definition of a MIME Type including mappings to relevant readers and writers - Dataset - + - Loads a Graph from the Store + Creates a new MIME Type Definition - Graph to load into - Graph URI to load + Syntax Name for the Syntax which has this MIME Type definition + MIME Types + File Extensions - + - Loads a Graph from the Store + Creates a new MIME Type Definition - RDF Handler - Graph URI to load + Syntax Name for the Syntax which has this MIME Type definition + Format URI as defined by the W3C + MIME Types + File Extensions - + - Loads a Graph from the Store + Creates a new MIME Type Definition - Graph to load into - Graph URI to load + Syntax Name for the Syntax which has this MIME Type definition + MIME Types + File Extensions + Type to use to parse RDF (or null if not applicable) + Type to use to parse RDF Datasets (or null if not applicable) + Type to use to parse SPARQL Results (or null if not applicable) + Type to use to writer RDF (or null if not applicable) + Type to use to write RDF Datasets (or null if not applicable) + Type to use to write SPARQL Results (or null if not applicable) - + - Loads a Graph from the Store + Creates a new MIME Type Definition - RDF Handler - Graph URI to load + Syntax Name for the Syntax which has this MIME Type definition + Format URI as defined by the W3C + MIME Types + File Extensions + Type to use to parse RDF (or null if not applicable) + Type to use to parse RDF Datasets (or null if not applicable) + Type to 
use to parse SPARQL Results (or null if not applicable) + Type to use to writer RDF (or null if not applicable) + Type to use to write RDF Datasets (or null if not applicable) + Type to use to write SPARQL Results (or null if not applicable) - + - Saves a Graph to the Store + Gets the name of the Syntax to which this MIME Type Definition relates - Graph - + - Gets the IO Behaviour for In-Memory stores + Gets the Format URI as defined by the W3C (where applicable) - + - Updates a Graph in the Store + Gets the Encoding that should be used for reading and writing this Syntax - URI of the Graph to Update - Triples to be added - Triples to be removed - + - Updates a Graph in the Store + Gets the MIME Types defined - URI of the Graph to Update - Triples to be added - Triples to be removed - + - Returns that Triple level updates are supported + Checks that MIME Types are valid + Type - + - Deletes a Graph from the Store + Adds a MIME Type to this definition - URI of the Graph to delete + MIME Type - + - Deletes a Graph from the Store + Gets the Canonical MIME Type that should be used - URI of the Graph to delete - + - Returns that Graph Deletion is supported + Determines whether the Definition supports a particular MIME type + MIME Type + - + - Lists the URIs of Graphs in the Store + Determines whether the definition supports the MIME type specified by the selector + MIME Type selector - + - Returns that listing graphs is supported + Gets the File Extensions associated with this Syntax - + - Returns that the Store is ready + Adds a File Extension for this Syntax + File Extension - + - Returns that the Store is not read-only + Gets whether any file extensions are associated with this syntax - + - Makes a SPARQL Query against the Store + Gets/Sets the Canonical File Extension for this Syntax - SPARQL Query - - + - Makes a SPARQL Query against the Store processing the results with the appropriate processor from those given + Determines whether the Definition supports a 
particular File Extension - RDF Handler - Results Handler - SPARQL Query + File Extension - + - Applies SPARQL Updates to the Store + Ensures that a given Type implements a required Interface - SPARQL Update + Property to which we are assigning + Type + Required Interface Type - + - Queries the store asynchronously + Gets/Sets the Type to use to parse RDF (or null if not applicable) - SPARQL Query - Callback - State to pass to the callback - + - Queries the store asynchronously + Gets/Sets the Type to use to parse RDF Datasets (or null if not applicable) - SPARQL Query - RDF Handler - Results Handler - Callback - State to pass to the callback - + - Updates the store asynchronously + Gets/Sets the Type to use to parse SPARQL Results (or null if not applicable) - SPARQL Update - Callback - State to pass to the callback - + - Disposes of the Manager + Gets/Sets the Type to use to writer RDF (or null if not applicable) - + - Gets a String representation of the Manager + Gets/Sets the Type to use to writer RDF Dataets (or null if not applicable) - - + - Serializes the Configuration of the Manager + Gets/Sets the Type to use to write SPARQL Results (or null if not applicable) - Configuration Serialization Context - + - Provides a Read-Only wrapper that can be placed around another IStorageProvider instance + Gets whether this definition can instantiate a Parser that can parse RDF - - - This is useful if you want to allow some code read-only access to a mutable store and ensure that it cannot modify the store via the manager instance - - - + - Creates a new Read-Only connection which is a read-only wrapper around another store + Gets whether this definition can instantiate a Parser that can parse RDF Datasets - Manager for the Store you want to wrap as read-only - + - Gets the parent server (if any) + Gets whether this definition can instantiate a Parser that can parse SPARQL Results - + - Loads a Graph from the underlying Store + Gets whether the definition provides a 
RDF Writer - Graph to load into - URI of the Graph to load - + - Loads a Graph from the underlying Store + Gets whether the Definition provides a RDF Dataset Writer - Graph to load into - URI of the Graph to load - + - Loads a Graph from the underlying Store + Gets whether the Definition provides a SPARQL Results Writer - RDF Handler - URI of the Graph to load - + - Loads a Graph from the underlying Store + Gets an instance of a RDF parser - RDF Handler - URI of the Graph to load + - + - Throws an exception since you cannot save a Graph using a read-only connection + Gets an instance of a RDF writer - Graph to save - Thrown since you cannot save a Graph using a read-only connection + - + - Gets the IO Behaviour of the read-only connection taking into account the IO Behaviour of the underlying store + Gets an instance of a RDF Dataset parser + - + - Throws an exception since you cannot update a Graph using a read-only connection + Gets an instance of a RDF Dataset writer - URI of the Graph - Triples to be added - Triples to be removed - Thrown since you cannot update a Graph using a read-only connection + - + - Throws an exception since you cannot update a Graph using a read-only connection + Gets an instance of a SPARQL Results parser - URI of the Graph - Triples to be added - Triples to be removed - Thrown since you cannot update a Graph using a read-only connection + - + - Returns that Update is not supported + Gets an instance of a SPARQL Results writer + - + - Throws an exception as you cannot delete a Graph using a read-only connection + Gets whether a particular Type of Object can be parsed - URI of the Graph to delete - Thrown since you cannot delete a Graph using a read-only connection + Object Type + - + - Throws an exception as you cannot delete a Graph using a read-only connection + Gets an Object Parser for the given Type - URI of the Graph to delete - Thrown since you cannot delete a Graph using a read-only connection + Object Type + - + - Returns that 
deleting graphs is not supported + Sets an Object Parser for the given Type + Object Type + Parser Type - + - Gets the list of graphs in the underlying store + Gets an Object Parser for the given Type + Object Type - + - Returns whether listing graphs is supported by the underlying store + Gets the registered Object Parser Types - + - Returns whether the Store is ready + Selector used in selecting which MIME type to use - + - Returns that the Store is read-only + Creates a MIME Type selector + MIME Type + Order the selector appears in the input + - + - Disposes of the Store + Creates an enumeration of MIME type selectors + MIME Types + - + - Gets the String representation of the Manager + Creates a new MIME Type Selector - + MIME Type to match + Charset + Quality (in range 0.0-1.0) + Order of appearance (used as precendence tiebreaker where necessary) - + - Serializes the Configuration of the Manager + Gets the selected type - Configuration Serialization Context + A type string of the form type/subtype assuming the type if valid - + - Provides a Read-Only wrapper that can be placed around another IQueryableStorage instance + Gets the range type if this is a range selector - - - This is useful if you want to allow some code read-only access to a mutable store and ensure that it cannot modify the store via the manager instance - - + A type string of the form type/ if this is a range selector, otherwise null - + - Creates a new Queryable Read-Only connection which is a read-only wrapper around another store + Gets the Charset for the selector (may be null if none specified) - Manager for the Store you want to wrap as read-only - + - Executes a SPARQL Query on the underlying Store + Gets the quality for the selector (range of 0.0-1.0) - SPARQL Query - - + - Executes a SPARQL Query on the underlying Store processing the results with an appropriate handler from those provided + Gets the order of apperance for the selector (used as precedence tiebreaker where necessary) - 
RDF Handler - Results Handler - SPARQL Query - - + - Lists the Graphs in the Store + Gets whether the selector if for a */* pattern i.e. accept any - - + - Returns that listing Graphs is supported + Gets whether the selector is for a type/* pattern i.e. accept any sub-type of the given type - + - Controls how the SparqlConnector loads Graphs from the Endpoint + Gets whether the selector is invalid - + - Graphs are loaded by issuing a DESCRIBE query using the Graph URI + Gets whether the selector is for a specific MIME type e.g. type/sub-type - + - Graphs are loaded by issuing a CONSTRUCT FROM query using the Graph URI + Sorts the selector in precedence order according to the content negotiation rules from the relevant RFCs + Selector to compare against + - + - Class for connecting to any SPARQL Endpoint as a read-only Store + Gets the string representation of the selector as it would appear in an Accept header + - - This class is effectively a read-only wrapper around a SparqlRemoteEndpoint using it with it's default settings, if you only need to query an endpoint and require more control over the settings used to access the endpoint you should use that class directly or use the constructors which allow you to provide your own pre-configure SparqlRemoteEndpoint instance - - - Unlike other HTTP based connectors this connector does not derive from BaseHttpConnector - if you need to specify proxy information you should do so on the SPARQL Endpoint you are wrapping either by providing a SparqlRemoteEndpoint instance pre-configured with the proxy settings or by accessing the endpoint via the Endpoint property and programmatically adding the settings. 
- + Unless this is an invalid selector this will always be a valid selector that could be appended to a MIME type header - - - Underlying SPARQL query endpoint - - - + - Method for loading graphs + Helper Class containing definitions of MIME Types for the various RDF Concrete Syntaxes and Content Negotation Methods - + - Whether to skip local parsing + Constant for W3C File Formats Namespace - + - Timeout for endpoints + MIME Type for accept any content Type - + - Creates a new SPARQL Connector which uses the given SPARQL Endpoint + MIME Type for URL Encoded WWW Form Content used when POSTing over HTTP - Endpoint - + - Creates a new SPARQL Connector which uses the given SPARQL Endpoint + MIME Type for URL Enoded WWW Form Content used when POSTing over HTTP in UTF-8 encoding - Endpoint - Load Method to use - + - Creates a new SPARQL Connector which uses the given SPARQL Endpoint + MIME Type for Multipart Form Data - Endpoint URI - + - Creates a new SPARQL Connector which uses the given SPARQL Endpoint + MIME Types for Turtle - Endpoint URI - Load Method to use - + - Gets the parent server (if any) + MIME Types for RDF/XML - + - Controls whether the Query will be parsed locally to accurately determine its Query Type for processing the response + MIME Types for Notation 3 - - If the endpoint you are connecting to provides extensions to SPARQL syntax which are not permitted by the libraries parser then you may wish to enable this option as otherwise you will not be able to execute such queries - - + - Gets/Sets the HTTP Timeout in milliseconds used for communicating with the SPARQL Endpoint + MIME Types for NTriples - + - Gets the underlying SparqlRemoteEndpoint which this class is a wrapper around + MIME Types for NQuads - + - Makes a Query against the SPARQL Endpoint + MIME Types for TriG - SPARQL Query - - + - Makes a Query against the SPARQL Endpoint processing the results with an appropriate handler from those provided + MIME Types for TriX - RDF Handler - Results 
Handler - SPARQL Query - - + - Loads a Graph from the SPARQL Endpoint + MIME Types for RDF/JSON - Graph to load into - URI of the Graph to load - + - Loads a Graph from the SPARQL Endpoint + MIME types for JSON-LD - RDF Handler - URI of the Graph to load - + - Loads a Graph from the SPARQL Endpoint + MIME Types for SPARQL Result Sets - Graph to load into - URI of the Graph to load - + - Loads a Graph from the SPARQL Endpoint + MIME Types for SPARQL Results XML - RDF Handler - URI of the Graph to load - + - Throws an error since this Manager is read-only + MIME Types for SPARQL Results JSON - Graph to save - Always thrown since this Manager provides a read-only connection - + - Gets the IO Behaviour of SPARQL Connections + MIME Types for SPARQL Boolean Result - + - Throws an error since this Manager is read-only + MIME Types for CSV - Graph URI - Triples to be added - Triples to be removed - + - Throws an error since this Manager is read-only + MIME Types for TSV - Graph URI - Triples to be added - Triples to be removed - + - Returns that Updates are not supported since this connection is read-only + MIME Types for HTML - + - Throws an exception as this connector provides a read-only connection + MIME Type for SPARQL Queries - URI of this Graph to delete - Thrown since this connection is read-only so you cannot delete graphs using it - + - Throws an exception as this connector provides a read-only connection + MIME Type for SPARQL Updates - URI of this Graph to delete - Thrown since this connection is read-only so you cannot delete graphs using it - + - Returns that deleting graphs is not supported + Default File Extension for Turtle Files - + - Lists the Graphs in the Store + Default File Extension for RDF/XML - - + - Returns that listing graphs is supported + Default File Extension for Notation 3 - + - Returns that the Connection is ready + Default File Extension for NTriples - + - Returns that the Connection is read-only + Default File Extension for Json formats 
- + - Disposes of the Connection + Default file extension for JSON-LD formats - + - Gets a String which gives details of the Connection + Default File Extension for RDF/JSON - - + - Serializes the connection's configuration + Default File Extension for SPARQL XML Results Format - Configuration Serialization Context - + - Class for connecting to any SPARQL server that provides both a query and update endpoint + Default File Extension for SPARQL JSON Results Format - - - This class is a wrapper around a and a . The former is used for the query functionality while the latter is used for the update functionality. As updates happen via SPARQL the behaviour with respects to adding and removing blank nodes will be somewhat up to the underlying SPARQL implementation. This connector is not able to carry out operations which attempt to delete blank nodes and cannot guarantee that added blank nodes bear any relation to existing blank nodes in the store. - - - Unlike other HTTP based connectors this connector does not derive from BaseHttpConnector - if you need to specify proxy information you should do so on the SPARQL Endpoint you are wrapping either by providing endpoint instance pre-configured with the proxy settings or by accessing the endpoint via the Endpoint and UpdateEndpoint properties and programmatically adding the settings. 
- - - + - Creates a new connection + Default File Extension for TriG - Query Endpoint - Update Endpoint - Method for loading graphs - + - Creates a new connection + Default File Extension for NQuads - Query Endpoint - Update Endpoint - + - Creates a new connection + Default File Extension for TriX - Query Endpoint - Update Endpoint - Method for loading graphs - + - Creates a new connection + Default File Extension for CSV - Query Endpoint - Update Endpoint - + - Gets the underlying SparqlRemoteUpdateEndpoint which this class is a wrapper around + Default File Extension for TSV - + - Gets/Sets the HTTP Timeout in milliseconds used for communicating with the SPARQL Endpoint + Default File Extension for HTML - + - Gets that deleting graphs is supported + Default File Extension for XHTML - + - Gets that the store is not read-only + Default File Extension for SPARQL Queries - + - Gets the IO behaviour for the store + Default File Extension for SPARQL Updates - + - Gets that triple level updates are supported, see the remarks section of the for exactly what is and isn't supported + Default File Extension for GZip - + - Deletes a graph from the store + Extensions which are considered stackable - URI of the graph to delete - + - Deletes a graph from the store + Charset constants - URI of the graph to delete - + - Saves a graph to the store + Charset constants - Graph to save - + - Updates a graph in the store + List of MIME Type Definition - URI of the graph to update - Triples to add - Triples to remove - + - Updates a graph in the store + Whether MIME Type Definitions have been initialised - URI of the graph to update - Triples to add - Triples to remove - + - Makes a SPARQL Update against the store + Checks whether something is a valid MIME Type - SPARQL Update + MIME Type + - + - Gets a String which gives details of the Connection + Determines whether the given string is valid as a type/subtype for a MIME type + String - + - Serializes the connection's configuration + 
Initialises the MIME Type definitions - Configuration Serialization Context - + - Class for connecting to any store that implements the SPARQL Graph Store HTTP Protocol for Managing Graphs + Resets the MIME Type Definitions (the associations between file extensions, MIME types and their respective parsers and writers) to the library defaults - The SPARQL Graph Store HTTP Protocol is defined as part of SPARQL 1.1 and is currently a working draft so implementations are not guaranteed to be fully compliant with the draft and the protocol may change in the future. - - - Note: While this connector supports the update of a Graph the Graph Store HTTP Protocol only allows for the addition of data to an existing Graph and not the removal of data, therefore any calls to UpdateGraph() that would require the removal of Triple(s) will result in an error. + May be useful if you've altered the definitions and caused something to stop working as a result - - - URI of the Protocol Server - - - - - Creates a new SPARQL Graph Store HTTP Protocol Connector - - URI of the Protocol Server - - + - Creates a new SPARQL Graph Store HTTP Protocol Connector + Gets the available MIME Type Definitions - URI of the Protocol Server - + - Creates a new SPARQL Graph Store HTTP Protocol Connector + Adds a new MIME Type Definition - URI of the Protocol Server - Proxy Server + MIME Type Definition - + - Creates a new SPARQL Graph Store HTTP Protocol Connector + Registers a parser as the default RDF Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions - URI of the Protocol Server - Proxy Server + RDF Parser + MIME Types + File Extensions - + - Gets the IO Behaviour of SPARQL Graph Store protocol based stores + Registers a parser as the default RDF Dataset Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + RDF Dataset Parser + MIME Types + File Extensions - + - Gets that 
Updates are supported + Registers a parser as the default SPARQL Rsults Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + SPARQL Results Parser + MIME Types + File Extensions - + - Returns that deleting Graphs is supported + Registers a writer as the default RDF Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + RDF Writer + MIME Types + File Extensions - + - Returns that listing Graphs is not supported + Registers a writer as the default RDF Dataset Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + RDF Dataset Writer + MIME Types + File Extensions - + - Gets that the Store is ready + Registers a writer as the default SPARQL Results Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + SPARQL Results Writer + MIME Types + File Extensions - + - Gets that the Store is not read-only + Gets all MIME Type definitions which support the given MIME Type + MIME Type + - + - Loads a Graph from the Protocol Server + Gets all MIME Type definitions which support the given MIME Types - Graph to load into - URI of the Graph to load + MIME Types + - + - Loads a Graph from the Protocol Server + Gets all MIME Types definitions which are associated with a given file extension - RDF Handler - URI of the Graph to load + File Extension + - + - Loads a Graph from the Protocol Server + Builds the String for the HTTP Accept Header that should be used when you want to ask for content in RDF formats (except Sparql Results) - Graph to load into - URI of the Graph to load + - + - Loads a Graph from the Protocol Server + Builds the String for the HTTP Accept Header that should be used for querying Sparql Endpoints where the response will be a SPARQL Result Set format - RDF Handler - URI of the Graph to load + - + - Sends a HEAD Command 
to the Protocol Server to determine whether a given Graph exists + Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data may be RDF or a SPARQL Result Set - URI of the Graph to check for + - + - Sends a HEAD Command to the Protocol Server to determine whether a given Graph exists + Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data will be an RDF dataset - URI of the Graph to check for - + - Saves a Graph to the Protocol Server + Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data may be RDF or an RDF dataset - Graph to save - + - Updates a Graph on the Protocol Server + Creates a Custom HTTP Accept Header containing the given selection of MIME Types - URI of the Graph to update - Triples to be added - Triples to be removed + Enumeration of MIME Types to use + - Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. If you attempt to remove Triples then an RdfStorageException will be thrown + + Note: No validation is done on MIME Types so it is possible to generated a malformed header using this function + - + - Updates a Graph on the Protocol Server + Creates a Custom HTTP Accept Header containing the given selection of MIME Types where those MIME Types also appear in the list of supported Types - URI of the Graph to update - Triples to be added - Triples to be removed + Enumeration of MIME Types to use + Enumeration of supported MIME Types + - Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. 
If you attempt to remove Triples then an RdfStorageException will be thrown + + Note: No validation is done on MIME Types so it is possible to generated a malformed header using this function + + + Use this function when you wish to generate a Custom Accept Header where the URI to which you are making requests supports a set range of URIs (given in the parameter) where that range of types may exceed the range of types actually supported by the library or your response processing code. + - - - Deletes a Graph from the store - - URI of the Graph to delete - - - - Deletes a Graph from the store - - URI of the Graph to delete - - + - Throws an exception as listing graphs in a SPARQL Graph Store HTTP Protocol does not support listing graphs + Creates a Custom HTTP Accept Header containing only the Accept Types supported by a specific parser + RDF Parser - Thrown since SPARQL Graph Store HTTP Protocol does not support listing graphs - + - Loads a Graph from the Protocol Server + Creates a Custom HTTP Accept Header containing only the Accept Types supported by a specific parser - Graph to load into - URI of the Graph to load - Callback - State to pass to the callback + RDF Parser + - + - Loads a Graph from the Protocol Server + Gets the Enumeration of supported MIME Types for RDF Graphs - RDF Handler - URI of the Graph to load - Callback - State to pass to the callback - + - Saves a Graph to the Protocol Server + Gets the Enumeration of supported MIME Types for RDF Datasets - Graph to save - Callback - State to pass to the callback - + - Updates a Graph on the Protocol Server + Gets the Enumeration of supported MIME Types for SPARQL Results - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback - - Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. 
If you attempt to remove Triples then an RdfStorageException will be thrown - - + - Lists the Graphs in the Store asynchronously + Gets the Enumeration of supported MIME Types for RDF Graphs or SPARQL Results - Callback - State to pass to the callback - + - Deletes a Graph from the store asynchronously + Generates a Filename Filter that can be used with any .Net application and includes all formats that dotNetRDF is aware of - URI of the graph to delete - Callback - State to pass to the callback + - + - Disposes of the Connection + Generates a Filename Filter that can be used with any .Net application and includes a user dictated subset of the formats that dotNetRDF is aware of + Allow RDF Graph formats (e.g. Turtle) + Allow RDF Dataset formats (e.g. NQuads) + Allow SPARQL Results formats (e.g. SPARQL Results XML) + Allow SPARQL Query (i.e. .rq files) + Allow SPARQL Update (i.e. .ru files) + Allow All Files (i.e. */*) + - + - Gets a String representation of the connection + Applies global options to a writer - + Writer - + - Serializes the connection's configuration + Applies global options to a parser - Configuration Serialization Context + Parser - + - Class for connecting to an AllegroGraph Store + Selects an appropriate IRdfWriter based on the given MIME Types + MIME Types + - Connection to AllegroGraph is based on their new HTTP Protocol which is an extension of the Sesame 2.0 HTTP Protocol. 
The specification for the AllegroGraph protocol can be found here + This method does not take account of any quality/charset preference parameters included in the Accept Header - If you wish to use a Store which is part of the Root Catalog on an AllegroGraph 4.x and higher server you can either use the constructor overloads that omit the catalogID parameter or pass in null as the value for that parameter + Global options pertaining to writers will be applied to the selected writer - - - Creates a new Connection to an AllegroGraph store - - Base URI for the Store - Catalog ID - Store ID - - - - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - - Base Uri for the Store - Store ID - - - - Creates a new Connection to an AllegroGraph store - - Base Uri for the Store - Catalog ID - Store ID - Username for connecting to the Store - Password for connecting to the Store - - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Selects an appropriate IRdfWriter based on the given MIME Types - Base Uri for the Store - Store ID - Username for connecting to the Store - Password for connecting to the Store + MIME Types + The Content Type header that should be sent in the Response to the Request + + + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + + + - + - Creates a new Connection to an AllegroGraph store + Selects an appropriate IRdfWriter based on the HTTP Accept header form a HTTP Request - Base Uri for the Store - Catalog ID - Store ID - Proxy Server + Value of the HTTP Accept Header + The Content Type header that should be sent in the Response to the Request + A Writer for a Content Type the client accepts and the Content Type that should be sent to the client + + + This method does not take account of any quality/charset 
preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + + - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Selects an appropriate IRdfWriter based on the HTTP Accept header form a HTTP Request - Base Uri for the Store - Store ID - Proxy Server + Value of the HTTP Accept Header + A Writer for a Content Type the client accepts + + + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + + - + - Creates a new Connection to an AllegroGraph store + Selects a based on the file extension - Base Uri for the Store - Catalog ID - Store ID - Username for connecting to the Store - Password for connecting to the Store - Proxy Server + File Extension + Thrown if no writers are associated with the given file extension + + + Global options pertaining to writers will be applied to the selected writer + + + - + - Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) + Selects a based on the file extension - Base Uri for the Store - Store ID - Username for connecting to the Store - Password for connecting to the Store - Proxy Server + File Extension + Content Type of the chosen writer + Thrown if no writers are associated with the given file extension + + + Global options pertaining to writers will be applied to the selected writer + + + - + - Gets the Catalog under which the repository you are connected to is located + Selects an appropriate IRdfReader based on the given MIME Types + MIME TYpes + - + - Makes a SPARQL Update request to the Allegro Graph server + Selects an appropriate IRdfReader based on the HTTP Content-Type header from a HTTP Response - SPARQL Update + Value of the HTTP Content-Type Header + - + - Makes a SPARQL Update request to the Allegro Graph 
server + Selects a based on the file extension - SPARQL Update - Callback - State to pass to the callback + File Extension + - + - Does nothing as AllegroGraph does not require the same query escaping that Sesame does + Selects a SPARQL Parser based on the MIME types - Query to escape + MIME Types + Whether to allow for plain text results - + - Helper method for creating HTTP Requests to the Store + Selects an appropriate ISparqlResultsReader based on the HTTP Content-Type header from a HTTP Response - Path to the Service requested - Acceptable Content Types - HTTP Method - Querystring Parameters + Value of the HTTP Content-Type Header - + - Gets a String which gives details of the Connection + Selects an appropriate ISparqlResultsReader based on the HTTP Content-Type header from a HTTP Response + Value of the HTTP Content-Type Header + Whether you allow Sparql Boolean results in text/plain format (Boolean results in text/boolean are handled properly but text/plain results can be conflated with CONSTRUCT/DESCRIBE results in NTriples format) - + - Serializes the connection's configuration + Selects a based on the file extension - Configuration Serialization Context + File Extension + - + - Class for connecting to 4store + Selects an appropriate ISparqlResultsWriter based on the given MIME Types + MIME Types + A Writer for a Content Type the client accepts - Depending on the version of RASQAL used for your 4store instance and the options it was built with some kinds of queries may not suceed or return unexpected results. + This method does not take account of any quality/charset preference parameters included in the Accept Header - Prior to the 1.x releases 4store did not permit the saving of unamed Graphs to the Store or Triple level updates. There was a branch of 4store that supports Triple level updates and you could tell the connector if your 4store instance supports this when you instantiate it. 
From the 0.4.0 release of the library onwards this support was enabled by default since the 1.x builds of 4store have this feature integrated into them by default. + Global options pertaining to writers will be applied to the selected writer - - - Creates a new 4store connector which manages access to the services provided by a 4store server - - Base Uri of the 4store - - Note: As of the 0.4.0 release 4store support defaults to Triple Level updates enabled as all recent 4store releases have supported this. You can still optionally disable this with the two argument version of the constructor - - - - - Creates a new 4store connector which manages access to the services provided by a 4store server - - Base Uri of the 4store - Indicates to the connector that you are using a 4store instance that supports Triple level updates - - If you enable Update support but are using a 4store instance that does not support Triple level updates then you will almost certainly experience errors while using the connector. - - - + - Creates a new 4store connector which manages access to the services provided by a 4store server + Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request - Base Uri of the 4store - Proxy Server + String array of accepted Content Types + The Content Type header that should be sent in the Response to the Request + A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - Note: As of the 0.4.0 release 4store support defaults to Triple Level updates enabled as all recent 4store releases have supported this. 
You can still optionally disable this with the two argument version of the constructor + + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + - + - Creates a new 4store connector which manages access to the services provided by a 4store server + Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request - Base Uri of the 4store - Indicates to the connector that you are using a 4store instance that supports Triple level updates - Proxy Server + Value of the HTTP Accept Header + The Content Type header that should be sent in the Response to the Request + A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - If you enable Update support but are using a 4store instance that does not support Triple level updates then you will almost certainly experience errors while using the connector. + + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + - + - Returns whether this connector has been instantiated with update support or not + Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request + Value of the HTTP Accept Header + A Writer for a Content Type the client accepts - If this property returns true it does not guarantee that the 4store instance actually supports updates it simply indicates that the user has enabled updates on the connector. If Updates are enabled and the 4store server being connected to does not support updates then errors will occur. 
+ + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + Global options pertaining to writers will be applied to the selected writer + - - - Returns that the Connection is ready - - - - - Returns that the Connection is not read-only - - - - - Gets the IO Behaviour of 4store - - - - - Returns that deleting Graph is supported - - - + - Returns that Listing Graphs is supported + Selects a based on a file extension + File Extension + - + - Loads a Graph from the 4store instance + Selects a based on a file extension - Graph to load into - Uri of the Graph to load + File Extension + Content Type of the selected writer + - + - Loads a Graph from the 4store instance using an RDF Handler + Selects a Store parser based on the MIME types - RDF Handler - URI of the Graph to load + MIME Types + - + - Loads a Graph from the 4store instance + Selects an appropriate IStoreReader based on the HTTP Content-Type header from a HTTP Response - Graph to load into - URI of the Graph to load + Value of the HTTP Content-Type Header + - + - Loads a Graph from the 4store instance + Selects a Store parser based on the file extension - RDF Handler - URI of the Graph to load + File Extension + - + - Saves a Graph to a 4store instance (Warning: Completely replaces any existing Graph with the same URI) + Selects an appropriate IStoreWriter based on the given MIME Types - Graph to save + MIME Types + - Completely replaces any existing Graph with the same Uri in the store + This method does not take account of any quality/charset preference parameters included in the Accept Header - Attempting to save a Graph which doesn't have a Base Uri will result in an error + For writers which support ICompressingWriter they will be instantiated with the Compression Level specified by Options.DefaultCompressionLevel - Thrown if you try and save a Graph without a Base Uri or if there is an error communicating with the 4store instance - + - Updates a 
Graph in the store + Selects an appropriate IStoreWriter based on the given MIME Types - Uri of the Graph to Update - Triples to be added - Triples to be removed + MIME Types + The Content Type header that should be sent in the Response to the Request + - May throw an error since the default builds of 4store don't support Triple level updates. There are builds that do support this and the user can instantiate the connector with support for this enabled if they wish, if they do so and the underlying 4store doesn't support updates errors will occur when updates are attempted. + + This method does not take account of any quality/charset preference parameters included in the Accept Header + + + For writers which support ICompressingWriter they will be instantiated with the Compression Level specified by Options.DefaultCompressionLevel + - + - Updates a Graph in the store + Selects an appropriate IStoreWriter based on the HTTP Accept header form a HTTP Request - Uri of the Graph to Update - Triples to be added - Triples to be removed - - May throw an error since the default builds of 4store don't support Triple level updates. There are builds that do support this and the user can instantiate the connector with support for this enabled if they wish, if they do so and the underlying 4store doesn't support updates errors will occur when updates are attempted. 
- + Value of the HTTP Accept Header + The Content Type header that should be sent in the Response to the Request + A Writer for a Content Type the client accepts and the Content Type that should be sent to the client + This method does not take account of any quality/charset preference parameters included in the Accept Header - + - Makes a SPARQL Query against the underlying 4store Instance + Selects an appropriate IStoreWriter based on the HTTP Accept header form a HTTP Request - SPARQL Query - A Graph or a SparqlResultSet - - Depending on the version of RASQAL used and the options it was built with some kinds of queries may not suceed or return unexpected results. - + Value of the HTTP Accept Header + A Writer for a Content Type the client accepts + This method does not take account of any quality/charset preference parameters included in the Accept Header - + - Makes a SPARQL Query against the underlying 4store Instance processing the results with the appropriate handler from those provided + Selects a by file extension - RDF Handler - Results Handler - SPARQL Query + File Extension + - + - Deletes a Graph from the 4store server + Selects a by file extension - Uri of Graph to delete + File Extension + Content Type of the selected writer + - + - Deletes a Graph from the 4store server + Selects the appropriate MIME Type for the given File Extension if the File Extension is a standard extension for an RDF format - Uri of Graph to delete + File Extension + - + - Lists the Graphs in the Store + Gets all the MIME Types associated with a given File Extension + File Extension - + - Applies a SPARQL Update against 4store + Gets the true file extension for a filename - SPARQL Update + + - Note: Please be aware that some valid SPARQL Updates may not be accepted by 4store since the SPARQL parser used by 4store does not support some of the latest editors draft syntax changes. 
+ + This is an alternative to using which is designed to take into account known extensions which are used in conjunction with other extensions and mask the true extension, for example .gz + + + Consider the filename example.ttl.gz, obtaining the extension the standard way gives only .gz which is unhelpful since it doesn't actually tell us the underlying format of the data only that it is GZipped and if it is GZipped we almost certainly want to stream the data rather than read all into memory and heuristically detect the actual format. Instead we'd like to get .ttl.gz as the file extension which is much more useful and this is what this function does. + + + Important: This method does not blindly return double extensions whenever they are present (since they may simply by period characters in the filename and not double extensions at all) rather it returns double extensions only when the standard extension is an extension is known to be used with double extensions e.g. .gz that is relevan to the library + - + - Saves a Graph to the Store asynchronously + Gets the true extension for a resource - Graph to save - Callback - State to pass to the callback + Resource + - + - Loads a Graph from the Store asynchronously + Selects the appropriate File Extension for the given MIME Type - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback + MIME Type + - + - Updates a Graph in the Store asychronously + Selects the appropriate File Extension for the given RDF Writer - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + RDF Writer + - + - Deletes a Graph from the Store + Selects the appropriate File Extension for the given Store Writer - URI of the Graph to delete - Callback - State to pass to the callback + Store Writer + - + - Updates the store asynchronously + Delegate Type for the Events of the Namespace Mapper - SPARQL Update - Callback - State to pass to the callback 
+ Namespace Prefix + Namespace Uri - + - Queries the store asynchronously + Class for representing Mappings between Prefixes and Namespace URIs - SPARQL Query - Callback - State to pass to the callback - + - Queries the store asynchronously + Constant Uri for the RDF Namespace - SPARQL Query - RDF Handler - Results Handler - Callback - State to pass to the callback - + - Disposes of a 4store connection + Constant Uri for the RDF Scheme Namespace - + - Gets a String which gives details of the Connection + Constant Uri for the XML Scheme Namespace - - + - Serializes the connection's configuration + Constant Uri for the OWL Namespace - - + - Structure for representing Triples that are waiting to be Batch written to the Database + Mapping of Prefixes to URIs - + - Creates a new Batch Triple + Mapping of URIs to Prefixes - Triple - Graph ID to store Triple for - + - Triple + Constructs a new Namespace Map + The Prefixes rdf, rdfs and xsd are automatically defined - + - Graph ID + Constructs a new Namespace Map which is optionally empty + Whether the Namespace Map should be empty, if set to false the Prefixes rdf, rdfs and xsd are automatically defined - + - Equality for Batch Triples + Constructs a new Namespace Map which is based on an existing map - Object to test - + - + - Hash Code for Batch Triples + Returns the Prefix associated with the given Namespace URI - + The Namespace URI to lookup the Prefix for + String prefix for the Namespace - + - Abstract Base Class for connecting to any Store that supports the Sesame 2.0 HTTP Communication protocol + Returns the Namespace URI associated with the given Prefix - - - See here for the protocol specification, this base class supports Version 5 of the protocol which does not include SPARQL Update support - - + The Prefix to lookup the Namespace URI for + URI for the Namespace - + - Base Uri for the Store + Adds a Namespace to the Namespace Map + Namespace Prefix + Namespace Uri - + - Store ID + Removes a Namespace from the 
NamespaceMapper + Namespace Prefix of the Namespace to remove - + - Repositories Prefix + Method which checks whether a given Namespace Prefix is defined + Prefix to test + - + - Query Path Prefix + Method which checks whether a given Namespace is defined + Namespace to test - + - Update Path Prefix + Clears the Namespace Map - + - Whether to do full encoding of contexts + Gets a Enumerator of all the Prefixes - + - Whether queries should always be posted + A Function which attempts to reduce a Uri to a QName + The Uri to attempt to reduce + The value to output the QName to if possible + + This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. - + - Server the store is hosted on + Imports the contents of another Namespace Map into this Namespace Map + Namespace Map to import + + Prefixes in the imported Map which are already defined in this Map are ignored, this may change in future releases. 
+ - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Event which is raised when a Namespace is Added - Base Uri of the Store - Store ID - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Event which is raised when a Namespace is Modified - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Event which is raised when a Namespace is Removed - Base Uri of the Store - Store ID - Proxy Server - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Internal Helper for the NamespaceAdded Event which raises it only when a Handler is registered - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - Proxy Server + Namespace Prefix + Namespace Uri - + - Gets the Base URI to the repository + Internal Helper for the NamespaceModified Event which raises it only when a Handler is registered + Namespace Prefix + Namespace Uri - + - Gets the Repository Name that is in use + Internal Helper for the NamespaceRemoved Event which raises it only when a Handler is registered + Namespace Prefix + Namespace Uri - + - Gets the Save Behaviour of Stores that use the Sesame HTTP Protocol + Disposes of a Namespace Map + + + + + Class for representing Mappings from URIs to QNames + + + Used primarily in outputting RDF syntax + + + + + Mapping of URIs to QNames - + - Returns that Updates are supported on Sesame HTTP Protocol supporting Stores + Next available Temporary Namespace ID - + - Returns that deleting graphs from the Sesame store is supported + Creates a new QName Output Mapper using the given Namespace Map + Namespace Map - + - Returns that listing Graphs is supported + Creates a new QName Output Mapper which has an empty Namespace Map - + - 
Returns that the Connection is ready + A Function which attempts to reduce a Uri to a QName + The Uri to attempt to reduce + The value to output the QName to if possible + + This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. - + - Returns that the Connection is not read-only + A Function which attempts to reduce a Uri to a QName and issues a Temporary Namespace if required + The Uri to attempt to reduce + The value to output the QName to if possible + The Temporary Namespace issued (if any) + + + + This function will always returns a possible QName for the URI if the format of the URI permits it. It doesn't guarentee that the QName will be valid for the syntax it is being written to - it is up to implementers of writers to validate the QNames returned. + + + Where necessary a Temporary Namespace will be issued and the tempNamespace parameter will be set to the prefix of the new temporary namespace + + - + - Gets the parent server + Adds a QName mapping to the cache + URI + Mapping - + - Makes a SPARQL Query against the underlying Store + Gets the next available Temporary Namespace ID - SPARQL Query - + - Makes a SPARQL Query against the underlying Store processing the results with an appropriate handler from those provided + Thread Safe version of the QNameOutputMapper - RDF Handler - Results Handler - SPARQL Query - - + - Escapes a Query to avoid a character encoding issue when communicating a query to Sesame + Creates a new Thread Safe QName Output Mapper - Query - + Namespace Mapper - + - Gets the Content Type used to save data to the store i.e. 
the MIME type to use for the Content-Type header + Adds a QName Mapping to the Cache in a Thread Safe way - + Key + Value - + - Creates an RDF Writer to use for saving data to the store + Adds a Namespace to the QName Output Mapper - + Prefix + Namespace URI - + - Loads a Graph from the Store + Represents a mapping from a URI to a QName - Graph to load into - Uri of the Graph to load - If a Null Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Loads a Graph from the Store + Creates a new QName Mapping - RDF Handler - Uri of the Graph to load - If a Null Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded + URI - + - Loads a Graph from the Store + URI this is a mapping for - Graph to load into - Uri of the Graph to load - If a Null/Empty Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Loads a Graph from the Store + QName this URI maps to - RDF Handler - Uri of the Graph to load - If a Null/Empty Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Saves a Graph into the Store (Warning: Completely replaces any existing Graph with the same URI unless there is no URI - see remarks for details) + Gets the String representation of the URI - Graph to save - - If the Graph has no URI then the contents will be appended to the Store, if the Graph has a URI then existing data associated with that URI will be replaced - + - + - Updates a Graph + Checks whether this is equal to another Object - Uri of the Graph to update - Triples to be added - Triples to be removed + Object to test against + - + - Updates a Graph + A Namespace Mapper which has an explicit notion of Nesting - Uri of the Graph to update - Triples to be added - Triples to be removed - + - Deletes a Graph from the Sesame store + Constructs a new Namespace Map - URI of the Graph to delete + 
The Prefixes rdf, rdfs and xsd are automatically defined - + - Deletes a Graph from the Sesame store + Constructs a new Namespace Map which is optionally empty - URI of the Graph to delete + Whether the Namespace Map should be empty, if set to false the Prefixes rdf, rdfs and xsd are automatically defined - + - Gets the list of Graphs in the Sesame store + Adds a Namespace at the Current Nesting Level - + Prefix + Namespace URI - + - Gets the parent server + Clears the Namespace Map - + - Saves a Graph to the Store asynchronously + Gets the Namespace URI for the given Prefix at the current Nesting Level - Graph to save - Callback - State to pass to the callback + Prefix + - + - Loads a Graph from the Store asynchronously + Gets the Namespace Prefix for the given URI at the current Nesting Level - Handler to load with - URI of the Graph to load - Callback - State to pass to the callback + Namespace URI + - + - Updates a Graph in the Store asychronously + Gets the Nesting Level at which the given Namespace is definition is defined - URI of the Graph to update - Triples to be added - Triples to be removed - Callback - State to pass to the callback + Prefix + - + - Deletes a Graph from the Store + Gets whether the given Namespace exists - URI of the Graph to delete - Callback - State to pass to the callback + Prefix + - + - Makes a SPARQL Query against the underlying store + Imports another Namespace Map into this one - SPARQL Query - Callback - State to pass to the callback - SparqlResultSet or a Graph depending on the Sparql Query + Namespace Map - + - Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice + Increments the Nesting Level - RDF Handler - SPARQL Results Handler - SPARQL Query - Callbakc - State to pass to the callback - + - Helper method for creating HTTP Requests to the Store + Decrements the Nesting Level - Path to the Service requested - Acceptable Content Types - HTTP Method - 
Querystring Parameters - + + When the Nesting Level is decremented any Namespaces defined at a greater Nesting Level are now out of scope and so are removed from the Mapper + - + - Disposes of the Connector + Gets the current Nesting Level - + - Gets a String which gives details of the Connection + Event which occurs when a Namespace is added - - + - Serializes the connection's configuration + Event which occurs when a Namespace is modified - Configuration Serialization Context - + - Connector for connecting to a Store that supports the Sesame 2.0 HTTP Communication protocol + Event which occurs when a Namespace is removed - - Acts as a synonym for whatever the latest version of the Sesame HTTP Protocol that is supported by dotNetRDF might be. Currently this is Version 6 which includes SPARQL Update support (Sesame 2.4+ required) - - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Internal Helper for the NamespaceAdded Event which raises it only when a Handler is registered - Base Uri of the Store - Store ID + Namespace Prefix + Namespace Uri - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Internal Helper for the NamespaceModified Event which raises it only when a Handler is registered - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication + Namespace Prefix + Namespace Uri - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Internal Helper for the NamespaceRemoved Event which raises it only when a Handler is registered - Base Uri of the Store - Store ID - Proxy Server + Namespace Prefix + Namespace Uri - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Gets the Namespace Prefixes - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - Proxy Server - + - Connector for 
connecting to a Store that supports the Sesame 2.0 HTTP Communication Protocol version 5 (i.e. no SPARQL Update support) + Tries to reduce a URI to a QName using this Namespace Map + URI + Resulting QName + - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Removes a Namespace provided that Namespace is defined on the current Nesting Level - Base Uri of the Store - Store ID + Prefix - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Disposes of the Namespace Map - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Class used to hold Nested Namespace definition information - Base Uri of the Store - Store ID - Proxy Server - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Creates a new Nested Mapping - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - Proxy Server + Prefix + Namespace URI + Nesting Level - + - Connector for connecting to a Store that supports the Sesame 2.0 HTTP Communication Protocol version 6 (i.e. 
includes SPARQL Update support) + Creates a new Nested Mapping + Prefix + Namespace URI - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Gets the Nesting Level - Base Uri of the Store - Store ID - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Gets the Namespace Prefix - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + Gets the Namespace URI - Base Uri of the Store - Store ID - Proxy Server - + - Creates a new connection to a Sesame HTTP Protocol supporting Store + A default implementation of a Node Factory which generates Nodes unrelated to Graphs (wherever possible we suggest using a Graph based implementation instead) - Base Uri of the Store - Store ID - Username to use for requests that require authentication - Password to use for requests that require authentication - Proxy Server - + - Makes a SPARQL Update request to the Sesame server + Creates a new Node Factory - SPARQL Update - + - Makes a SPARQL Update request to the Sesame server + Creates a Blank Node with a new automatically generated ID - SPARQL Update - Callback - State to pass to the callback + - + - Class of exceptions that may occur when performing SPARQL Updates + Creates a Blank Node with the given Node ID + Node ID + - + - Creates a new RDF Update Exception + Creates a Graph Literal Node which represents the empty Subgraph - Error Message + - + - Createa a new RDF Update Exception + Creates a Graph Literal Node which represents the given Subgraph - Error Message - Exception that caused this exception to be thrown + Subgraph + - + - Class for representing Timeout errors that occur while updating RDF using SPARQL + Creates a Literal Node with the given Value and Data Type + Value of the Literal + Data Type URI of the Literal + - + - Creates a new SPARQL Update Timeout 
Exception + Creates a Literal Node with the given Value - Error Message + Value of the Literal + - + - Class for representing Permissions errors with SPARQL Updates + Creates a Literal Node with the given Value and Language + Value of the Literal + Language Specifier for the Literal + - + - Creates a new Permission Exception + Creates a URI Node for the given URI - Error Message + URI + - + - Creates a new Permission Exception + Creates a Variable Node for the given Variable Name - Error Message - Exception that caused this exception to be thrown + + - + - Class for representing malformed SPARQL Updates + Creates a new unused Blank Node ID and returns it - - This is distinct from a RdfParseException as it is possible for an update to be syntactically valid but semantically malformed - + - + - Creates a new Malformed Update Exception + A Graph Factory provides access to consistent Graph References so that Nodes and Triples can be instantiated in the correct Graphs - Error Message + + + Primarily designed for internal use in some of our code but may prove useful to other users hence is a public class. Internally this is just a wrapper around a TripleStore instance. + + + The main usage for this class is scenarios where consistent graph references matter such as returning node references from out of memory datasets (like SQL backed ones) particularly with regards to blank nodes since blank node equality is predicated upon Graph reference. + + - + - Creates a new Malformed Update Exception + Gets a Graph Reference for the given Graph URI - Error Message - Exception that caused this exception to be thrown + Graph URI + - + - Class of exceptions that may occur when using the SPARQL Graph Store HTTP Protocol for Graph Management + Gets a Graph Reference for the given Graph URI + Graph URI + + + Synonym for the index access method i.e. 
factory[graphUri] + - + - Creates a new SPARQL Graph Store HTTP Protocol Exception + Gets a Graph Reference for the given Graph URI and indicates whether this was a new Graph reference - Error Message + Graph URI + Indicates whether the returned reference was newly created + - + - Creates a new SPARQL Graph Store HTTP Protocol Exception + Resets the Factory so any Graphs with contents are emptied - Error Message - Exception that caused this Exception - + - Exception that occurs when a Protocol Processor cannot resolve the URI for the Graph to be acted upon + A private implementation of a Node Factory which returns mock constants regardless of the inputs + + + Intended for usage in scenarios where the user of the factory does not care about the values returned, for example it is used internally in the CountHandler to speed up processing + + - + - Creates a new Protocol URI Resolution Exception + Possible Literal Equality Mode Settings - + - Creates a new Protocol URI Resolution Exception + Strict Mode compares Literals according to the official W3C RDF Specification - Error Message + + This means Literals are equal if and only if: +
    +
  1. The Lexical Values are identical when compared using Ordinal Comparison
  2. +
  3. The Language Tags if present are identical
  4. +
  5. The Datatypes if present are identical
  6. +
+
- + - Exception that occurs when a Protocol Processor is provided with a invalid URI for the Graph to be acted upon + Loose Mode compares Literals based on values (if they have known Datatypes) + + This means Literals can be equal if they have lexically different values which are equivalent when converted to the Datatype. +

+ Literals without Datatypes and those whose Datatypes are unknown or not handled by the Library will be compared using lexical equivalence as with Strict mode. +
- + - Creates a new Protocol Invalid URI Exception + Configures Global Static Options for the Library + + Some of these are Debug Build only, please see the Remarks on individual members for more detail + - + - - Namespaces containing classes which implement the SPARQL Graph Store HTTP Protocol for RDF Graph Management - + Gets/Sets the Mode used to compute Literal Equality (Default is Strict which enforces the W3C RDF Specification) - + - Abstract Base class for SPARQL Graph Store HTTP Protocol for Graph Management implementations + Gets/Sets whether Literal Values should be normalized - + - This is the Pattern that is used to check whether ?default is present in the querystring. This is needed since IIS does not recognise ?default as being a valid querystring key unless it ends in a = which the specification does not mandate so cannot be assumed + Gets/Sets the Hard Timeout limit for SPARQL Query Execution (in milliseconds) + + This is used to stop SPARQL queries running away and never completing execution, it defaults to 3 mins (180,000 milliseconds) + - + - Processes a GET operation + Gets/Sets whether Query Optimisation should be used - HTTP Context - + - Processes a POST operation + Gets/Sets whether Algebra Optimisation should be used - HTTP Context - + - Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph + Gets/Sets whether some Optimisations considered unsafe can be used - HTTP Context - This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. + The notion of unsafe optimisations refers to optimisations that can make significant performance improvements to some types of queries but are disabled normally because they may lead to behaviour which does not strictly align with the SPARQL specification. 
+ + + One example of such an optimisation is an implicit join where the optimiser cannot be sure that the variables involved don't represent literals. - - - Processes a PUT operation - - HTTP Context - - - - Processes a DELETE operation - - HTTP Context - - + - Processes a HEAD operation + Gets/Sets the default syntax used for parsing SPARQL queries - HTTP Context + + The default is SPARQL 1.1 unless you use this property to change it + - + - Processes a PATCH operation + Gets/Sets whether functions that can't be parsed into Expressions should be represented by the UnknownFunction - HTTP Context + When set to false a Parser Error will be thrown if the Function cannot be parsed into an Expression - + - Gets the Graph URI that the request should affect + Gets/Sets whether to use rigorous query evaluation - HTTP Context - + + + Rigorous Query evaluation applies more checks to the triples produced by datasets to ensure they actually match the patterns being scanned. If the underlying index structures are able to guarantee this then rigorous evaluation may be turned off for faster evaluation which it is by default since our default and implementations will guarantee this. + + - + - Gets the Graph URI that the request should affect + Gets/Sets whether to use strict operators - HTTP Context - Graph parsed from the request body - - The Graph parameter may be null in which case the other overload of this method will be invoked + + Strict Operators refers to the interpretation of certian operators like + and - in SPARQL expression evaluation. If enabled then the operators will function only as specified in the SPARQL specification, if disabled (which is the default) then certain extensions (which the SPARQL specification allows an implementation to provide) will be allowed e.g. date time arithmetic. 
+ + + The only time you may want to disable this is if you are developing queries locally which you want to ensure are portable to other systems or when running the SPARQL compliance tests. + - + - Generates a new Graph URI that should be used to create a new Graph in the Store in conjunction with the ProcessPostCreate() operation + Gets/Sets whether the query engine will try to use PLinq where applicable to evaluate suitable SPARQL constructs in parallel - HTTP Context - Graph parsed from the request body - - Default behaviour is to mint a URI based on a hash of the Request IP and Date Time. Implementations can override this method to control URI creation as they desire + For the 0.6.1 release onwards this was an experimental feature and disabled by default, from 0.7.0 onwards this is enabled by default - + - Gets the Graph which can be parsed from the request body + Gets/Sets the Hard Timeout limit for SPARQL Update Execution (in milliseconds) - HTTP Context - - In the event that there is no request body a null will be returned + This is used to stop SPARQL Updates running away and never completing execution, it defaults to 3 mins (180,000 milliseconds) - + - Sends the given Graph to the Client via the HTTP Response + Gets/Sets the Default Compression Level used for Writers returned by the MimeTypesHelper class when the writers implement ICompressingWriter - HTTP Context - Graph to send - + - Retrieves the Graph with the given URI + Controls whether the indexed triple collections will create full indexes for the Triples inserted into it - Graph URI - - Helper method intended for use by the ProcessGet() and ProcessHead() methods + By default indexes triple collections creates indexes on Triples based upon Subjects, Predicates and Objects alone. When full indexing is enabled it also creates indexes based on Subject-Predicate, Predicate-Object and Subject-Object pairs which may improve query speed but will use additional memory. 
+ + + Default setting for Full Indexing is enabled, enabling/disabling it only has an effect on indexed triple collection instances instantiated after full indexing was enabled/disabled i.e. existing Graphs in memory using the indexed triple collections continue to use the full indexing setting that was present when they were instantiated. - + - Determines whether a Graph with the given URI exists + Gets/Sets whether the UriLoader uses caching - Graph URI - - + - A processor for the SPARQL Graph Store HTTP Protocol which operates by performing the desired operations on some arbitrary underlying Store for which an IStorageProvider is available + Gets/Sets the Timeout for URI Loader requests (Defaults to 15 seconds) - + - Creates a new Generic Protocol Processor + Gets/Sets whether a UTF-8 BOM is used for UTF-8 Streams created by dotNetRDF (this does not affect Streams passed directly to methods as open streams cannot have their encoding changed) - Generic IO Manager - + - Processes a GET operation + Gets/Sets whether IRIs are validated by parsers which support this functionality - HTTP Context - Implemented by making a call to LoadGraph() on the underlying IStorageProvider + When enabled certain parsers will validate all IRIs they see to ensure that they are valid and throw a parser error if they are not. Since there is a performance penalty associated with this and many older RDF standards were written pre-IRIs (thus enforcing IRI validity would reject data considered valid by those specifications) this feature is disabled by default. - + - Processes a POST operation + Gets/Sets whether Blocking IO should be forced - HTTP Context - - Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. - - - Otherwise this is implemented using UpdateGraph() if updates are supported, if not then the Graph has to be loaded, the POSTed data merged into it and then the Graph is saved again. 
- + Blocking IO refers to how the parsing sub-system reads in inputs, it will use Blocking/Non-Blocking IO depending on the input source. In most cases the detection of which to use should never cause an issue but theoretically in some rare cases using non-blocking IO may lead to incorrect parsing errors being thrown (premature end of input detected), if you suspect this is the case try enabling this setting. If you still experience this problem with this setting enabled then there is some other issue with your input. - + - Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph + Gets/Sets whether Basic HTTP authentication should be forced - HTTP Context - This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. + There have been reported problems where some servers don't cope nicely with the HTTP authentication challenge response procedure resulting in failed HTTP requests. If the server only uses Basic HTTP authentication then you can opt to force dotNetRDF to always include the HTTP basic authentication header in requests and thus workaround this problem. + + + Warning: Under Silverlight this will only work correctly if usernames and passwords are composed only of characters within the ASCII range. - + - Processes a PUT operation + Gets/Sets whether a DTD should be used for some XML formats to compress output - HTTP Context - - - Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. - - - Implemented by calling SaveGraph() on the underlying manager - - - + - Processes a DELETE operation + Gets/Sets whether multi-theaded writing is permitted - HTTP Context - - Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. 
- - - The delete operation does not explicitly remove the Graph but simply replaces it with an empty Graph - + In some contexts multi-threaded writing may not even work due to restrictions on thread types since we use the System.Threading.WaitAll method which is only valid in MTA contexts. - + - Processes a HEAD operation + Gets/Sets whether the library will attempt to intern URIs to reduce memory usage - HTTP Context - + - Processes a PATCH operation + Gets/Sets the default token queue mode used for tokeniser based parsers - HTTP Context - + - Retrieves the Graph with the given URI + Gets/Sets whether HTTP Request and Response Information should be output to the Console Standard Out for Debugging purposes - Graph URI - - + - Determines whether a Graph with the given URI exists + Gets/Sets whether the HTTP Response Stream should be output to the Console Standard Output for Debugging purposes - Graph URI - - + - A processor for the SPARQL Graph Store HTTP Protocol which operates by using the libraries in-memory Leviathan SPARQL engine and converting protocol actions to SPARQL Query/Update commands as appropriate + Gets/Sets the default culture literal comparison when literals are string or not implicitely comparable (different types, parse/cast error...) + + The default is set to the invariant culture to preserve behavioural backwards compatibility with past versions of dotNetRDF + - + - Creates a new Leviathan Protocol Processor + Gets/Sets the default collation for literal comparison when literals are string or not implicitely comparable (different types, parse/cast error...) 
- Triple Store + + The default is set to to preserve behavioural backwards compatibility with past versions of dotNetRDF + - + - Creates a new Leviathan Protocol Processor + Represents a Triple that is queued for persistence (either insertion/deletion) - SPARQL Dataset - + - A processor for the SPARQL Graph Store HTTP Protocol which operates by translating the requests into SPARQL Query/Update commands as specified by the SPARQL Graph Store HTTP Protocol specification and passing the generated commands to a ISparqlUpdateProcessor which will handle the actual application of the updates + Creates a new Triple Persistence Action (an insertion/deletion) - - The conversion from HTTP operation to SPARQL Query/Update is as defined in the SPARQL 1.1 Graph Store HTTP Protocol specification - + Triple to persist + Whether the Triple is to be deleted - + - Creates a new Protocol to Update Processor + Creates a new Triple Persistence Action (an insertion) - Query Processor - Update Processor + Triple to persist - + - Processes a GET operation + Gets the Triple to persist - HTTP Context - + - Processes a POST operation + Gets whether the action is a Delete Action - HTTP Context - + - Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph + Possible Types of Graph Persistence Actions - HTTP Context - - - This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. 
- - - + - Processes a PUT operation + Graph was Added - HTTP Context - + - Processes a DELETE operation + Graph was Deleted - HTTP Context - + - Processes a HEAD operation + Graph was Modified - HTTP Context - + - Processes a PATCH operation + Represents a Graph that is queued for persistence (added/modified/removed) - HTTP Context - + - Retrieves the Graph with the given URI + Creates a new Graph Persistence action - Graph URI - + Graph + Action Type - + - Determines whether a Graph with the given URI exists + Creates a new Graph Persistence action - Graph URI - + Graph + Action Type - + - Interface for SPARQL Graph Store HTTP Protocol for Graph Management processors + Gets the Graph to be persisted - + - Processes a GET operation which should retrieve a Graph from the Store and return it + Gets the Action Type - HTTP Context - + - Processes a POST operation which should add triples to a Graph in the Store + Represents an action on a Triple Store that is queued for persistence - HTTP Context - + - Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph + Creates a new persistence action that pertains to a Graph - HTTP Context - - - This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. 
- - + Graph Action - + - Processes a PUT operation which should save a Graph to the Store completely replacing any existing Graph with the same URI + Creates a new persistence action that pertains to a Triple - HTTP Context + Triple Action - + - Processes a DELETE operation which delete a Graph from the Store + Gets whether this action pertains to a Graph - HTTP Context - + - Processes a HEAD operation which gets information about a Graph in the Store + Gets whether this action peratins to a Triple - HTTP Context - + - Processes a PATCH operation which may choose + Gets the Graph Action (if any) - - + + + Gets the Triple Action (if any) + + + + Represents an in-memory view of a triple store provided by an IStorageProvider instance where changes to the in-memory view get reflected in the persisted view. + + +

Persistence Behaviour

- Namespace for performing updates on Triple Stores using SPARQL Update + Note: This is a transactional implementation - this means that changes made are not persisted until you either call Flush() or you dispose of the instance. Alternatively you may invoke the Discard() method to throw away changes made to the in-memory state. - This is a new part of the API introduced in the 0.3.0 release and adds support for using SPARQL to update Triple Stores. SPARQL Update is part of the new SPARQL 1.1 standard and provides syntax for inserting, modifying and deleting data as well as managing graphs in a store. + The actual level of persistence provided will vary according to the IStorageProvider instance you use. For example if the DeleteGraph() method is not supported then Graph removals won't persist in the underlying store. Similarily an instance which is read-only will allow you to pull out existing graphs from the store but won't persist any changes. - -
- - - Namespace containing classes which model SPARQL Update Commands. These can be used both to represent SPARQL Updates and to execute them over in-memory stores. + The Contains() method of the underlying BaseGraphCollection has been overridden so that invoking Contains causes the Graph from the underlying store to be loaded if it exists, this means that operations like HasGraph() may be slower than expected or cause applications to stop while they wait to load data from the store. - - - - - Represents the SPARQL Update ADD Command - +

SPARQL Query Behaviour

+ + The exact SPARQL Query behaviour will depend on the capabilities of the underlying IStorageProvider instance. If it also implements the IQueryableStorage interface then its own SPARQL implementation will be used, note that if you try and make a SPARQL query but the in-memory view has not been synced (via a Flush() or Discard() call) prior to the query then an RdfQueryException will be thrown. If you want to make the query regardless you can do so by invoking the query method on the underlying store directly by accessing it via the UnderlyingStore property. + + + If the underlying store does not support SPARQL itself then SPARQL queries cannot be applied and a NotSupportedException will be thrown. + +

SPARQL Update Behaviour

+ + Similarly to SPARQL Query support the SPARQL Update behaviour depends on whether the underlying IStorageProvider instance also implements the IUpdateableStorage interface. If it does then its own SPARQL implementation is used, otherwise a GenericUpdateProcessor will be used to approximate the SPARQL Update. + + + Please be aware that as with SPARQL Query if the in-memory view is not synced with the underlying store a SparqlUpdateException will be thrown. + +

Other Notes

+ + It is possible for the in-memory view of the triple store to get out of sync with the underlying store if that store is being modified by other processes or other code not utilising the PersistentTripleStore instance that you have created. Currently there is no means to resync the in-memory view with the underlying view so you should be careful of using this class in scenarios where your underlying store may be modified. + +
- + - Creates a Command which merges the data from the Source Graph into the Destination Graph + Creates a new in-memory view of some underlying store represented by the IStorageProvider instance - Source Graph URI - Destination Graph URI - Whether errors should be suppressed + IO Manager + + Please see the remarks for this class for notes on exact behaviour of this class + - + - Creates a Command which merges the data from the Source Graph into the Destination Graph + Finalizer which ensures that the instance is properly disposed of thereby persisting any outstanding changes to the underlying store - Source Graph URI - Destination Graph URI + + If you do not wish to persist your changes you must call Discard() prior to disposing of this instance or allowing it to go out of scope such that the finalizer gets called + - + - Evaluates the Command in the given Context + Gets the underlying store - Evaluation Context - + - Processes the Command using the given Update Processor + Disposes of the Triple Store flushing any outstanding changes to the underlying store - SPARQL Update Processor + + If you do not want to persist changes you have please ensure you call Discard() prior to disposing of the instance + - + - Abstract Base class for classes that represent SPARQL Update INSERT, DELETE and INSERT/DELETE commands + Flushes any outstanding changes to the underlying store - + - URI from the WITH statement + Discards any outstanding changes returning the in-memory view of the store to the state it was in after the last Flush/Discard operation - + - URIs for the USING clauses + Executes a SPARQL Query on the Triple Store + Sparql Query as unparsed String + - + - URIS for the USING NAMED clauses + Executes a SPARQL Query on the Triple Store processing the results using an appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query as unparsed String - + - Creates a new Base Modification Command + Executes an Update against the Triple Store - 
Update Command Type + SPARQL Update Command(s) + + As per the SPARQL 1.1 Update specification the command string may be a sequence of commands + - + - Gets the URIs specified in USING clauses + Executes a single Update Command against the Triple Store + SPARQL Update Command - + - Gets the URIs specified in USING NAMED clauses + Executes a set of Update Commands against the Triple Store + SPARQL Update Command Set - + - Gets the URI of the Graph specified in the WITH clause + Internal implementation of a Graph Collection for use by the PersistentTripleStore - + - Adds a new USING URI + Class for representing Graphs which can be directly queried using SPARQL - URI - + - Adds a new USING NAMED URI + Creates a new Queryable Graph - URI - + - Determines whether a Graph Pattern is valid for use in an DELETE pattern + Executes a SPARQL Query on the Graph - Graph Pattern - Is this the top level pattern? + SPARQL Query - - - Abstract Base Class for SPARQL Update Commands which move data between Graphs - - - + - Source Graph URI + Executes a SPARQL Query on the Graph handling the results with the given handlers + RDF Handler + SPARQL Results Handler + SPARQL Query - + - Destination Graph URI + Executes a SPARQL Query on the Graph + SPARQL Query + - + - Whether errors should be suppressed + Executes a SPARQL Query on the Graph handling the results with the given handlers + RDF Handler + SPARQL Results Handler + SPARQL Query - + - Creates a new Transfer Command + Implements a Sub-Graph Isomorphism Algorithm - Command Type - Source Graph URI - Destination Graph URI - Whether errors should be suppressed - + - Creates a new Transfer Command + Checks to see whether a given Graph is a sub-graph of the other Graph - Command Type - Source Graph URI - Destination Graph URI + Sub-Graph + Graph + - + - URI of the Source Graph + Uses a series of Rules to attempt to generate a mapping without the need for brute force guessing + 1st Graph + 2nd Graph + 1st Graph Node classification + 2nd 
Graph Node classification + 1st Graph Degree classification + 2nd Graph Degree classification + - + - URI of the Destination Graph + Generates and Tests all possibilities in a brute force manner + 1st Graph + 2nd Graph + 1st Graph Node classification + 2nd Graph Node classification + Dependencies in the 1st Graph + Dependencies in the 2nd Graph + - + - Whether errors during evaluation should be suppressed + Helper method for brute forcing the possible mappings + Possible Mappings + Dependencies in the 1st Graph + Dependencies in the 2nd Graph + Target Graph (2nd Graph) + - + - Gets whether the Command affects a Single Graph + Gets the Blank Node mapping if one could be found - + - Gets whether the Command affects a given Graph + An indexed triple collection that uses our and implementations under the hood for the index structures - Graph URI - + + + A variation on which structures the indexes slightly differently, this may give differing performance and reduced memory usage in some scenarios. 
+ + - + - Gets the String representation of the Command + Indexes a Triple - + Triple - + - Mode by which to clear Graphs + Helper for indexing triples + Node to index by + Triple + Index to insert into + Comparer for the Index + Hash Function for the Index - + - Clears a specific Graph of Triples + Unindexes a triple + Triple - + - Clears all Named Graphs of Triples + Helper for unindexing triples + Node to index by + Triple + Index to remove from - + - Clears the Default Graph of Triples + Adds a Triple to the collection + Triple + - + - Clears all Graphs of Triples + Checks whether the collection contains a given Triple + Triple + - + - Represents the SPARQL Update CLEAR command + Gets the count of triples in the collection - + - Creates a Command which clears the given Graph or Graphs depending on the Clear Mode specified + Deletes a triple from the collection - Graph URI - Clear Mode - Whether errors should be suppressed + Triple + - + - Creates a Command which clears the given Graph + Gets the specific instance of a Triple in the collection - URI of the Graph to clear + Triple + - + - Creates a Command which clears the Default Graph (if any) + Gets all the triples with a given object + Object + - + - Creates a Command which performs the specified type of clear + Gets all the triples with a given predicate - Clear Mode - Whether errors should be suppressed + Predicate + - + - Creates a Command which performs the specified type of clear + Gets all the triples with a given subject - Clear Mode + Subject + - + - Gets whether this Command affects a Single Graph + Gets all the triples with a given predicate and object + Predicate + Object + - + - Gets whether this Command affects the given Graph + Gets all the triples with a given subject and object - Graph URI + Subject + Object - + - Gets the URI of the Graph to be cleared (or null if the default graph should be cleared) + Gets all the triples with a given subject and predicate + Subject + Predicate + - + - Gets 
whether errors should be suppressed + Gets the Object Nodes - + - Gets the Mode by which Graphs are to be cleared + Gets the Predicate Nodes - + - Evaluates the Command in the given Context + Gets the Subject Nodes - Evaluation Context - + - Processes the Command using the given Update Processor + Disposes of the collection - SPARQL Update Processor - + - Gets the String representation of the Command + Gets the enumerator for the collection - + - Represents the SPARQL Update COPY Command + A Thread Safe version of the Graph class + Should be safe for almost any concurrent read and write access scenario, internally managed using a ReaderWriterLockSlim. If you encounter any sort of Threading/Concurrency issue please report to the dotNetRDF Bugs Mailing List + Performance will be marginally worse than a normal Graph but in multi-threaded scenarios this will likely be offset by the benefits of multi-threading. - + - Creates a Command which Copies the contents of one Graph to another overwriting the destination Graph + Locking Manager for the Graph - Source Graph URI - Destination Graph URI - Whether errors should be suppressed - + - Creates a Command which Copies the contents of one Graph to another overwriting the destination Graph + Creates a new Thread Safe Graph - Source Graph URI - Destination Graph URI - + - Evaluates the Command in the given Context + Creates a new Thread Safe graph using the given Triple Collection - Evaluation Context + Triple Collection - + - Processes the Command using the given Update Processor + Creates a new Thread Safe graph using a Thread Safe triple collection - SPARQL Update Processor + Thread Safe triple collection - + - Represents the SPARQL Update CREATE command + Asserts a Triple in the Graph + The Triple to add to the Graph - + - Creates a new CREATE command + Asserts a List of Triples in the graph - URI of the Graph to create - Whether the create should be done silenty + List of Triples in the form of an IEnumerable - + - 
Creates a new CREATE command + Retracts a Triple from the Graph - URI of the Graph to create + Triple to Retract + Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted - + - Gets whether the Command affects a Single Graph + Retracts a enumeration of Triples from the graph + Enumeration of Triples to retract - + - Gets whether the Command affects a given Graph + Creates a new Blank Node ID and returns it - Graph URI - + - Gets the URI of the Graph to be created + Disposes of a Graph - + - Gets whether the Create should be done silently + Returns the Blank Node with the given Identifier + The Identifier of the Blank Node to select + Either the Blank Node or null if no Node with the given Identifier exists - + - Evaluates the Command in the given Context + Returns the LiteralNode with the given Value if it exists - Update Evaluation Context + The literal value of the Node to select + Either the LiteralNode Or null if no Node with the given Value exists + The LiteralNode in the Graph must have no Language or DataType set - + - Processes the Command using the given Update Processor + Returns the LiteralNode with the given Value in the given Language if it exists - SPARQL Update Processor + The literal value of the Node to select + The Language Specifier for the Node to select + Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists - + - Gets the String representation of the Command + Returns the LiteralNode with the given Value and given Data Type if it exists - + The literal value of the Node to select + The Uri for the Data Type of the Literal to select + Either the LiteralNode Or null if no Node with the given Value and Data Type exists - + - Represents the SPARQL Update DELETE command + Returns the UriNode with the given QName if it exists + The QName of the Node to select + - + - Creates a new DELETE command + Returns the UriNode with the given Uri if it exists - Pattern to 
construct Triples to delete - Pattern to select data which is then used in evaluating the deletions pattern - URI of the affected Graph + The Uri of the Node to select + Either the UriNode Or null if no Node with the given Uri exists - + - Creates a new DELETE command which operates on the Default Graph + Gets all the Triples involving the given Node - Pattern to construct Triples to delete - Pattern to select data which is then used in evaluating the deletions pattern + The Node to find Triples involving + Zero/More Triples - + - Creates a new DELETE command + Gets all the Triples involving the given Uri - Pattern to construct Triples to delete - URI of the affected Graph + The Uri to find Triples involving + Zero/More Triples - + - Createa a new DELETE command which operates on the Default Graph + Gets all the Triples with the given Node as the Object - Pattern to construct Triples to delete + The Node to find Triples with it as the Object + - + - Gets whether the Command affects a single Graph + Gets all the Triples with the given Uri as the Object + The Uri to find Triples with it as the Object + Zero/More Triples - + - Gets whether the Command affects a given Graph + Gets all the Triples with the given Node as the Predicate - Graph URI + The Node to find Triples with it as the Predicate - + - Gets the URI of the Graph the deletions are made from + Gets all the Triples with the given Uri as the Predicate + The Uri to find Triples with it as the Predicate + Zero/More Triples - + - Gets the pattern used for Deletions + Gets all the Triples with the given Node as the Subject + The Node to find Triples with it as the Subject + Zero/More Triples - + - Gets the pattern used for the WHERE clause + Gets all the Triples with the given Uri as the Subject + The Uri to find Triples with it as the Subject + Zero/More Triples - + - Optimises the Commands WHERE pattern + A Thread Safe version of the Graph class + Should be safe for almost any concurrent read and write access 
scenario, internally managed using a ReaderWriterLockSlim. If you encounter any sort of Threading/Concurrency issue please report to the dotNetRDF Bugs Mailing List + + + Performance will be marginally worse than a normal Graph but in multi-threaded scenarios this will likely be offset by the benefits of multi-threading. + + + Since this is a non-indexed version load performance will be better but query performance better + + - + - Evaluates the Command in the given Context + Creates a new non-indexed Thread Safe Graph - Evaluation Context - + - Processes the Command using the given Update Processor + Tools class which contains a number of utility methods which are declared as static methods - SPARQL Update Processor - + - Gets the String representation of the Command + Checks whether a Uri is valid as a Base Uri for resolving Relative URIs against - + Base Uri to test + True if the Base Uri can be used to resolve Relative URIs against + A Base Uri is valid if it is an absolute Uri and not using the mailto: scheme - + - Represents a SPARQL Update DELETE DATA command + Checks whether a URI Reference appears malformed and if so fixes it + URI Reference + - + - Creates a new DELETE DATA command + Returns a URI with any Fragment ID removed from it - Pattern composed of concrete Triples to delete + URI + - + - Determines whether a Graph Pattern is valid for use in an DELETE DATA command + Generic Helper Function which Resolves Uri References against a Base Uri - Graph Pattern - Is this the top level pattern? 
- + Uri Reference to resolve + Base Uri to resolve against + Resolved Uri as a String + RDF Parse Exception if the Uri cannot be resolved for a know reason + Uri Format Exception if one/both of the URIs is malformed - + - Gets the Data Pattern containing Triples to delete + Generic Helper Function which Resolves Uri References against a Base Uri + Uri Reference to resolve + Base Uri to resolve against + Resolved Uri as a String + Uri Format Exception if one/both of the URIs is malformed - + - Gets whether the Command affects a single Graph + Resolves a QName into a Uri using the Namespace Mapper and Base Uri provided + QName to resolve + Namespace Map to resolve against + Base Uri to resolve against + - + - Gets whether the Command affects a given Graph + Resolves a QName into a Uri using the Namespace Mapper and Base Uri provided - Graph URI + QName to resolve + Namespace Map to resolve against + Base Uri to resolve against + Whether when the default prefix is used but not defined it can fallback to Base URI - + - Evaluates the Command in the given Context + Resolves a QName/Uri into a Uri using the Namespace Mapper and Base Uri provided - Evaluation Context + QName/Uri to resolve + Namespace Map to resolve against + Base Uri to resolve against + - + - Processes the Command using the given Update Processor + Copies a Node so it can be used in another Graph since by default Triples cannot contain Nodes from more than one Graph - SPARQL Update Processor + Node to Copy + Graph to Copy into + Indicates whether the Copy should preserve the Graph Uri of the Node being copied + - + - Gets the String representation of the Command + Copies a Node so it can be used in another Graph since by default Triples cannot contain Nodes from more than one Graph + Node to Copy + Graph to Copy into + + + Warning: Copying Blank Nodes may lead to unforseen circumstances since no remapping of IDs between Graphs is done + + - + - Represents a SPARQL Update DROP command + Copies a Node 
using another Node Factory + Node to copy + Factory to copy into + + + + Warning: Copying Blank Nodes may lead to unforseen circumstances since no remapping of IDs between Factories is done + + - + - Creates a new DROP command + Copies a Triple from one Graph to another - URI ofthe Graph to DROP - DROP Mode to use - Whether the DROP should be done silently + Triple to copy + Graph to copy to + - + - Creates a new DROP command + Copies a Triple from one Graph to another - URI of the Graph to DROP - DROP Mode to use + Triple to copy + Graph to copy to + Indicates whether the Copy should preserve the Graph Uri of the Nodes being copied + - + - Creates a new DROP command + Does a quick and simple combination of the Hash Codes of two Objects - URI of the Graph to DROP + First Object + Second Object + - + - Creates a new DROP command which drops the Default Graph + Prints Debugging Output to the Console Standard Out for a HTTP Web Request + HTTP Web Request + Only available in Debug builds - + - Creates a new DROP command which performs a specific clear mode drop operation + Prints Debugging Output to the Console Standard Out for a HTTP Web Response - Clear Mode + HTTP Web Response + Only available in Debug builds - + - Creates a new DROP command which performs a specific clear mode drop operation + An indexed triple collection that uses our and implementations under the hood for the index structures - Clear Mode - Whether errors should be suppressed + + - + - Gets whether the Command affects a single Graph + Creates a new Tree Indexed triple collection - + - Gets whether the Command affects a given Graph + Creates a new Tree Indexed triple collection - Graph URI - + Mode to use for compound indexes - + - Gets the URI of the Graph to be dropped + Creates a new Tree Indexed triple collection with the given Indexing options + Whether to create a subject index + Whether to create a predicate index + Whether to create an object index + Whether to create a subject predicate 
index + Whether to create a subject object index + Whether to create a predicate object index + Mode to use for compound indexes - + - Gets whether the Drop should be done silently + Indexes a Triple + Triple - + - Gets the type of DROP operation to perform + Helper for indexing triples + Node to index by + Triple + Index to insert into - + - Evaluates the Command in the given Context + Helper for indexing triples - Evaluation Context + Triple to index by + Index to insert into - + - Processes the Command using the given Update Processor + Unindexes a triple - SPARQL Update Processor + Triple - + - Gets the String representation of the command + Helper for unindexing triples - + Node to index by + Triple + Index to remove from - + - Represents a SPARQL Update INSERT command + Helper for unindexing triples + Triple + Index to remove from - + - Creates a new INSERT command + Adds a Triple to the collection - Pattern to construct Triples to insert - Pattern to select data which is then used in evaluating the insertions - URI of the affected Graph + Triple + - + - Creates a new INSERT command which operates on the Default Graph + Checks whether the collection contains a given Triple - Pattern to construct Triples to insert - Pattern to select data which is then used in evaluating the insertions + Triple + - + - Gets whether the Command affects a single Graph + Gets the count of triples in the collection - + - Gets whether the Command affects a given Graph + Deletes a triple from the collection - Graph URI + Triple - + - Gets the URI of the Graph the insertions are made to + Gets the specific instance of a Triple in the collection + Triple + - + - Gets the pattern used for insertions + Gets all the triples with a given object + Object + - + - Gets the pattern used for the WHERE clause + Gets all the triples with a given predicate + Predicate + - + - Optimises the Commands WHERE pattern + Gets all the triples with a given subject + Subject + - + - Evaluates the Command 
in the given Context + Gets all the triples with a given predicate and object - Evaluation Context + Predicate + Object + - + - Processes the Command using the given Update Processor + Gets all the triples with a given subject and object - SPARQL Update Processor + Subject + Object + - + - Gets the String representation of the Command + Gets all the triples with a given subject and predicate + Subject + Predicate - + - Represents the SPARQL Update INSERT DATA command + Gets the Object Nodes - + - Creates a new INSERT DATA command + Gets the Predicate Nodes - Pattern containing concrete Triples to insert - + - Determines whether a Graph Pattern is valid for use in an INSERT DATA command + Gets the Subject Nodes - Graph Pattern - Is this the top level pattern? - - + - Gets the Data Pattern containing Triples to insert + Disposes of the collection - + - Gets whether the Command affects a single Graph + Gets the enumerator for the collection + - + - Gets whether the Command affects a given Graph + Class for representing RDF Triples in memory - Graph URI - - + - Evaluates the Command in the given Context + Constructs a Triple from Nodes that belong to the same Graph/Node Factory - Evaluation Context + Subject of the Triple + Predicate of the Triple + Object of the Triple + Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory + Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Processes the Command using the given Update Processor + Constructs a Triple from Nodes that belong to the same Graph/Node Factory and associates this Triple with the given Graph (doesn't assert the Triple) - SPARQL Update Processor + Subject + Predicate + Object + Graph + Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory + Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Gets the String representation of the Command + Constructs a Triple from Nodes that belong to the same Graph/Node Factory 
with some Context - + Subject of the Triple + Predicate of the Triple + Object of the Triple + Context Information for the Triple + Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory + Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Represents the SPARQL Update LOAD command + Creates a Triple and associates it with the given Graph URI permanently (though not with a specific Graph as such) + Subject of the Triple + Predicate of the Triple + Object of the Triple + Graph URI + Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory + Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Creates a new LOAD command + Constructs a Triple from Nodes that belong to the same Graph/Node Factory with some Context - Source URI to load data from - Target URI for the Graph to store data in - Whether errors loading should be suppressed + Subject of the Triple + Predicate of the Triple + Object of the Triple + Context Information for the Triple + Graph URI + Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory + Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Creates a new LOAD command + Gets the Subject of the Triple - Source URI to load data from - Whether errors loading should be suppressed - + - Creates a new LOAD command + Gets the Predicate of the Triple - Source URI to load data from - Target URI for the Graph to store data in - + - Creates a new LOAD command which operates on the Default Graph + Gets the Object of the Triple - Source URI to load data from - + - Gets whether the Command affects a specific Graph + Gets the Graph this Triple was created for + This is not necessarily the actual Graph this Triple is asserted in since this property is set from the Subject of the Triple when it is created and it is possible to create a Triple without asserting it into an actual Graph or to then assert it into a different Graph. 
- + - Gets whether the Command affects a given Graph + Gets the Uri of the Graph this Triple was created for - Graph URI - + This is not necessarily the actual Graph Uri of the Graph this Triple is asserted in since this property is set from the Subject of the Triple when it is created and it is possible to create a Triple without asserting it into an actual Graph or to then assert it into a different Graph. - + - Gets the URI that data is loaded from + Gets the Context Information for this Triple + + Context may be null where no Context for the Triple has been defined + - + - Gets the URI of the Graph to load data into + Gets an enumeration of the Nodes in the Triple + + Returned as subject, predicate, object + - + - Gets whether errors loading the data are suppressed + Gets whether the Triple is a Ground Triple + + + A Ground Triple is any Triple considered to state a single fixed fact. In practise this means that the Triple does not contain any Blank Nodes. + + - + - Evaluates the Command in the given Context + Checks whether the Triple involves a given Node - Evaluation Context + The Node to test upon + True if the Triple contains the given Node - + - Processes the Command using the given Update Processor + Checks whether the Triple involves a given Uri - SPARQL Update Processor + The Uri to test upon + True if the Triple has a UriNode with the given Uri - + - Gets the String representation of the Command + Indicates whether the Triple has the given Node as the Subject + Node to test upon - + - Represents the SPARQL Update INSERT/DELETE command + Indicates whether the Triple has the given Node as the Predicate + Node to test upon + - + - Creates a new INSERT/DELETE command + Indicates whether the Triple has the given Node as the Object - Pattern to construct Triples to delete - Pattern to construct Triples to insert - Pattern to select data which is then used in evaluating the insertions and deletions - URI of the affected Graph + Node to test upon + - + - 
Creates a new INSERT/DELETE command which operates on the Default Graph + Implementation of Equality for Triples - Pattern to construct Triples to delete - Pattern to construct Triples to insert - Pattern to select data which is then used in evaluating the insertions and deletions + Object to compare with + + + Triples are considered equal on the basis of two things: +
    +
  1. The Hash Codes of the Triples are identical
  2. +
  3. The logical conjunction (AND) of the equality of the Subject, Predicate and Object is true. Each pair of Nodes must either be Equal using Node Equality or are both Blank Nodes and have identical Node IDs (i.e. are indistinguishable for equality purposes on a single Triple level)
  4. +
+
- + - Gets whether the Command affects a Single Graph + Implementation of Hash Codes for Triples + + + + Returns the Hash Code of the Triple which is calculated as the Hash Code of the String formed by concatenating the Hash Codes of its constituent Nodes. This Hash Code is precomputed in the Constructor of a Triple since it will be used a lot (in Triple Equality calculation, Triple Collections etc) + + + Since Hash Codes are based on a String representation there is no guarantee of uniqueness though the same Triple will always give the same Hash Code (on a given Platform - see the MSDN Documentation for string.GetHashCode() for further details) + + - + - Gets whether the Command affects a given Graph + Gets a String representation of a Triple in the form 'Subject , Predicate , Object' - Graph URI - + - Gets the URI of the Graph the insertions are made to + Gets a String representation of a Triple in the form 'Subject , Predicate , Object' with optional compression of URIs to QNames + Controls whether URIs will be compressed to QNames in the String representation + - + - Gets the pattern used for deletions + Gets the String representation of a Triple using the given Triple Formatter + Formatter + - + - Gets the pattern used for insertions + Implementation of CompareTo for Triples which allows Triples to be sorted + Triple to compare to + + Triples are Ordered by Subjects, Predicates and then Objects. 
Triples are only partially orderable since the CompareTo methods on Nodes only define a partial ordering over Nodes - + - Gets the pattern used for the WHERE clause + Gets the data for serialization + Serilization Information + Streaming Context - + - Optimises the Commands WHERE pattern + Gets the schema for XML serialization + - + - Evaluates the Command in the given Context + Reads the data for XML deserialization - Evaluation Context + XML Reader - + - Processes the Command using the given Update Processor + Writes the data for XML serialization - SPARQL Update Processor + XML Writer - + - Gets the String representation of the Command + Basic Triple Collection which is not indexed - - + - Represents a SPARQL Update MOVE Command + Underlying Storage of the Triple Collection - + - Creates a Command which Moves data from one Graph to another overwriting the destination Graph and deleting the source Graph + Creates a new Triple Collection - Source Graph URI - Destination Graph URI - Whether errors should be suppressed - + - Creates a Command which Moves data from one Graph to another overwriting the destination Graph and deleting the source Graph + Determines whether a given Triple is in the Triple Collection - Source Graph URI - Destination Graph URI + The Triple to test + True if the Triple already exists in the Triple Collection - + - Evaluates the Command in the given Context + Adds a Triple to the Collection - Evaluation Context + Triple to add - + - Processes the Command using the given Update Processor + Deletes a Triple from the Colleciton - SPARQL Update Processor + Triple to remove + Deleting something that doesn't exist has no effect and gives no error - + - An Update Processor that extends the Leviathan Engine to include explanations of the query portions of the Updates + Gets the Number of Triples in the Triple Collection - + - Creates a new Explain Update Processor + Gets the given Triple - Dataset + Triple to retrieve + + Thrown if the given Triple 
does not exist in the Triple Collection - + - Creates a new Explain Update Processor + Gets all the Nodes which are Subjects of Triples in the Triple Collection - Dataset - Explanation Level - + - Creates a new Explain Update Processor + Gets all the Nodes which are Predicates of Triples in the Triple Collection - Triple Store - Explanation Level - + - Creates a new Explain Update Processor + Gets all the Nodes which are Objects of Triples in the Triple Collectio - Triple Store - + - Gets the Query Processor to be used + Gets the Enumerator for the Collection - + - SPARQL Update Processor which processes commands against a generic underlying store represented by an IStorageProvider implementation + Gets the Enumerator for the Collection - - - If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used for the non-type specific calls i.e. ProcessCommand() and ProcessCommandSet(). At all other times the SPARQL Update commands will be processed by approximating their behaviour through calls to SaveGraph(), LoadGraph() and UpdateGraph() in addition to local in-memory manipulation of the data. Some commands such as INSERT and DELETE can only be processed when the manager is also a IQueryableStorage since they rely on making a query and performing actions based on the results of that query. - - - The performance of this processor is somewhat dependent on the underlying IStorageProvider. 
If the underlying manager supports triple level updates as indicated by the UpdateSupported property then operations can be performed quite efficiently, if this is not the case then any operation which modifies a Graph will need to load the existing Graph from the store, make the modifications locally in-memory and then save the resulting Graph back to the Store - - + - + - Creates a new Generic Update Processor + Disposes of a Triple Collection - Generic IO Manager - + - Discards any outstanding changes + Thread Safe decorator for triple collections + + Depending on the platform this either uses to provide MRSW concurrency or it uses to provide exclusive access concurrency, either way usage is thread safe + + This decorator provides thread safe access to any underlying triple collection - + - Flushes any outstanding changes to the underlying store + Creates a new thread safe triple collection which wraps a new instance of the default unindexed - + - Processes an ADD command + Creates a new thread safe triple collection which wraps the provided triple collection - Add Command + Triple Collection - + - Processes a CLEAR command + Enters the write lock - Clear Command - - Implemented by replacing the Graph with an empty Graph - - + - Processes a COPY command + Exists the write lock - Copy Command - + - Processes a CREATE command + Enters the read lock - Create Command - - - Implemented by adding an empty Graph to the Store - - - Warning: As the IStorageProvider interface does not allow checking whether a Graph exists processing CREATE commands can result in overwriting existing Graphs - - - + - Processes a command + Exists the read lock - Command - - - If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used. 
- - - + - Processes a command set + Adds a Triple to the Collection - Command Set - - - If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used. - - + Triple to add - + - Processes a DELETE command + Determines whether a given Triple is in the Triple Collection - Delete Command - - - Note: The underlying manager must implement the IQueryableStorage interface in order for DELETE commands to be processed - - + The Triple to test + True if the Triple already exists in the Triple Collection - + - Processes a DELETE DATA command + Gets the Number of Triples in the Triple Collection - DELETE Data Command - + - Processes a DROP command + Gets the original instance of a specific Triple from the Triple Collection - Drop Command + Triple + - + - Processes an INSERT command + Deletes a Triple from the Collection - Insert Command - - - Note: The underlying manager must implement the IQueryableStorage interface in order for INSERT commands to be processed - - + Triple to remove + Deleting something that doesn't exist has no effect and gives no error - + - Processes an INSERT DATA command + Gets the Enumerator for the Collection - Insert Data Command + - + - Processes a LOAD command + Gets all the Nodes which are Objects of Triples in the Triple Collectio - Load Command - + - Processes an INSERT/DELETE command + Gets all the Nodes which are Predicates of Triples in the Triple Collection - Insert/Delete Command - + - Processes a MOVE command + Gets all the Nodes which are Subjects of Triples in the Triple Collection - Move Command - + - Determines whether a Graph Pattern is valid for use in an INSERT/DELETE DATA command + Gets all triples with the given Object - Graph Pattern - Is this the top level pattern? 
+ Object - + - Interface for SPARQL Update Processors + Gets all triples with the given predicate - - - A SPARQL Update Processor is a class that knows how apply SPARQL Update Commands to some data source to which the processor has access - - - The point of this interface is to allow for end users to implement custom update processors or to extend and modify the behaviour of the default Leviathan engine as required. - - + Predicate + - + - Processes an ADD command + Gets all triples with the given predicate object - Add Command + Predicate + Object + - + - Processes a CLEAR command + Gets all the triples with the given subject - Clear Command + Subject + - + - Processes a COPY command + Gets all the triples with the given subject and object - Copy Command + Subject + Object + - + - Processes a CREATE command + Gets all triples with the given subject and predicate - Create Command + Subject + Predicate + - + - Processes a command + Disposes of a Triple Collection - Command - + - Processes a command set + A Node Comparer which does faster comparisons since it only does lexical comparisons for literals rather than value comparisons, + and it compares virtual nodes on their VirtualID where possible. 
- Command Set - + - Processes a DELETE command + Compares two Nodes - Delete Command + Node + Node + - + - Processes a DELETE DATA command + Determine equality for two nodes - DELETE Data Command + + + True if the nodes compare equal, false otheriwse - - - Processes a DROP command - - Drop Command + + - + - Processes an INSERT command + A Node Comparer which does faster comparisons since it only does lexical comparisons for literals rather than value comparisons - Insert Command - + - Processes an INSERT DATA command + Compares two Nodes - Insert Data Command + Node + Node + - + + + + + + + - Processes a LOAD command + Compares triples for equality - Load Command - + - Processes an INSERT/DELETE command + Returns whether two Triples are equal - Insert/Delete Command + Triple + Triple + - + - Processes a MOVE command + Returns a predictable HashCode for the triple based on its components' - Move Command + Triple + - + - Causes any outstanding changes to be discarded + Abstract base class for Triple Comparers which provide for comparisons using different node comparers - + - Causes any outstanding changes to be flushed to the underlying storage + Node Comparer - + - Default SPARQL Update Processor provided by the library's Leviathan SPARQL Engine + Creates a new Triple Comparer - - - The Leviathan Update Processor simply invokes the Evaluate method of the SPARQL Commands it is asked to process. Derived implementations may override the relevant virtual protected methods to substitute their own evaluation of an update for our default standards compliant implementations. 
- - - + - Dataset over which updates are applied + Creates a new Triple Comparer + Node Comparer to use - + - Creates a new Leviathan Update Processor + Compares two Triples - Triple Store + Triple + Triple + - + - Creates a new Leviathan Update Processor + Triple comparer which compares on subjects, then predicates and finally objects - SPARQL Dataset - + - Gets/Sets whether Updates are automatically committed + Creates a new Full Triple comparer - + - Flushes any outstanding changes to the underlying dataset + Creates a new Full Triple comparer that uses a specific Node comparer + Node comparer - + - Discards and outstanding changes from the underlying dataset + Compares two Triples + Triple + Triple + - + - Creates a new Evaluation Context + Triple comparer which compares only on subjects - Update Commands - - + - Creates a new Evaluation Context + Creates a new Subject comparer - - + - Gets the Query Processor to be used + Creates a new Subject comparer using the provided Node comparer - - - By default null is returned which indicates that the default query processing behaviour is used, to use a specific processor extend this class and override this method. 
If you do so you will have access to the dataset in use so generally you will want to use a query processor that accepts a ISparqlDataset instance - + Node comparer - + - Processes an ADD command + Compares two Triples - Add Command + Triple + Triple + - + - Processes an ADD command + Triple comparer which compares only on predicates - Add Command - SPARQL Update Evaluation Context - + - Processes a CLEAR command + Creates a new Predicate comparer - Clear Command - + - Processes a CLEAR command + Creates a new Predicate comparer using the provided Node comparer - Clear Command - SPARQL Update Evaluation Context + Node Comparer - + - Processes a COPY command + Compares two Triples - Copy Command + Triple + Triple + - + - Processes a COPY command + Triple comparer which compares only on objects - Copy Command - SPARQL Update Evaluation Context - + - Processes a CREATE command + Creates a new Object comparer - Create Command - + - Processes a CREATE command + Creates a new Object comparer using the provided Node comparer - Create Command - SPARQL Update Evaluation Context + Node comparer - + - Processes a command + Compares two Triples - Command + Triple + Triple + - + - Processes a command + Triple comparer which compares on subjects and then predicates - Command - SPARQL Update Evaluation Context - - Invokes the type specific method for the command type - - + - Processes a command set + Creates a new Subject Predicate comparer - Command Set - - Invokes ProcessCommand() on each command in turn - - + - Processes a DELETE command + Creates a new Subject Predicate comparer using the provided Node comparer - Delete Command + Node Comparer - + - Processes a DELETE command + Compares two Triples - Delete Command - SPARQL Update Evaluation Context + Triple + Triple + - + - Processes a DELETE DATA command + Triple comparer which compares on subjects and then objects - DELETE Data Command - + - Processes a DELETE DATA command + Creates a new Subject Object comparer - Delete 
Data Command - SPARQL Update Evaluation Context - + - Processes a DROP command + Creates a new Subject Object comparer using the provided Node comparer - Drop Command + Node comparer - + - Processes a DROP command + Compares two Triples - Drop Command - SPARQL Update Evaluation Context + Triple + Triple + - + - Processes an INSERT command + Triple comparer which compares on predicates and then objects - Insert Command - + - Processes an INSERT command + Creates a new Predicate Object comparer - Insert Command - SPARQL Update Evaluation Context - + - Processes an INSERT DATA command + Creates a new Predicate Object comparer using the provided Node comparer - Insert Data Command + Node comparer - + - Processes an INSERT DATA command + Compares two Triples - Insert Data Command - SPARQL Update Evaluation Context + Triple + Triple + - + - Processes a LOAD command + Triple comparer which compares on objects and then subjects - Load Command - + - Processes a LOAD command + Creates a new Object Subject comparer - Load Command - SPARQL Update Evaluation Context - + - Processes an INSERT/DELETE command + Creates a new Object Subject comparer using the provided Node comparer - Insert/Delete Command - + - Processes an INSERT/DELETE command + Compares two Triples - Insert/Delete Command - SPARQL Update Evaluation Context + Triple + Triple + - + - Processes a MOVE command + Class for representing Triple Stores which are collections of RDF Graphs - Move Command - + - Processes a MOVE command + List of Reasoners that are applied to Graphs as they are added to the Triple Store - Move Command - SPARQL Update Evaluation Context - + - SPARQL Update Processor which processes updates by sending them to a remote SPARQL Update endpoint represented by a SparqlRemoteUpdateEndpoint instance + Controls whether inferred information is stored in a special Graph or in the original Graph - + - Creates a new Remote Update Processor + Graph Uri for the special Graph used to store inferred 
information - Endpoint URI - + - Creates a new Remote Update Processor + Creates a new Triple Store using a new empty Graph collection - Endpoint URI - + - Creates a new Remote Update Processor + Creates a new Triple Store using the given Graph collection which may be non-empty - SPARQL Remote Update Endpoint + Graph Collection - + - Discards any outstanding changes + Returns whether the Store contains the given Triple within the Query Triples + Triple to search for + - + - Flushes any outstanding changes to the underlying store + Selects all Triples which have a Uri Node with the given Uri from all the Query Triples + Uri + - + - Processes an ADD command + Selects all Triples which contain the given Node from all Graphs in the Triple Store - Add Command + Node + - + - Processes a CLEAR command + Selects all Triples where the Object is a Uri Node with the given Uri from all Graphs in the Triple Store - Clear Command + Uri + - + - Processes a COPY command + Selects all Triples where the Object is a given Node from all Graphs in the Triple Store - Copy Command + Node + - + - Processes a CREATE command + Selects all Triples where the Predicate is a given Node from all Graphs in the Triple Store - Create Command + Node + - + - Processes a command + Selects all Triples where the Predicate is a Uri Node with the given Uri from all Graphs in the Triple Store - Command + Uri + - + - Processes a command set + Selects all Triples where the Subject is a given Node from all Graphs in the Triple Store - Command Set + Node + - + - Processes a DELETE command + Selects all Triples where the Subject is a Uri Node with the given Uri from all Graphs in the Triple Store - Delete Command + Uri + - + - Processes a DELETE DATA command + Selects all the Triples with the given Subject-Predicate pair from all the Query Triples - DELETE Data Command + Subject + Predicate + - + - Processes a DROP command + Selects all the Triples with the given Predicate-Object pair from all the Query Triples 
- Drop Command + Predicate + Object + - + - Processes an INSERT command + Selects all the Triples with the given Subject-Object pair from all the Query Triples - Insert Command + Subject + Object + - + - Processes an INSERT DATA command + Selects all Triples which have a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Insert Data Command + List of the Graph URIs of Graphs you want to select over + Uri + - + - Processes a LOAD command + Selects all Triples which contain the given Node from a Subset of Graphs in the Triple Store - Load Command + List of the Graph URIs of Graphs you want to select over + Node + - + - Processes an INSERT/DELETE command + Selects all Triples where the Object is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Insert/Delete Command + List of the Graph URIs of Graphs you want to select over + Uri + - + - Processes a MOVE command + Selects all Triples where the Object is a given Node from a Subset of Graphs in the Triple Store - Move Command + List of the Graph URIs of Graphs you want to select over + Node + - + - SPARQL Update Processor which processes updates by handing them off to the ExecuteUpdate() method of an IUpdateableTripleStore + Selects all Triples where the Predicate is a given Node from a Subset of Graphs in the Triple Store + List of the Graph URIs of Graphs you want to select over + Node + - + - Creates a new Simple Update Processor + Selects all Triples where the Predicate is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store - Updateable Triple Store + List of the Graph URIs of Graphs you want to select over + Uri + - + - Discards any outstanding changes + Selects all Triples where the Subject is a given Node from a Subset of Graphs in the Triple Store + List of the Graph URIs of Graphs you want to select over + Node + - + - Flushes any outstanding changes to the underlying store + Selects all Triples where the Subject is a Uri Node with the given 
Uri from a Subset of Graphs in the Triple Store + List of the Graph URIs of Graphs you want to select over + Uri + - + - Processes an ADD command + Executes a SPARQL Query on the Triple Store - Add Command + SPARQL Query as unparsed String + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - Processes a CLEAR command + Executes a SPARQL Query on the Triple Store - Clear Command + SPARQL Query as a SparqlQuery instance + + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. 
+ + - + - Processes a COPY command + Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided - Copy Command + RDF Handler + Results Handler + SPARQL Query as unparsed String + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. + + - + - Processes a CREATE command + Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided - Create Command + RDF Handler + Results Handler + SPARQL Query as unparsed String + + + This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. + + + We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. 
+ + - + - Processes a command + Applies Inference to the given Graph - Command + Graph to apply inference to - + - Processes a command set + Adds an Inference Engine to the Triple Store - Command Set + Reasoner to add - + - Processes a DELETE command + Removes an Inference Engine from the Triple Store - Delete Command + Reasoner to remove - + - Processes a DELETE DATA command + Clears all Inference Engines from the Triple Store - DELETE Data Command - + - Processes a DROP command + Disposes of a Triple Store - Drop Command - + - Processes an INSERT command + Executes an Update against the Triple Store - Insert Command + SPARQL Update Command(s) + + As per the SPARQL 1.1 Update specification the command string may be a sequence of commands + - + - Processes an INSERT DATA command + Executes a single Update Command against the Triple Store - Insert Data Command + SPARQL Update Command - + - Processes a LOAD command + Executes a set of Update Commands against the Triple Store - Load Command + SPARQL Update Command Set - + - Processes an INSERT/DELETE command + Event Handler for the Graph Added event of the underlying Graph Collection which calls the normal event processing of the parent class BaseTripleStore and then applies Inference to the newly added Graph - Insert/Delete Command + Sender + Graph Event Arguments - + - Processes a MOVE command + A thread safe variant of , simply a instance with a decorator around it's underlying - Move Command - + - A Class for connecting to a remote SPARQL Update endpoint and executing Updates against it + Creates a new Thread Safe triple store - + - Creates a new SPARQL Update Endpoint for the given URI + Creates a new Thread safe triple store using the given Thread safe graph collection - Endpoint URI + Collection - + - Creates a new SPARQL Update Endpoint for the given URI + Creates a new Thread safe triple store using a thread safe decorator around the given graph collection - Endpoint URI + Collection - + - Gets/Sets the HTTP 
Method used for requests + Represents a union of multiple Triple Collections - The SPARQL 1.1 Protocol specification mandates that Update requests may only be POSTed, attempting to alter the HTTP Mode to anything other than POST will result in a SparqlUpdateException + + The union consists of a Base collection which is the collection that Triples can actually be added to and deleted from and any number of additional collections which are read-only as far as the union is concerned (this does not mean they cannot be altered elsewhere by other code) + - + - Makes an update request to the remote endpoint + Creates a new Union Triple Collection which is a union of two collections - SPARQL Update + Base Triple Collection + Additional Triple Collection - + - Makes an update request asynchronously to the remote endpoint + Creates a new Union Triple Collection which is a union of any number of collections - SPARQL Update - Callback to invoke when the update completes - State to pass to the callback + Base Triple Collection + Additional Triple Collection(s) - + - Serializes configuration for the endpoint + Adds a Triple to the base collection - Serialization Context + Triple to add - + - Possible SPARQL Update Command Types + Checks whether the union contains this Triple in any of the collections it comprises + Triple to test + - + - Command inserts data + Gets the count of Triples in this union + + The Count is the total number of Triples, this may be different from the number of distinct triples + - + - Command deletes data + Deletes a Triple from the base collection + Triple to delete - + - Command inserts data and may be based upon a template + Retrieves a Triple from the union + Triple to retrieve + + Thrown if the Triple is not contained in any of the collections this union comprises - + - Command deletes data and may be based upon a template + Gets the enumeration of distinct objects of Triples - + - Command modifies data + Gets the enumeration of distinct predicates 
of Triples - + - Command loads a graph into the Store + Gets the enumeration of distinct subjects of Triples - + - Command clears a graph in the Store + Disposes of the collection + + This does nothing since we don't know where and how the collections we are the union of are being used and therefore to dispose of them could have unwanted/unexpected results + - + - Command creates a Graph in the Store + Gets the enumeration of Triples in the union + - + - Command removes a Graph from the Store + A Graph which represents the Union of several Graphs + + + The Union is entirely virtual, the Graphs and their Triples are not actually physically merged together + + + All Assert and Retract operations are directed only to the Default Graph while a Clear() operation will clear all Graphs in the Union + + - + - Command which merges the data from one Graph into another + Creates a new Union Graph which is the Union of all the given Graphs with a specific Default Graph + Default Graph of the Union + Other Graphs in the Union - + - Command which copies the data from one Graph into another overwriting the destination Graph + Gets the Nodes of the Graph - + - Command which moves data from one Graph to another overwriting the destination Graph and deleting the Source Graph + Asserts some Triples in the Graph + Triples + + Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph + - + - Unknown + Asserts s Triple in the Graph + Triple + + Assert and Retract operations are directed to the Default Graph of the Union. 
We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph + - + - Base Class of SPARQL Update Commands + Retracts some Triples from the Graph + Triples + + Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph + - + - Creates a new SPARQL Update Command + Retracts a Triple from the Graph - Command Type + Triple + + Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph + - + - Gets the Type of this Command + Clears all the Graphs in the Union - + - Gets whether the Command will only affect a single Graph + A static helper class for interning URIs to reduce memory usage - + - Gets whether the Command will potentially affect the given Graph + Creates a URI interning it if interning is enabled via the Options.InternUris - Graph URI + String URI - A return value of true does not guarantee that the Graph will be affected. Some Commands (e.g. 
DROP ALL) affect all Graphs in the Dataset but the command itself doesn't know whether a Graph with the given URI is actually present in the dataset to which it is applied + When URI interning is disabled this is equivalent to just invoking the constructor of the Uri class - + - Optimises the Command + Clears all interned URIs - + - Evaluates the Command in the given Context + Abstract Base Class for URI Nodes - Evaluation Context - + - Processes the Command Set using the given Update Processor + Internal Only Constructor for URI Nodes - Update Processor + Graph this Node is in + URI - + - Gets the String representation of the Command + Internal Only Constructor for URI Nodes - + Graph this Node is in + QName for the Node + + This Constructor tries to resolve the QName using the NamespaceMapper and Base Uri of the Graph it is in. Exceptions may occur if we cannot resolve the QName correctly. + - + - Represents a sequence of SPARQL Update Commands to be executed on a Dataset + Internal Only Constructor for URI Nodes + URI - + - Creates a new empty Command Set + Deserialization Only Constructor - + - Creates a new Command Set containing the given Command + Deserialization Constructor - Command + Serialization Information + Streaming Context - + - Creates a new Command Set with the given Commands + Gets the Uri for this Node - Commands - + - Adds a new Command to the end of the sequence of Commands + Implementation of Equality for Uri Nodes - Command to add + Object to compare with + + + URI Nodes are considered equal if the string form of their URIs match using Ordinal string comparison + - + - Gets the Command at the given index + Implementation of Equality for Uri Nodes - Index + Object to compare with + + URI Nodes are considered equal if the string form of their URIs match using Ordinal string comparison + - + - Gets the number of Commands in the set + Determines whether this Node is equal to a Blank Node (should always be false) + Blank Node + - + - Gets the 
enumeration of Commands in the set + Determines whether this Node is equal to a Graph Literal Node (should always be false) + Graph Literal Node + - + - Gets/Sets the Base URI for the Command Set + Determines whether this Node is equal to a Literal Node (should always be false) + Literal Node + - + - Gets the Namespace Map for the Command Set + Determines whether this Node is equal to a URI Node + URI Node + - + - Gets/Sets the Timeout in milliseconds for the execution of the Updates + Determines whether this Node is equal to a Variable Node (should always be false) - Default is no timeout + Variable Node + - + - Gets/Sets the Time the updates took to execute + Determines whether this Node is equal to a URI Node - Thrown if you try to inspect the execution time before/during the execution of updates + URI Node + - + - Gets/Sets the Algebra Optimisers to be applied to portions of updates that require queries to be made + Gets a String representation of a Uri as a plain text Uri + - + - Optimises the Commands in the Command Set + Implementation of Compare To for Uri Nodes - Optimiser to use + Node to Compare To + + + Uri Nodes are greater than Blank Nodes and Nulls, they are less than Literal Nodes and Graph Literal Nodes. +

+ Uri Nodes are ordered based upon lexical ordering of the string value of their URIs +
- + - Optimises the Commands in the Command Set + Returns an Integer indicating the Ordering of this Node compared to another Node - Uses the globally registered query optimiser from SparqlOptimiser.QueryOptimiser + Node to test against + - + - Processes the Command Set using the given Update Processor + Returns an Integer indicating the Ordering of this Node compared to another Node - Update Processor + Node to test against + - + - Gets the String representation of the Command Set + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against - + - Evaluation Context for SPARQL Updates evaluated by the libraries Leviathan SPARQL Engine + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Creates a new SPARQL Update Evaluation Context + Returns an Integer indicating the Ordering of this Node compared to another Node - Command Set - SPARQL Dataset - Query Processor for WHERE clauses + Node to test against + - + - Creates a new SPARQL Update Evaluation Context + Returns an Integer indicating the Ordering of this Node compared to another Node - Command Set - SPARQL Dataset + Node to test against + - + - Creates a new SPARQL Update Evaluation Context + Gets the data for serialization - SPARQL Dataset - Query Processor for WHERE clauses + Serialization Information + Streaming Context - + - Creates a new SPARQL Update Evaluation Context + Reads the data for XML deserialization - SPARQL Dataset + XML Reader - + - Gets the Command Set (if any) that this context pertains to + Writes the data for XML serialization + XML Writer - + - Dataset upon which the Updates are applied + Gets the value of the node as a string + - + - Gets the Query Processor used to process the WHERE clauses of DELETE or INSERT commands + Throws an error as URIs cannot be cast to numerics + - + - Retrieves the Time in milliseconds the update took to evaluate + Throws an error as URIs cannot be cast 
to numerics + - + - Retrieves the Time in ticks the updates took to evaluate + Throws an error as URIs cannot be cast to numerics + - + - Gets the Remaining Timeout i.e. the Timeout taking into account time already elapsed + Throws an error as URIs cannot be cast to numerics - - If there is no timeout then this is always zero, if there is a timeout this is always >= 1 since any operation that wants to respect the timeout must have a non-zero timeout to actually timeout properly. - + - + - Gets the Update Timeout used for the Command Set + Throws an error as URIs cannot be cast to a boolean - - - This is taken either from the Timeout property of the SparqlUpdateCommandSet to which this evaluation context pertains or from the global option Options.UpdateExecutionTimeout. To set the Timeout to be used set whichever of those is appropriate prior to evaluating the updates. If there is a Command Set present then it's timeout takes precedence unless it is set to zero (no timeout) in which case the global timeout setting is applied. You cannot set the Update Timeout to be higher than the global timeout unless the global timeout is set to zero (i.e. 
no global timeout) - - + - + - Checks whether Execution should Time out + Throws an error as URIs cannot be cast to a date time - Thrown if the Update has exceeded the Execution Timeout + - + - Starts the Execution Timer + Throws an error as URIs cannot be cast to a date time + - + - Ends the Execution Timer + Throws an error as URIs cannot be cast to a time span + - + - Static Helper class for providing constants, helper functions etc regarding the SPARQL Update specification + Gets the URI of the datatype this valued node represents as a String - + - Class of exceptions that may occur when outputting RDF + Gets the numeric type of the expression - + - Creates a new RDF Output Exception + Class for representing URI Nodes - Error Message - + - Creates a new RDF Output Exception + Internal Only Constructor for URI Nodes - Error Message - Exception that caused this Exception + Graph this Node is in + URI for the Node - + - Class of exceptions that may occur when doing multi-threaded output of RDF + Internal Only Constructor for URI Nodes + Graph this Node is in + QName for the Node - - Used when a process may result in multiple errors from different threads - + This Constructor tries to resolve the QName using the NamespaceMapper and Base Uri of the Graph it is in. Exceptions may occur if we cannot resolve the QName correctly. 
- + - Creates a new Threaded RDF Output Exception + Deserilization Only Constructor - Error Message - + - Adds an Exception to the list of Inner Exceptions + Deserialization Constructor - Exception + Serialization Information + Streaming Context - + - Gets the enumeration of Exceptions + Implementation of Compare To for URI Nodes + URI Node to Compare To + + + Simply invokes the more general implementation of this method + - + - Class for errors in selecting an appropriate Writer to output RDF with + Determines whether this Node is equal to a URI Node + URI Node + - + - Creates a new RDF Writer Selection Exception with the given Message + Possible Variable Context Types - Error Message - + - Creates a new RDF Writer Selection Exception with the given Message and Inner Exception + There is currently no variable context - Error Message - Inner Exception - + - Abstract base class for RDF writers that generate GZipped output + Existential Variable Context - - - While the normal witers can be used with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such - - - + - Creates a new GZipped writer + Universal Variable Context - Underlying writer - + - Saves a Graph as GZipped output + Represents the Variable Context for Triples - Graph to save - File to save to - + - Saves a Graph as GZipped output + Creates a new Variable Context - Graph to save - Writer to save to + Context Type - + - Helper method for raising warning events + Gets the Context Type - Warning message - + - Event which is raised if non-fatal errors occur writing RDF output + Gets the Variables in this Context - + - Gets the description of the writer + Adds a Variable to this Context - + Variable - + - Writer for GZipped NTriples + Gets whether a given Variable exists in this Context + Variable Node + - + - Creates a new GZipped NTriples writer + Gets/Sets the Inner Context + + When you set the Inner Context this sets the 
Inner Context of the most nested inner context, you can remove all nested contexts by setting this to null + - + - Writer for GZipped Turtle + Abstract Base Class for Variable Nodes - + - Creates a new GZipped Turtle writer + Creates a new Variable Node + Graph + Variable Name - + - Writer for GZipped Notation 3 + Deserialization Only Constructor - + - Creates a new GZipped Notation 3 writer + Deserialization Constructor + Serialization Information + Streaming Context - + - Writer for GZipped RDF/XML + Gets the Variable Name - + - Creates a new GZipped RDF/XML writer + Gets whether this Node is equal to some other Node + Node to test + - + - Writer for GZipped RDF/JSON + Determines whether this Node is equal to a Blank Node (should always be false) + Blank Node + - + - Creates a new GZipped RDF/JSON writer + Determines whether this Node is equal to a Graph Literal Node (should always be false) + Graph Literal Node + - + - Writer for GZipped RDFa + Determines whether this Node is equal to a Literal Node (should always be false) + Literal Node + - + - Creates a new GZipped RDFa writer + Determines whether this Node is equal to a URI Node (should always be false) + URI Node + - + - Class for generating Turtle Concrete RDF Syntax which provides varying levels of Syntax Compression + Determines whether this Node is equal to a Variable Node - - Similar in speed to the standard TurtleWriter but capable of using more syntax compressions depending on the Compression level set - - Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue + Variable Node + - + - Creates a new Compressing Turtle Writer which uses the Default Compression Level + Determines whether this Node is equal to a Variable Node + Variable Node + - + - Creates a new Compressing Turtle Writer which uses the given Compression Level + Gets whether this Node is equal to some Object - Desired Compression Level - See Remarks for this classes 
CompressionLevel property to see what effect different compression levels have + Object to test + - + - Creates a new compressing Turtle writer using the given syntax level + Gets the String representation of this Node - Syntax Level + - + - Creates a new Compressing Turtle Writer which uses the given Compression Level and Syntax Level + Compares this Node to another Node - Desired Compression Level - Syntax Level - See Remarks for this classes CompressionLevel property to see what effect different compression levels have + Node to compare with + - + - Gets/Sets whether Pretty Printing is used + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets/Sets whether High Speed Write Mode should be allowed + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets/Sets the Compression Level to be used + Returns an Integer indicating the Ordering of this Node compared to another Node - - - If the Compression Level is set to None then High Speed mode will always be used regardless of the input Graph and the HighSpeedMorePermitted property. - - - If the Compression Level is set to Minimal or above then full Predicate Object lists will be used for Triples. - - - If the Compression Level is set to More or above then Blank Node Collections and Collection syntax will be used if the Graph contains Triples that can be compressed in that way. 
- + Node to test against + - + - Gets/Sets the Default Namespaces that are always available + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Gets the type of the Triple Formatter used by the writer + Returns an Integer indicating the Ordering of this Node compared to another Node + Node to test against + - + - Saves a Graph to a file using Turtle Syntax + Returns an Integer indicating the Ordering of this Node compared to another Node - Graph to save - File to save to + Node to test against + - + - Saves a Graph to the given Stream using Turtle Syntax + Gets the data for serialization - Graph to save - Stream to save to + Serialization Information + Streaming Context - + - Generates the Turtle Syntax for the Graph + Reads the data for XML deserialization + XML Reader - + - Generates Output for Triples as a single "s p o." Triple + Writes the data for XML serialization - Writer Context - Triple to output - - Used only in High Speed Write Mode + XML Writer - + - Generates Output for Nodes in Turtle syntax + Throws an error as variables cannot be converted to types - Writer Context - Node to generate output for - Segment of the Triple being written - Indentation - + - Internal Helper method which converts a Collection into Turtle Syntax + Throws an error as variables cannot be converted to types - Writer Context - Collection to convert - Indentation - - - Helper method for generating Parser Warning Events - - Warning Message - - + - Event which is raised when there is a non-fatal issue with the Graph being written + Throws an error as variables cannot be converted to types + - + - Gets the String representation of the writer which is a description of the syntax it produces + Throws an error as variables cannot be converted to types - + - Abstract Base class for Dataset writers that produce GZipped Output + Throws an error as variables cannot be converted to types - - - While the normal witers can be used 
with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such - - + - + - Creates a new GZiped Writer + Throws an error as variables cannot be converted to types - Underlying writer + - + - Saves a RDF Dataset as GZipped output + Throws an error as variables cannot be converted to types - Store to save - File to save to + - + - Saves a RDF Dataset as GZipped output + Throws an error as variables cannot be converted to types - Store to save - Writer to save to + - + - Helper method for raising warning events + Throws an error as variables cannot be cast to a time span - Warning Message + - + - Event raised when non-fatal output errors + Gets the URI of the datatype this valued node represents as a String - + - Gets the description of the writer + Gets the numeric type of the expression - - + - Writer for creating GZipped NQuads output + Class representing Variable Nodes (only used for N3) - + - Creates a new GZipped NQuads output + Creates a new Variable Node + Graph + Variable Name - + - Writer for creating GZipped TriG outptut + Deserialization Only Constructor - + - Creates a new GZipped TriG output + Deserialization Constructor + Serialization Information + Streaming Context - + - Writer for creating GZipped TriX output + Compares this Node to another Variable Node + Variable Node + - + - Creates a new GZipped TriX output + Determines whether this Node is equal to a Variable Node + Variable Node + - + - Abstract Base class for Results writers which generate GZipped output + Class for representing Triple Stores which are collections of RDF Graphs - - While the normal witers can be used with GZip streams directly this class just abstracts the wrapping of file/stream output into a GZip stream if it is not already passed as such - + The 'Web Demand' Triple Store is a Triple Store which automatically retrieves Graphs from the Web based on the URIs of Graphs that you ask it for - + - 
Creates a new GZipped Results writer + Creates an Web Demand Triple Store - Underlying writer + A Uri for the Default Graph which should be loaded from the Web as the initial Graph - + - Saves a Result Set as GZipped output + Creates an Web Demand Triple Store - Result Set to save - File to save to + A Filename for the Default Graph which should be loaded from a local File as the initial Graph - + - Saves a Result Set as GZipped output + Creates a new Web Demand Triple Store - Result Set to save - Writer to save to - + - Helper method for raising warning events + Abstract decorator for Graphs to make it easier to layer functionality on top of existing implementations - Warning message - + - Event which is raised if non-fatal errors occur writing results + Underlying Graph this is a wrapper around - + - Gets the description of the writer + Creates a wrapper around the default Graph implementation, primarily required only for deserialization and requires that the caller call to properly wire up event handling - - + - Writer for GZipped SPARQL XML + Creates a new wrapper around the given Graph + Graph - + - Creates a new GZipped SPARQL XML writer + Deserialization Constructor + Serialization Information + Streaming Context - + - Writer for GZipped SPARQL JSON + Gets/Sets the Base URI of the Graph - + - Creates a new GZipped SPARQL JSON writer + Gets whether the Graph is empty - + - Writer for GZipped SPARQL CSV + Gets the Namespace Map for the Graph - + - Creates a new GZipped SPARQL CSV writer + Gets the Nodes of the Graph - + - Writer for GZipped SPARQL TSV + Gets the Triple Collection for the Graph - + - Creates a new GZipped SPARQL TSV writer + Asserts a Triple in the Graph + Triple - + - A Class which creates GraphViz Graphs entirely dynamically + Asserts Triples in the Graph + Triples - + - Creates a new GraphVizGenerator + Retracts a Triple from the Graph - Format for the Output (svg is default) - Only use this form if you're certain that dot.exe is in your 
PATH otherwise the code will throw an error + Triple - + - Creates a new GraphVizGenerator + Retracts Triples from the Graph - Format for the Output - Directory in which GraphViz is installed + Triples - + - Gets/Sets the Format for the Output + Clears the Graph - + - Generates GraphViz Output for the given Graph + Creates a new Blank Node with the given Node ID - Graph to generated GraphViz Output for - File you wish to save the Output to - Whether you want to open the Output in the default application (according to OS settings) for the filetype after it is Created + Node ID + - + - Internal Helper Method for locating the GraphViz Directory using the PATH Environment Variable + Creates a new Blank Node + - + - A Writer which generates GraphViz DOT Format files from an RDF Graph + Gets the next available Blank Node ID + - + - Saves a Graph into GraphViz DOT Format + Creates a new Graph Literal Node with the given sub-graph - Graph to save - File to save to + Sub-graph + - + - Saves a Graph into GraphViz DOT Format + Creates a new Graph Literal Node - Graph to save - Stream to save to + - + - Internal Helper Method for converting a Triple into DOT notation + Creates a new Literal Node - Triple to convert - Writer Context + Value - + - Internal Helper method for converting a Node into DOT notation + Creates a new Literal Node with the given Datatype - Node to Convert - Writer Context + Value + Datatype URI - Currently Graphs containing Graph Literal Nodes cannot be converted - + - Internal Helper method for converting Uri Nodes to DOT Notation + Creates a new Literal Node with the given Language - Uri Node to convert - Writer Context + Value + Language - + - Internal Helper Method for converting Blank Nodes to DOT notation + Creates a new URI Node that references the Graphs Base URI - Blank Node to Convert - + - Internal Helper Method for converting Literal Nodes to DOT notation + Creates a new URI Node from a QName - Literal Node to convert + QName - + - Internal 
Helper method which handles raising the Warning event if an Event Handler is registered to it + Creates a new URI Node - Warning Message + URI + - + - Event that is raised if there is a potential problem with the RDF being output + Creates a new Variable Node - Not used by this Writer + Variable Name + - + - Gets the String representation of the writer which is a description of the syntax it produces + Attempts to get the Blank Node with the given ID - + Node ID + The Node if it exists or null - + - Interface for Writers that Support Pretty Printing + Attempts to get the Literal Node with the given Value and Language + Value + Language + The Node if it exists or null - + - Gets/Sets whether Pretty Printing Mode should be used + Attempts to get the Literal Node with the given Value + Value + The Node if it exists or null - + - Interface for Writers that Support engaging High Speed Write Mode for some Graphs + Attempts to get the Literal Node with the given Value and Datatype + Value + Datatype URI + The Node if it exists or null otherwise - + - Gets/Sets whether the Writer can use High Speed Write Mode if the Graph is deemed suitable for this + Gets all the Triples involving the given URI + The URI to find Triples involving + Zero/More Triples - + - Interface for Writers that support varying levels of Syntax Compression + Gets all the Triples involving the given Node + The Node to find Triples involving + Zero/More Triples - + - Gets/Sets the Compression Level that the Writer is using + Gets all the Triples with the given URI as the Object - Compression Level is an arbitrary figure that the Writer can interpret as it wants, implementations of this interface should state in the XML Comments for this property what the different values mean. The Standard Compression levels provided by the WriterCompressionLevel enumeration are intended as guides and Writers may interpret these as they desire. 
+ The URI to find Triples with it as the Object + Zero/More Triples - + - Interface for Writers that support use of DTDs to compress output + Gets all the Triples with the given Node as the Object + The Node to find Triples with it as the Object + - + - Gets/Sets whether DTDs can be used + Gets all the Triples with the given Node as the Predicate + The Node to find Triples with it as the Predicate + - + - Interface for Writers that can use attributes (e.g. XML or HTML based writers) which allows you to control whether the writer will choose to use attributes to encode data which could otherwise be expressed as elements + Gets all the Triples with the given Uri as the Predicate + The Uri to find Triples with it as the Predicate + Zero/More Triples - + - Gets/Sets whether literal objects can be compressed as attributes + Gets all the Triples with the given Node as the Subject + + The Node to find Triples with it as the Subject + Zero/More Triples + + + + Gets all the Triples with the given Uri as the Subject + The Uri to find Triples with it as the Subject + Zero/More Triples - + - Interface for Writers that support the use of Namespaces and allows a set of Default Namespaces to be defined + Selects all Triples with the given Subject and Predicate + Subject + Predicate + - + - Gets/Sets the Default Namespaces used for writing + Selects all Triples with the given Subject and Object + Subject + Object + - + - Interface for Writers that support multi-threaded writing + Selects all Triples with the given Predicate and Object + Predicate + Object + - + - Gets/Sets whether multi-threading is used + Returns the UriNode with the given QName if it exists + The QName of the Node to select + - + - Interface for Writers that generate HTML + Returns the UriNode with the given Uri if it exists + The Uri of the Node to select + Either the UriNode Or null if no Node with the given Uri exists - + - Gets/Sets a Stylesheet file used to format the HTML + Gets whether a given Triple 
exists in this Graph + Triple to test + - + - Gets/Sets the CSS class used for the anchor tags used to display the URIs of URI Nodes + Merges another Graph into the current Graph + Graph to Merge into this Graph + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - + - Gets/Sets the CSS class used for the span tags used to display Blank Node IDs + Merges another Graph into the current Graph + Graph to Merge into this Graph + Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in + + + The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. + + + The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. When the Merge operation is completed the Merged event is raised + + - + - Gets/Sets the CSS class used for the span tags used to display Literals + Determines whether a Graph is equal to another Object + Object to test + + + + A Graph can only be equal to another Object which is an IGraph + + + Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals + + - + - Gets/Sets the CSS class used for the anchor tags used to display Literal datatypes + Determines whether this Graph is equal to the given Graph + Graph to test for equality + Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes + + + + The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs + + + Graph Equality is determined according to the following algorithm: + +
    +
  1. If the given Graph is null Graphs are not equal
  2. +
  3. If the given Graph is this Graph (as determined by Reference Equality) then Graphs are equal
  4. +
  5. If the Graphs have a different number of Triples they are not equal
  6. +
  7. Declare a list of Triples which are the Triples of the given Graph called OtherTriples
  8. +
  9. Declare two dictionaries of Nodes to Integers which are called LocalClassification and OtherClassification
  10. +
  11. For Each Triple in this Graph +
      +
    1. If it is a Ground Triple and cannot be found and removed from OtherTriples then Graphs are not equal since the Triple does not exist in both Graphs
    2. +
    3. If it contains Blank Nodes track the number of usages of this Blank Node in LocalClassification
    4. +
    +
  12. +
  13. If there are any Triples remaining in OtherTriples which are Ground Triples then Graphs are not equal since this Graph does not contain them
  14. +
  15. If all the Triples from both Graphs were Ground Triples and there were no Blank Nodes then the Graphs are equal
  16. +
  17. Iterate over the remaining Triples in OtherTriples and populate the OtherClassification
  18. +
  19. If the count of the two classifications is different the Graphs are not equal since there are differing numbers of Blank Nodes in the Graph
  20. +
  21. Now build two additional dictionaries of Integers to Integers which are called LocalDegreeClassification and OtherDegreeClassification. Iterate over LocalClassification and OtherClassification such that the corresponding degree classifications contain a mapping of the number of Blank Nodes with a given degree
  22. +
  23. If the count of the two degree classifications is different the Graphs are not equal since there are not the same range of Blank Node degrees in both Graphs
  24. +
  25. For All classifications in LocalDegreeClassification there must be a matching classification in OtherDegreeClassification else the Graphs are not equal
  26. +
  27. Then build a possible mapping using the following rules: +
      +
    1. Any Blank Node used only once should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    2. +
    3. Any Blank Node with a unique degree should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    4. +
    5. Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    6. +
    7. Build up lists of dependent pairs of Blank Nodes for both Graphs
    8. +
    9. Use these lists to determine if there are any independent nodes not yet mapped. These should be mapped to equivalent Blank Nodes in the other Graph, if this is not possible the Graphs are not equal
    10. +
    11. Use the Dependencies and existing mappings to generate a possible mapping
    12. +
    13. If a Complete Possible Mapping (there is a Mapping for each Blank Node from this Graph to the Other Graph) then test this mapping. If it succeeds then the Graphs are equal
    14. +
    15. Otherwise we now fallback to the Base Mapping and use it as a basis for Brute Forcing the possible solution space and testing every possibility until either a mapping works or we find the Graphs to be non-equal
    16. +
    +
  28. +
+
- + - Gets/Sets the CSS class used for the span tags used to display Literal language specifiers + Checks whether this Graph is a sub-graph of the given Graph + Graph + - + - Gets/Sets the CSS class used for the div tags used to group chunks of markup into a box + Checks whether this Graph is a sub-graph of the given Graph + Graph + Mapping of Blank Nodes + - + - Gets/Sets a Prefix that is applied to all href attributes + Checks whether this Graph has the given Graph as a sub-graph + Graph + - + - Interface for writers which use formatters from the Formatting namespace + Checks whether this Graph has the given Graph as a sub-graph + Graph + Mapping of Blank Nodes + - + - Gets the Type for the Triple Formatter this writer uses + Computes the Difference between this Graph the given Graph + Graph + - This should be the type descriptor for a type that implements ITripleFormatter + + Produces a report which shows the changes that must be made to this Graph to produce the given Graph + - + - - Namespace for Writing Classes which provide the means to Serialize RDF Graphs as concrete RDF syntaxes or graphical representations. - - - Also contains classes that can be used to save Graphs and Triple Stores to arbitrary database backed storage using classes from the Storage namespace. - + Helper function for Resolving QNames to URIs + QName to resolve to a Uri + - + - - Namespace for Writer Context classes, these are classes that are used internally by writers to store their state. This allows writers to be safely used in a multi-threaded scenario since the writing of one Graph/Store cannot affect the writing of another. 
- + Event which is raised when a Triple is asserted in the Graph - + - Interface for Store Writer Contexts + Event which is raised when a Triple is retracted from the Graph - + - Gets the Store being written + Event which is raised when the Graph contents change - + - Base Class for Store Writer Context Objects + Event which is raised just before the Graph is cleared of its contents - + - Pretty Print Mode setting + Event which is raised after the Graph is cleared of its contents - + - High Speed Mode setting + Event which is raised when a Merge operation is requested on the Graph - + - Creates a new Base Store Writer Context with default settings + Event which is raised when a Merge operation is completed on the Graph - Store to write - TextWriter being written to - + - Creates a new Base Store Writer Context with custom settings + Event Handler which handles the Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event - Store to write - TextWriter being written to - Pretty Print Mode - High Speed Mode + Sender + Triple Event Arguments - + - Gets/Sets the Pretty Printing Mode used + Helper method for raising the Triple Asserted event manually + Triple Event Arguments - + - Gets/Sets the High Speed Mode used + Helper method for raising the Triple Asserted event manually + Triple - + - Gets the Store being written + Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event + Sender + Triple Event Arguments - + - Gets the TextWriter being written to + Helper method for raising the Triple Retracted event manually + - + - Formats a URI as a String for full Output + Helper method for raising the Triple Retracted event manually - URI - + Triple - + - Formats a URI as a String for full Output + Event handler to help propogate Graph events from the underlying graph - URI - + Sender + Arguments - + - Interface for Writer Contexts + Helper method for 
raising the Changed event + Triple Event Arguments - + - Gets the Graph being written + Helper method for raising the Changed event - + - Gets the TextWriter being written to + Event handler to help propogate Graph events from the underlying graph + Sender + Arguments - + - Gets/Sets the Pretty Printing Mode used + Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Gets/Sets the High Speed Mode used + Event handler to help propogate Graph events from the underlying graph + Sender + Arguments - + - Gets/Sets the Compression Level used + Helper method for raising the Cleared event - + - Gets/Sets the Node Formatter used + Event handler to help propogate Graph events from the underlying graph + Sender + Arguments - + - Gets/Sets the URI Formatter used + Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + True if the operation can continue, false if it should be aborted - + - Interface for Writer Contexts which store collection compression data + Event handler to help propogate Graph events from the underlying graph + Sender + Arguments - + - Gets the mapping from Blank Nodes to Collections + Helper method for raising the Merged event - + - Gets the Triples that should be excluded from standard output as they are part of collections + Helper method for attaching the necessary event handlers to the underlying graph - + - Base Class for Writer Context Objects + Disposes of the wrapper and in doing so disposes of the underlying graph - - This is not an abstract class since some writers will require only this information or possibly less - - + - Compression Level to be used + Gets the Serialization Information + Serialization Information + Streaming Context - + - Pretty Printing Mode setting + Gets the Schema for XML serialization + - + - High Speed Mode 
setting + Reads the data for XML deserialization + XML Reader - + - Graph being written + Writes the data for XML serialization + XML Writer - + - TextWriter being written to + Abstract decorator for Graph Collections to make it easier to add new functionality to existing implementations - + - QName Output Mapper + Underlying Graph Collection - + - Node Formatter + Creates a decorator around a default instance - + - URI Formatter + Creates a decorator around the given graph collection + Graph Collection - + - Creates a new Base Writer Context with default settings + Adds a Graph to the collection - Graph being written - TextWriter being written to + Graph + Whether to merge into an existing Graph with the same URI + - + - Creates a new Base Writer Context with custom settings + Gets whether the collection contains the given Graph - Graph being written - TextWriter being written to - Compression Level + + - + - Creates a new Base Writer Context with custom settings + Gets the number of Graphs in the collection - Graph being written - TextWriter being written to - Compression Level - Pretty Print Mode - High Speed Mode - + - Gets the Graph being written + Disposes of the collection - + - Gets the TextWriter being written to + Gets the enumerator for the collection + - + - Gets the QName Output Mapper in use + Gets the URIs of the Graphs in the collection - + - Gets/Sets the Compression Level used + Removes a Graph from the collection + Graph URI + - + - Gets/Sets the Pretty Printing Mode used + Gets a Graph from the collection + Graph URI + - + - Gets/Sets the High Speed Mode used + Abstract decorator for Triple Collections to make it easier to add additional functionality to existing collections - + - Gets/Sets the Node Formatter in use + Underlying Triple Collection - + - Gets/Sets the URI Formatter in use + Creates a new decorator over the default - + - Writer Context for XHTML+RDFa Writers + Creates a new decorator around the given triple collection + Triple 
Collection - + - Creates a new HTML Writer Context + Adds a triple to the collection - Graph - Text Writer + Triple + - + - HTML Writer to use + Gets whether the collection contains the given Triple + Triple + - + - Writer Context for NTriples Writers + Counts the triples in the collection - + - Creates a new NTriples Writer Context with default settings + Deletes a triple from the collection - Graph to write - TextWriter to write to - NTriples Syntax mode + Triple + - + - Creates a new NTriples Writer Context with custom settings + Gets the specific instance of a Triple from the collection - Graph to write - TextWriter to write to - NTriples Syntax mode - Pretty Print Mode - High Speed Mode + Triple + - + - Gets the NTriples syntax mode + Gets the object nodes - + - Writer Context for RDF/XML Writers + Gets the predicate nodes - + - Pretty Printing Mode setting + Gets the subject nodes - + - Graph being written + Disposes of the collection - + - TextWriter being written to + Gets the enumerator for the collection + - + - XmlWriter being written to + Gets all the triples with the given object + Object + - + - Nested Namespace Mapper + Gets all the triples with the given predicate + Predicate + - + - Creates a new RDF/XML Writer Context + Gets all the triples with the given predicate and object - Graph - Output destination + Predicate + Object + - + - Generates the required settings for the XmlWriter + Gets all the triples with the given subject + Subject - + - Gets the Graph being written + Gets all the triples with the given subject and object + Subject + Object + - + - Gets the TextWriter being written to + Gets all the triples with the given subject and predicate + Subject + Predicate + - + - Gets the XML Writer in use + Abstract decorator for Triple Stores to make it easier to add new functionality on top of existing implementations - + - Gets/Sets the Pretty Printing Mode used + Underlying store - + - Gets/Sets the Node Formatter + Event Handler definitions - 
- Node Formatters are not used for RDF/XML output - - + - Gets/Sets the URI Formatter + Event Handler definitions - - URI Formatters are not used for RDF/XML output - - + - Gets the Namespace Map in use + Event Handler definitions - + - Gets the Blank Node map in use + Event Handler definitions - + - Gets/Sets whether High Speed Mode is permitted + Event Handler definitions - - Not currently supported - - + - Gets/Sets the Compression Level used + Creates a new triple store decorator that uses a default in-memory - - Not currently supported - - + - Gets/Sets the next ID to use for issuing Temporary Namespaces + Creates a new triple store decorator around the given instance + Triple Store - + - Gets/Sets whether a DTD is used + Gets whether the store is empty - + - Gets/Sets whether attributes are used to encode the predicates and objects of triples with simple literal properties + Gets the Graphs of the store - + - Represents the mapping from Blank Nodes to Collections + Gets the triples of the store - + - Stores the Triples that should be excluded from standard output as they are part of collections + Adds a Graph to the store + Graph + - + - Writer Context for Store Writers which do multi-threaded writing + Adds a Graph to the store - - Provides a queue for queuing the URIs of Graphs from the Store that need writing and a thread safe way of retrieving the next Uri to be written from the Queue - + Graph + Whether to merge with an existing graph with the same URI + - + - Creates a new Threaded Store Writer Context with default settings + Adds a Graph to the store from a URI - Store to be written - TextWriter to write to + Graph URI + - + - Creates a new Threaded Store Writer Context with custom settings + Adds a Graph to the store from a URI - Store to be written - TextWriter to write to - Pretty Print Mode - High Speed Mode + Graph URI + Whether to merge with an existing graph with the same URI + - + - Gets the NamespaceMap used for reducing URIs to QNames since 
there may only be one shared map written to the output + Removes a Graph from the store + Graph URI + - + - Gets the QName Mapper + Gets whether a Graph exists in the store - - Must be manually initialised by the user - + Graph URI + - + - Adds a Uri to the list of URIs for Graphs that are waiting to be written + Gets a Graph from the store - + Graph URI + - + - Gets the next Uri for a Graph that is waiting to be written + Event which is raised when a graph is added - Uri of next Graph to be written - + - Writer Context for TriG + Events which is raised when a graph is removed - + - Creates a new TriG Writer context + Event which is raised when a graph is changed - Triple Store to save - TextWriter to output to - Whether to use pretty printing - Whether high speed mode is permitted - Compression Level to use - Whether to enable N3 compatability mode - + - Gets/Sets the Compression Level + Event which is raised when a graph is cleared - + - Gets/Sets N3 Compatability Mode + Event which is raised when a graph is merged - + - Writer Context for Turtle Writers + Helper method for raising the Graph Added event manually + Graph - + - Creates a new Turtle Writer Context with default settings + Helper method for raising the Graph Added event manually - Graph to write - TextWriter to write to - Turtle Syntax + Graph Event Arguments - + - Creates a new Turtle Writer Context with default settings + Event Handler which handles the Graph Added event from the underlying Graph Collection and raises the Triple Store's Graph Added event - Graph to write - TextWriter to write to + Sender + Graph Event Arguments + Override this method if your Triple Store implementation wishes to take additional actions when a Graph is added to the Store - + - Creates a new Turtle Writer Context with custom settings + Helper method for raising the Graph Removed event manually - Graph to write - TextWriter to write to - Pretty Print Mode - High Speed Mode - Turtle Syntax + Graph - + - Creates a new 
Turtle Writer Context with custom settings + Helper method for raising the Graph Removed event manually - Graph to write - TextWriter to write to - Pretty Print Mode - High Speed Mode + Graph Event Arguments - + - Creates a new Turtle Writer Context with custom settings + Event Handler which handles the Graph Removed event from the underlying Graph Collection and raises the Triple Stores's Graph Removed event - Graph to write - TextWriter to write to - Compression Level to use - Pretty Print Mode - High Speed Mode - Turtle Syntax + Sender + Graph Event Arguments - + - Writer Context for Compressing Turtle Writers + Helper method for raising the Graph Changed event manually + Graph Event Arguments - + - Creates a new Turtle Writer Context with default settings + Event Handler which handles the Changed event of the contained Graphs by raising the Triple Store's Graph Changed event - Graph to write - TextWriter to write to + Sender + Graph Event Arguments - + - Creates a new Turtle Writer Context with default settings + Helper method for raising the Graph Changed event manually - Graph to write - TextWriter to write to - Turtle Syntax + Graph - + - Creates a new Turtle Writer Context with custom settings + Helper method for raising the Graph Cleared event manually - Graph to write - TextWriter to write to - Pretty Print Mode - High Speed Mode + Graph Event Arguments - + - Creates a new Turtle Writer Context with custom settings + Event Handler which handles the Cleared event of the contained Graphs by raising the Triple Stores's Graph Cleared event - Graph to write - TextWriter to write to - Pretty Print Mode - High Speed Mode - Turtle Syntax + Sender + Graph Event Arguments - + - Creates a new Turtle Writer Context with custom settings + Helper method for raising the Graph Merged event manually - Graph to write - TextWriter to write to - Compression Level to use - Pretty Print Mode - High Speed Mode + Graph Event Arguments - + - Creates a new Turtle Writer Context 
with custom settings + Event Handler which handles the Merged event of the contained Graphs by raising the Triple Store's Graph Merged event - Graph to write - TextWriter to write to - Compression Level to use - Pretty Print Mode - High Speed Mode - Turtle Syntax + Sender + Graph Event Arguments - + - Represents the mapping from Blank Nodes to Collections + Disposes of the Triple Store - + - Stores the Triples that should be excluded from standard output as they are part of collections + Delegate Type for Warning Messages raised by RDF Readers + Warning Message - + - - Namespace for Formatter Classes which can be used to format Triples, Nodes and URIs among other types. - + Delegate Type for Warning Messages raised by RDF Writers + Warning Message - + - Interface for formatters designed to format entire RDF Graphs + Delegate Type for Warning Events raised by RDF Dataset Writers + Warning Message - + - Generates the header section for the Graph + Delegate Type for Warning Events raised by RDF Dataset Readers - Graph - + Warning Message - + - Generates the header section for the Graph + Delegate Type for Warning Events raised by SPARQL Readers and Writers for Queries, Updates and Results - Namespaces - + Warning Message - + - Generates a generic header section + Delegate Type for Triple events raised by Graphs - + Originator of the Event + Triple Event Arguments - + - Generates the footer section + Delegate Type for Graph events raised by Graphs - + Originator of the Event + Graph Event Arguments - + - Interface for formatters designed to format entire SPARQL Result Sets + Delegate Type for Graph events raised by Graphs where event handlers may set a Cancel flag to cancel the subsequent operation + Originator of the Event + Graph Event Arguments - + - Generates a header section using the given variables + Delegate Type for Triple Store events raised by Triple Stores - Variables - + Originator of the event + Triple Store Event Arguments - + - Generates a header 
section assuming no variables + Event Arguments for Events regarding the assertion and retraction of Triples - - + - Generates a footer section + Creates a new set of Triple Event Arguments for the given Triple - + Triple + Graph the Triple Event occurred in - + - A formatter which formats triples for RDF/XML output + Creates a new set of Triple Event Arguments for the given Triple + Triple + Graph the Triple Event occurred in + Was the Triple Asserted (if not then it was Retracted) - + - Formats a Graph Header by creating an <rdf:RDF> element and adding namespace definitions + Gets the Triple - Graph - - + - Formats a Graph Header by creating an <rdf:RDF> element and adding namespace definitions + Gets the Graph the Triple belongs to (may be null) - Namespaces - - + - Formats a Graph Header by creating an <rdf:RDF> element + Gets the URI of the Graph the Triple belongs to (may be null) - + - Formats a Graph Footer by closing the <rdf:RDF> element + Gets whether the Triple was asserted - - + - Formats a Triple as a <rdf:Description> element + Gets whether the Triple was retracted - Triple - - + - Gets the String description of this formatter + Event Arguments for Events regarding Graphs - - + - A Result Format that formats using the official SPARQL XML Results format + Creates a new set of Graph Event Arguments + Graph - + - Formats the Header for a SPARQL Result Set + Creates a new set of Graph Event Arguments - Variables - + Graph + Triple Event Arguments - + - Formats the Header for a SPARQL Result Set + Gets the Graph - - + - Formats the Footer for a SPARQL Result Set + Gets the Triple Event Arguments (if any) - - + - Formats a SPARQL Result + Event Arguments for Events regarding Graphs which may be cancelled - SPARQL Result - - + - Formats a Boolean Result + Creates a new set of Cancellable Graph Event Arguments - Boolean Result - + Graph - + - Gets the string representation of the formatter + Creates a new set of Cancellable Graph Event Arguments - + Graph + 
Triple Event Arguments - + - Abstract Base Class for Formatters + Gets/Sets whether the Event should be cancelled - + - Creates a new Formatter + Event Arguments for Events regarding Graphs - Format Name - + - Gets the Format Name + Creates a new set of Triple Store Event Arguments + Triple Store - + - Formats a Node as a String + Creates a new set of Triple Store Event Arguments - Node - Triple Segment - + Triple Store + Graph Event Arguments - + - Formats a Node as a String + Creates a new set of Triple Store Event Arguments - Node - + Triple Store + Graph - + - Formats a Triple as a String + Gets the Triple Store - Triple - - + - Formats a URI Node as a String for the given Format + Gets the Graph Event Arguments (if any) - URI Node - Triple Segment - - + - Formats a URI as a String for full Output + Class for representing errors with RDF - URI - - + - Formats a URI as a String for full Output + Creates a new RDF Exception with the given Message - URI - + Error Message - + - Formats a Literal Node as a String for the given Format + Creates a new RDF Exception with the given Message and Inner Exception - Literal Node - Triple Segment - + Error Message + Inner Exception - + - Formats a Blank Node as a String for the given Format + Class for representing errors with Ontologies - Blank Node - Triple Segment - - + - Formats a Variable Node as a String for the given Format + Creates a new RDF Ontology Exception with the given message - Variable Name - Triple Segment - + Error message - + - Formats a Graph Literal Node as a String for the given Format + Creates a new RDF Ontology Exception with the given message and inner exception - Graph Literal - Triple Segment - + Error message + Inner Exception - + - Formats a Character for the given Format + + The Ontology Namespace is based upon Jena's Ontology API. It allows for a more ontology-centric way of manipulating RDF graphs within the dotNetRDF API. 
+ + + The OntologyResource is the base class of resources and allows for the retrieval and manipulation of various common properties of a resource. More specialised classes like OntologyClass and OntologyProperty are used to work with classes and properties etc. + - Character - - + - Formats a sequence of characters as a String + Represents an Individual i.e. an instance of some class in an ontology - Characters - String + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Formats a SPARQL Result for the given format + Gets an Individual from the Graph - SPARQL Result - + Resource that represents the Individual + Graph the Individual is in + + Requires that an individual (a resource which is the subject of at least one triple where the predicate is rdf:type) is already present in the Graph + - + - Formats a SPARQL Boolean Result for the given format + Gets/Creates an Individual from the Graph - Boolean Result - + Resource that represents the Individual + Class to create/add the Individual to + Graph the Individual is in + + Allows for creating new Individuals in the Graph or adding existing resources to another Class. 
If the resource for the Individual or the given Class are new then they will be added to the Graph + - + - Applies escapes to the given value + Helper method which finds all the Types given for this Resource - Value - Escapes - Escaped string - + - Gets the Name of the Format this Format uses + Gets all the Classes that this resource belongs to - - + - Formatter for generating CSV + Gets whether the Individual belongs to a specific class + Class + - + - Creates a new CSV Formatter + Gets whether the Individual belongs to a class identified by the given resource + Class + - + - Formats URIs for CSV output + Gets whether the Individual belongs to a class identified by the given URI - URI - Triple Segment + Class URI - + - Formats Literals for CSV output + Represents the meta-information about an Ontology - Literal - Triple Segment - + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Abstract Base Class for formatters where things are formatted as lines of plain text deliminated by specific characters + Creates a new Ontology for the given resource + Resource + Graph - + - Creates a new Deliminated Line Formatter + Adds a new owl:backwardsCompatibleWith triple for this Ontology - Format Name - Item Deliminator Character - Escape Character - Character to start URIs (may be null) - Character to end URIs (may be null) - Character to wrap Literals in (may be null) - Character to wrap Long Literals in (may be null) - Character to add at end of line (may be null) - Whether Literals are output with Language/Datatype information + Resource + - + - Formats a Triple + Adds a new owl:backwardsCompatibleWith triple for this Ontology - Triple + Resource - + - Formats a URI Node + Adds a new owl:backwardsCompatibleWith triple for this Ontology - URI Node - Triple Segment + Resource - + - Formats a Literal Node + Removes all owl:backwardsCompatibleWith triples for this Ontology - Literal Node - Triple Segment - + - Formats 
URIs + Removes a owl:backwardsCompatibleWith triple for this Ontology - + Resource - + - Formatter for formatting as HTML + Removes a owl:backwardsCompatibleWith triple for this Ontology + Resource + - + - Formats URIs using HTML encoding + Removes a owl:backwardsCompatibleWith triple for this Ontology - URI + Resource - + - Formats URIs using HTML encoding + Adds a new owl:incompatibleWith triple for this Ontology - URI + Resource - + - Interface for Character Formatters + Adds a new owl:incompatibleWith triple for this Ontology + Resource + - + - Formats a Character as a String + Adds a new owl:incompatibleWith triple for this Ontology - Character + Resource - + - Formats a sequence of characters as a String + Removes all owl:incompatibleWith triples for this Ontology - Characters - String + - + - Interface for Formatters which can format Namespace Information + Removes a owl:incompatibleWith triple for this Ontology + Resource + - + - Formats Namespace Information as a String + Removes a owl:incompatibleWith triple for this Ontology - Namespae Prefix - Namespace URI + Resource - + - Interface for Formatters which can format Base URI Information + Removes a owl:incompatibleWith triple for this Ontology + Resource + - + - Formats Base URI Information as a String + Adds a new owl:imports triple for this Ontology - Base URI + Resource - + - Interface for classes which can format SPARQL Queries into Strings + Adds a new owl:imports triple for this Ontology + Resource + - + - Formats a SPARQL Query into a String + Adds a new owl:imports triple for this Ontology - SPARQL Query + Resource - + - Formats a Graph Pattern into a String + Removes all owl:imports triples for this Ontology - Graph Pattern - + - Formats a Triple Pattern into a String + Removes a owl:imports triple for this Ontology - Triple Pattern + Resource - + - Formats a Triple Pattern item into a String + Removes a owl:imports triple for this Ontology - Pattern Item - Segment of the Triple Pattern in which 
the Item appears + Resource - + - Interface for Formatters which Format SPARQL Results + Removes a owl:imports triple for this Ontology + Resource + - + - Formats a SPARQL Result as a String + Adds a new owl:priorVersion triple for this Ontology - SPARQL Result + Resource - + - Formats a Boolean Result as a String + Adds a new owl:priorVersion triple for this Ontology - Boolean Result + Resource - + - Interface for Triple Formatters + Adds a new owl:priorVersion triple for this Ontology + Resource + - + - Formats a Triple as a String + Removes all owl:priorVersion triples for this Ontology - Triple - + - Interface for URI Formatters + Removes a owl:priorVersion triple for this Ontology + Resource + - + - Formats a URI as a String + Removes a owl:priorVersion triple for this Ontology - URI + Resource - + - Formats a URI as a String + Removes a owl:priorVersion triple for this Ontology - URI + Resource - + - Formatter for formatting as Notation 3 without any compression + Gets all the Ontologies that this Ontology is backwards compatible with - + - Creates a new Uncompressed Notation 3 Formatter + Gets all the Ontologies that this Ontology is incompatible with - + - Formats a Variable Node for Notation 3 + Gets all the Ontologies that this Ontology imports - Variable - Triple Segment - - + - Formats a Graph Literal Node for Notation 3 + Gets all the Ontologies that are prior versions of this Ontology - Graph Literal - Triple Segment - - + - Formatter for formatting as Notation 3 + Class for representing a class in an Ontology + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Creates a new Notation 3 Formatter + Creates a new representation of a Class in the given Ontology Mode + Resource + Graph - + - Creates a new Notation 3 Formatter using the given Graph + Adds a new sub-class for this class - Graph + Resource + - + - Creates a new Notation 3 Formatter using the given Namespace Map + Adds a new 
sub-class for this class - Namespace Map + Resource + - + - Formats a Variable Node for Notation 3 + Adds a new sub-class for this class - Variable - Triple Segment + Resource - + - Formats a Graph Literal Node for Notation 3 + Adds a new sub-class for this class - Graph Literal - Triple Segment + Class + + This overload also adds this class as a super-class of the given class + - + - Formatter which formats Triples as NQuads adding an additional URI at the end of the Triple if there is a Graph URI associated with the Triple + Removes all sub-classes for this class + - + - Creates a new NQuads Formatter + Removes a sub-class for this class + Resource + - + - Creates a new NQuads formatter + Removes a sub-class for this class - NQuads syntax to output + Resource + - + - Creates a new NQuads formatter + Removes a sub-class for this class - NQuads syntax to output - Format Name + Resource + - + - Formats a Triple as a String + Removes a sub-class for this class - Triple + Class + + This overload also removes this class from being a super-class of the given class + - + - Formatter which formats Triples as NQuads according to the RDF 1.1 NQuads specification + Adds a new super-class for this class + Resource + - + - Creates a new formatter + Adds a new super-class for this class + Resource + - + - Formatter for formatting as NTriples + Adds a new super-class for this class + Resource + - + - Set of characters which must be escaped in Literals + Adds a new super-class for this class + Class + + + This overload also adds this class as a sub-class of the given class + - + - Creates a new NTriples formatter + Removes all super-classes - NTriples syntax to output - Format Name + - + - Creates a new NTriples Formatter + Removes a super-class for this class + Resource + - + - Creates a new NTriples Formatter + Removes a super-class for this class + Resource + - + - Creates a new NTriples Formatter + Removes a super-class for this class - Format Name + Resource + - + - Gets the 
NTriples syntax being used + Removes a super-class for this class + Class + + + This overload also removes this class as a sub-class of the given class + - + - Formats a URI Node + Adds an equivalent class for this class - URI Node - Triple Segment + Resource - + - Formats a Literal Node + Adds an equivalent class for this class - Literal Node - Triple Segment + Resource - + - Formats a Character + Adds an equivalent class for this class - Character + Resource - + - Formats a sequence of characters as a String + Adds an equivalent class for this class - Characters - String + Class + + + This overload also adds this class as an equivalent class of the given class + - + - Formats a Blank Node + Removes all equivalent classes for this class - Blank Node - Triple Segment - + - Formats a URI + Removes an equivalent class for this class - URI + Resource - + - Formatter for formatting as NTriples according to the RDF 1.1 specification + Removes an equivalent class for this class + Resource + - + - Creaates a new formatter + Removes an equivalent class for this class + Resource + - + - Abstract Base Class for Formatters that can compress URIs to QNames + Removes an equivalent class for this class + Class + - + - QName Map used for compressing URIs to QNames + Adds a new disjoint class for this class + Resource + - + - Creates a new QName Formatter + Adds a new disjoint class for this class - Format Name - QName Map + Resource + - + - Creates a new QName Formatter + Adds a new disjoint class for this class - Format Name - QName Map - Whether the 'a' keyword can be used for the RDF type predicate + Resource + - + - Determines whether a QName is valid + Adds a new disjoint class for this class - Value + Class + + This overload also adds this class as a disjoint class of the given class + - + - Formats a URI Node using QName compression if possible + Removes all disjoint classes for this class - URI - Triple Segment - + - Formats a Literal Node using QName compression for the 
datatype if possible + Removes a disjoint class for this class - Literal Node - Triple Segment + Resource - + - Formats a Namespace as a String + Removes a disjoint class for this class - Namespace Prefix - Namespace URI + Resource - + - Formatter for formatting Nodes for use in SPARQL and for formatting SPARQL Queries + Removes a disjoint class for this class + Resource + - + - Creates a new SPARQL Formatter + Removes a disjoint class for this class + Class + + + This overload also removes this class as a disjoint class of the given class + - + - Creates a new SPARQL Formatter using the given Graph + Gets the sub-classes of this class (both direct and indirect) - Graph - + - Creates a new SPARQL Formatter using the given Namespace Map + Gets the direct sub-classes of this class - Namespace Map - + - Determines whether a QName is valid + Gets the indirect sub-classes of this class - Value - - + - Formats a Variable Node in SPARQL Syntax + Gets the super-classes of this class (both direct and indirect) - Variable Node - Triple Segment - - + - Formats a Namespace Declaration + Gets the direct super-classes of this class - Namespace Prefix - Namespace URI - - + - Formats a Base URI Declaration + Gets the indirect super-classes of this class - Base URI - - + - Formats a Query in nicely formatted SPARQL syntax + Gets the Sibling classes of this class, if this class is the root of the ontology nothing is returned even if there are multiple root classes - SPARQL Query - - + - Formats a Graph Pattern in nicely formatted SPARQL syntax + Gets the equivalent classes of this class - Graph Pattern - - + - Formats a Triple Pattern in nicely formatted SPARQL syntax + Gets the disjoint classes of this class - Triple Pattern - - + - Formats a Pattern Item in nicely formatted SPARQL syntax + Gets the instances (individuals) of this class - Pattern Item - Triple Pattern Segment - - + - Formats the Variable List for a SPARQL Query + Gets the properties which have this class as a 
domain - Variables - - + - Formats the Variable/QName/URI for a SPARQL DESCRIBE Query + Gets the properties which have this class as a range - SPARQL Query - - + - Formats a SPARQL Expression + Gets whether something is a Top Class i.e. has no super classes - SPARQL Expression - - + - Formats a SPARQL Aggregate + Gets whether something is a Bottom Class i.e. has no sub classes - SPARQL Aggregate - - + - Formats a SPARQL Property Path + Gets/Creates an Individual of this class - SPARQL Property Path + Resource identifying the individual - + - Formats a SPARQL GROUP BY Clause + Gets whether this Class is equal to another Class - GROUP BY Clause + Object to test - + - Formats a SPARQL ORDER BY Clause + Represents a Graph with additional methods for extracting ontology based information from it - ORDER BY Clause - + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Formats the Inline Data portion of a Query + Creates a new Ontology Graph - Inline Data - - + - Formats a SPARQL Result using this Formatter to format the Node values for each Variable + Gets/Creates an ontology resource in the Graph - SPARQL Result + Resource - + - Formats a Boolean Result + Gets/Creates an ontology resource in the Graph - Boolean Result + Resource - + - Formatter for formatting as TSV + Gets/Creates an anonymous ontology resource in the Graph + - + - Creates a new TSV Formatter + Gets/Creates an ontology class in the Graph + Class Resource + - + - Formatter which formats Turtle without any compression + Gets/Creates an ontology class in the Graph + Class Resource + - + - Creates a new Uncompressed Turtle Formatter + Gets/Creates an anonymous ontology class in the Graph + - + - Creates a new Uncompressed Formatter + Gets/Creates an ontology property in the Graph - Format Name + Property Resource + - + - Formats characters + Gets/Creates an ontology property in the Graph - Character + Property Resource - + - Formats a sequence of 
characters as a String + Gets an existing individual in the Graph - Characters - String + Individual Resource + - + - Formatter which formats Turtle with QName compression + Gets/Creates an individual in the Graph of the given class + Individual Resource + Class + - + - Set of characters that must be escaped for Long Literals + Gets an existing individual in the Graph + Individual Resource + - + - Set of characters that must be escaped for Literals + Gets/Creates an individual in the Graph of the given class + Individual Resource + Class + - + - Creates a new Turtle Formatter + Get all OWL classes defined in the graph - + - Creates a new Turtle Formatter that uses the given QName mapper + Get all the RDFS classes defined in the graph - QName Mapper - + - Creates a new Turtle Formatter for the given Graph + Gets all classes defined in the graph using the standard rdfs:Class and owl:Class types - Graph - + - Creates a new Turtle Formatter for the given Namespace Map + Get all classes defined in the graph where anything of a specific type is considered a class - Namespace Map + Type which represents classes + Enumeration of classes - + - Creates a new Turtle Formatter + Gets all RDF properties defined in the graph - Format Name - + - Creates a new Turtle Formatter + Gets all OWL Object properties defined in the graph - Format Name - Graph - + - Creates a new Turtle Formatter + Gets all OWL Data properties defined in the graph - Format Name - Namespace Map - + - Creates a new Turtle Formatter + Gets all OWL Annotation properties defined in the graph - Format Name - QName Map - + - Formats a Literal Node as a String + Gets all properties defined in the graph using any of the standard OWL property types (owl:AnnotationProperty, owl:DataProperty, owl:ObjectProperty) - Literal Node - Triple Segment - - + - Formats a Blank Node as a String + Gets all properties defined in the graph using any of the standard property types (rdf:Property, owl:AnnotationProperty, 
owl:DataProperty, owl:ObjectProperty) - Blank Node - Triple Segment - - + - Formats a Namespace Decalaration as a @prefix declaration + Get all properties defined in the graph where anything of a specific type is considered a property - Namespace Prefix - Namespace URI - + Type which represents properties + Enumeration of properties - + - Formats a Base URI declaration as a @base declaration + Static Helper class for the Ontology API - Base URI - + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Formatter which formats Turtle with QName compression using the newer W3C syntax which permits a wider range of valid QNames + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Creates a new Turtle Formatter + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Creates a new Turtle Formatter that uses the given QName mapper + Constant URIs for properties exposed by OntologyResource and its derived classes - QName Mapper - + - Creates a new Turtle Formatter for the given Graph + Constant URIs for properties exposed by OntologyResource and its derived classes - Graph - + - Creates a new Turtle Formatter for the given Namespace Map + Constant URIs for properties exposed by OntologyResource and its derived classes - Namespace Map - + - Creates a new Turtle Formatter + Constant URIs for properties exposed by OntologyResource and its derived classes - Format Name - + - Creates a new Turtle Formatter + Constant URIs for properties exposed by OntologyResource and its derived classes - Format Name - Graph - + - Creates a new Turtle Formatter + Constant URIs for properties exposed by OntologyResource and its derived classes - Format Name - Namespace Map - + - Creates a new Turtle Formatter + Constant URIs for properties exposed by OntologyResource and its derived classes - Format Name - QName Map - + - Gets whether a QName is valid in Turtle as 
specified by the W3C + Constant URIs for properties exposed by OntologyResource and its derived classes - QName - - + - Interface for classes which can format Nodes into Strings + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Formats a Node as a String + Constant URIs for properties exposed by OntologyResource and its derived classes - Node - - + - Formats a Node as a String for a specific segment of a Triple + Constant URIs for properties exposed by OntologyResource and its derived classes - Node - Segment - - + - - Namespace for classes related to .Net serialization integration in the library - + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Helper Class for use in serialization and deserialization + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Class for saving SPARQL Result Sets to a HTML Table format (this is not a standardised format) + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Gets/Sets the Default Namespaces used to pretty print URIs in the output + Constant URIs for properties exposed by OntologyResource and its derived classes - + - Saves the Result Set to the given File as a HTML Table + Constant URIs for properties exposed by OntologyResource and its derived classes - Result Set to save - File to save to - + - Saves the Result Set to the given Stream as a HTML Table + Constant URIs for properties exposed by OntologyResource and its derived classes - Result Set to save - Stream to save to - + - Internal method which generates the HTML Output for the Sparql Results + Constant URIs for properties exposed by OntologyResource and its derived classes - - - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Constants for URIs for classes in Ontologies - Warning Message - + - Event raised when a non-fatal issue with the 
SPARQL Results being written is detected + Constants for URIs for classes in Ontologies - + - Gets the String representation of the writer which is a description of the syntax it produces + Constants for URIs for classes in Ontologies - - + - Class for saving Sparql Result Sets to the SPARQL Results JSON Format + Constants for URIs for classes in Ontologies - + - Saves the Result Set to the given File in the SPARQL Results JSON Format + Constants for URIs for classes in Ontologies - Result Set to save - File to save to - + - Saves the Result Set to the given Stream in the SPARQL Results JSON Format + Constants for URIs for classes in Ontologies - Result Set to save - Stream to save to - + - Internal method which generates the SPARQL Query Results JSON output + Constants for URIs for classes in Ontologies - Result Set to save - Stream to save to - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Constants for URIs for classes in Ontologies - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being written is detected + Class for representing a property in an Ontology + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Gets the String representation of the writer which is a description of the syntax it produces + Creates a new Ontology Property for the given resource in the given Graph - + Resource + Graph - + - Class for saving Sparql Result Sets to the Sparql Results XML Format + Creates a new RDFS Ontology Property for the given resource in the given Graph + Resource + Graph - + - Saves the Result Set to the given File in the Sparql Results XML Format + Adds a new domain for the property - Result Set to save - File to save to + Resource + - + - Saves the Result Set to the given Stream in the Sparql Results XML Format + Adds a new domain for the property - - + Resource + - + - Method which generates 
the Sparql Query Results XML Format serialization of the Result Set + Adds a new domain for the property + Resource - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Clears all domains for the property - Warning Message + - + - Event raised when a non-fatal issue with the SPARQL Results being written is detected + Removes a domain for the property + Resource + - + - Gets the String representation of the writer which is a description of the syntax it produces + Removes a domain for the property + Resource - + - Static Helper class for the writing of RDF Graphs and SPARQL Result Sets to Strings rather than Streams/Files + Removes a domain for the property + Resource + - + - Writes the Graph to a String and returns the output in your chosen concrete RDF Syntax + Adds a new range for the property - Graph to save - Writer to use to generate the concrete RDF Syntax + Resource - - Since the API allows for any TextWriter to be passed to the Save() method of a IRdfWriter you can just pass in a StringWriter to the Save() method to get the output as a String. This method simply provides a wrapper to doing just that. - - + - Writes the given Triple Store to a String and returns the output in your chosen concrete RDF dataset syntax + Adds a new range for the property - Triple Store - Writer to use to generate conrete RDF Syntax + Resource - + - Writes the SPARQL Result Set to a String and returns the Output in your chosen format + Adds a new range for the property - SPARQL Result Set - Writer to use to generate the SPARQL Results output + Resource - + - Class for writing a Triple Store in named Graph TriG syntax to a file/stream + Clears all ranges for the property - - - For efficiency the TriG Writer splits it's writing over several threads (currently 4), these threads share a reference to a Context object which gives Global writing context eg. the target TextWriter being written to. 
Each thread generates temporary local writing contexts as it goes along, each of these is scoped to writing a specific Graph. Graphs are written to a StringWriter so the output for each Graph is built completely and then written in one go to the TextWriter specified as the target of the writing in the global context. - - - Designed to be Thread Safe - should be able to call Save() from several threads with no issue. See Remarks for potential performance impact of this. + - + - Gets/Sets whether High Speed Write Mode is permitted + Removes a range for the property + Resource + - + - Gets/Sets whether Pretty Printing is used + Removes a range for the property + Resource + - + - Gets/Sets the Compression Level for the writer + Removes a range for the property + Resource + - + - Gets/Sets whether N3 Compatability Mode is used, in this mode an = is written after Graph Names so an N3 parser can read the TriG file correctly + Adds a new equivalent property for the property - - Defaults to false from the 0.4.1 release onwards - + Resource + - + - Gets/Sets whether multi-threaded writing will be used to generate output faster + Adds a new equivalent property for the property + Resource + - + - Saves a Store in TriG (Turtle with Named Graphs) format + Adds a new equivalent property for the property - Store to save - File to save to + Resource + - + - Saves a Store in TriG (Turtle with Named Graphs) format + Adds a new equivalent property for the property - Store to save - Writer to save to + Property + + + This overload also adds this property as an equivalent property of the given property + - + - Generates the Output for a Graph as a String in TriG syntax + Clears all equivalent properties for this property - Context for writing the Store - Context for writing the Graph - + - Generates the Output for a Triple as a String in Turtle syntax + Removes an equivalent property for the property - Context for writing the Store - Context for writing the Graph + Resource + - + - 
Generates Output for Nodes in Turtle syntax + Removes an equivalent property for the property - Context for writing the Store - Context for writing the Graph - Node to generate output for - Segment of the Triple being written + Resource - + - Delegate for the SaveGraphs method + Removes an equivalent property for the property - Context for writing the Store + Resource + - + - Thread Worker method which writes Graphs to the output + Removes an equivalent property for the property - Context for writing the Store + Property + + + This overload also removes this property as an equivalent property of the given property + - + - Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of + Adds an inverse property for the property + Resource + - + - Internal Helper method which raises the Warning event only if there is an Event Handler registered + Adds an inverse property for the property - Warning Message + Resource + - + - Gets the String representation of the writer which is a description of the syntax it produces + Adds an inverse property for the property + Resource - + - Class for generating RDF in NTriples Concrete Syntax + Adds an inverse property for the property - Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue + Property + + + This overload also adds this property as an inverse property of the given property + - + - Creates a new writer + Removes all inverse properties for this property - NTriples Syntax Mode + - + - Creates a new writer + Removes an inverse property for the property + Resource + - + - Gets/Sets whether Triples are sorted before being Output + Removes an inverse property for the property + Resource + - + - Gets the type of the Triple Formatter used by this writer + Removes an inverse property for the property + Resource + - + - Gets/Sets the NTriples syntax mode + Removes an inverse 
property for the property + Property + + + This overload also removes this property as an inverse property of the given property + - + - Saves the Graph in NTriples Syntax to the given stream + Adds a sub-property for the property - Graph to save - File to save to + Resource + - + - Saves the Graph in NTriples Syntax to the given stream + Adds a sub-property for the property - Graph to save - Stream to save to + Resource + - + - Converts a Triple into relevant NTriples Syntax + Adds a sub-property for the property - Writer Context - Triple to convert + Resource - + - Converts a Node into relevant NTriples Syntax + Adds a sub-property for the property - Writer Context - Node to convert - Segment of the Triple being written + Property + + This overload also adds this property as a super-property of the given property + - + - Event which is raised when there is an issue with the Graph being serialized that doesn't prevent serialization but the user should be aware of + Clears all sub-properties of this property + - + - Internal Helper method which raises the Warning event only if there is an Event Handler registered + Removes a sub-property for the property - Warning Message + Resource + - + - Gets the String representation of the writer which is a description of the syntax it produces + Removes a sub-property for the property + Resource - + - Possible URI Reference Types + Removes a sub-property for the property + Resource + - + - Must be a QName + Removes a sub-property for the property + Property + + + This overload also removes this property as a super-property of the given property + - + - May be a QName or a URI + Adds a super-property for the property + Resource + - + - URI Reference + Adds a super-property for the property + Resource + - + - URI + Adds a super-property for the property + Resource + - + - Class containing constants for possible Compression Levels + Adds a super-property for the property - These are intended as guidance only, Writer 
implementations are free to interpret these levels as they desire or to ignore them entirely and use their own levels + Property + + + This overload also adds this property as a sub-property of the given property + - + - No Compression should be used (-1) + Removes all super-properties of this property + - + - Minimal Compression should be used (0) + Removes a super-property for the property + Resource + - + - Default Compression should be used (1) + Removes a super-property for the property + Resource + - + - Medium Compression should be used (3) + Removes a super-property for the property + Resource + - + - More Compression should be used (5) + Removes a super-property for the property + Property + + + This overload also removes this property as a sub-property of the given property + - + - High Compression should be used (10) + Gets all the Classes which are in the properties Domain - + - Class containing constants for standardised Writer Error Messages + Gets all the Classes which are in this properties Range - + - Error message produced when a User attempts to serialize a Graph containing Graph Literals + Gets all the equivalent properties of this property - + - Error message produced when a User attempts to serialize a Graph containing Unknown Node Types + Gets the sub-properties of this property (both direct and indirect) - + - Error message produced when a User attempts to serialize a Graph containing Triples with Literal Subjects + Gets the direct sub-classes of this class - + - Error message produced when a User attempts to serialize a Graph containing Triples with Literal Predicates + Gets the indirect sub-classes of this class - + - Error message produced when a User attempts to serialized a Graph containing Triples with Graph Literal Predicates + Gets the super-properties of this property (both direct and indirect) - + - Error message produced when a User attempts to serialize a Graph containing Triples with Blank Node Predicates + Gets the direct 
super-properties of this property - + - Error message produced when a User attempts to serialize a Graph containing URIs which cannot be reduced to a URIRef or QName as required by the serialization + Gets the indirect super-properties of this property - + - Error message produced when a User attempts to serialize a Graph containing collections where a collection item has more than one rdf:first triple + Gets whether this is a top property i.e. has no super properties defined - + - Error messages produced when errors occur in a multi-threaded writing process + Gets whether this is a btoom property i.e. has no sub properties defined - + - Error message produced when a User attempts to serialize a Variable Node in a format which does not support it + Gets the Sibling properties of this property, if this property is the root of the ontology nothing is returned even if there are multiple root properties - + - Gets an Error message indicating that Graph Literals are not serializable with the appropriate RDF format name inserted in the error + Gets all the inverse properties of this property - RDF format (syntax) - - + - Gets an Error message indicating that Unknown Node Types are not serializable with the appropriate RDF format name inserted in the error + Gets all the resources that use this property - RDF format (syntax) - - + - Gets an Error message indicating that Variable Nodes are not serializable with the appropriate RDF format name inserted in the error + Base class for representing a resource in an Ontology - RDF format (syntax) - + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Gets an Error message indicating that Literal Subjects are not serializable with the appropriate RDF format name inserted in the error + Storage of Literal Properties - RDF format (syntax) - - + - Gets an Error message indicating that Literal Predicates are not serializable with the appropriate RDF format name inserted in 
the error + Storage of Resource Properties - RDF format (syntax) - - + - Gets an Error message indicating that Graph Literal Predicates are not serializable with the appropriate RDF format name inserted in the error + The Node which this Resource is a wrapper around - RDF format (syntax) - - + - Gets an Error message indicating that Blank Node Predicates are not serializable with the appropriate RDF format name inserted in the error + The Graph from which this Resource originates - RDF format (syntax) - - + - Gets an Error message indicating that a multi-threading writer process failed + Creates a new Ontology Resource for the given Resource in the given Graph - RDF format (syntax) - + Resource + Graph - + - Indicates which Segment of a Triple Node Output is being generated for + Creates a new Ontology Resource for the given Resource in the given Graph - - Used by Writers and Formatters to ensure restrictions on which Nodes can appear where in the syntax are enforced - + Resource + Graph - + - Subject of the Triple + Gets the Resource that this Ontology Resource refers to - + - Predicate of the Triple + Gets the Graph that this Ontology Resource is from - + - Object of the Triple + Retrieves all the Triples which have the Resource as the subject and the given property URI as the predicate from the Graph and stores the values locally + Property URI + Whether only Literal values are acceptable - + - Controls what type of collections + Adds a new literal value for a property + Property URI + Literal Value + Whether the new value should be added to the Graph - + - Find all collections + Adds a new literal value for a property + Property URI + Literal Value + Whether the new value should be added to the Graph - + - Find explicit collections only (those specified with Blank Node syntax) + Adds a new value for a property + Property URI + Literal Value + Whether the new value should be added to the Graph - + - Find implicit collections only (those using rdf:first and 
rdf:rest) + Adds a new value for a property + Property URI + Literal Value + Whether the new value should be added to the Graph - + - Class used to store Collections as part of the writing process for Compressing Writers + Clears all values for a Literal Property + Property URI + Whether the removed values are removed from the Graph - + - Creates a new Instance of a Collection + Clears all values for a Literal Property - Whether the collection is explicit (specified using square bracket notation) or implicit (specified using normal parentheses) + Property URI + Whether the removed values are removed from the Graph - + - Gets whether this is an Explicit collection (specified using square bracket notation) + Clears all values for a Resource Property + Property URI + Whether the removed values are removed from the Graph - + - Gets/Sets whether the Collection has been written + Clears all values for a Resource Property + Property URI + Whether the removed values are removed from the Graph - + - Gets the Triples that make up the Collection + Removes a literal value for a property + Property URI + Value to remove + Whether the removed value is removed from the Graph - + - Possible Output Formats for Nodes + Removes a literal value for a property + Property URI + Value to remove + Whether the removed value is removed from the Graph - + - Format for NTriples + Removes a value for a property + Property URI + Value to remove + Whether the removed value is removed from the Graph - + - Format for Turtle + Removes a value for a property + Property URI + Value to remove + Whether the removed value is removed from the Graph - + - Format for Notation 3 + Adds a comment for this resource + Comment + - + - Format for Uncompressed Turtle + Adds a comment in a specific language for this resource + Comment + Language + - + - Format for Uncompressed Notation 3 + Removes all comments for this resource + - + - Helper methods for writers + Removes a comment for this resource + Comment + - 
+ - Determines whether a Blank Node ID is valid as-is when serialised in NTriple like syntaxes (Turtle/N3/SPARQL) + Removes a comment for this resource - ID to test + Comment - If false is returned then the writer will alter the ID in some way - + - Determines whether a Blank Node ID is valid as-is when serialised as NTriples + Removes a comment in a specific language for this resource - + Comment + Language - + - Determines whether a given Uri refers to one of the Default Graph URIs assigned to the default Graph when parsing from some RDF dataset syntax + Adds a new owl:differentFrom triple for the resource - Uri to test + Resource - + - Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes + Adds a new owl:differentFrom triple for the resource - Graph to find collections in - Triple Collection in which Triples that have been output are to be listed + Resource + - + - Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes + Adds a new owl:differentFrom triple for the resource - Writer Context - Collection Search Mode + Resource + + + This overload also adds this resource as different from the given resource + - + - Helper method which finds Collections expressed in the Graph which can be compressed into concise collection syntax constructs in some RDF syntaxes + Clears all owl:differentFrom triples for the resource - Writer Context + - + - Encodes values for use in XML + Removes a owl:differentFrom triple for the resource - Value to encode - - The value with any ampersands escaped to & - + Resource + - + - Abstract Base Class for HTML Writers which provides basic implementation of the IHtmlWriter interface + Removes a owl:differentFrom triple for the resource + Resource + - + - Gets/Sets a path to a Stylesheet which is used to format the Graph output + Removes a owl:differentFrom 
triple for the resource + Resource + + + This overload also removes this resource as different from the given resource + - + - Gets/Sets the CSS class used for the anchor tags used to display the URIs of URI Nodes + Adds a new rdfs:isDefinedBy triple for the resource + Resource + - + - Gets/Sets the CSS class used for the span tags used to display Blank Node IDs + Adds a new rdfs:isDefinedBy triple for the resource + Resource + - + - Gets/Sets the CSS class used for the span tags used to display Literals + Adds a new rdfs:isDefinedBy triple for the resource + Resource + - + - Gets/Sets the CSS class used for the anchor tags used to display Literal datatypes + Removes all rdfs:isDefinedBy triples for the resource + - + - Gets/Sets the CSS class used for the span tags used to display Literal language specifiers + Removes a rdfs:isDefinedBy triple for the resource + Resource + - + - Gets/Sets the CSS class used for div tags used to group chunks of markup into a box + Removes a rdfs:isDefinedBy triple for the resource + Resource + - + - Gets/Sets the Prefix applied to href attributes + Removes a rdfs:isDefinedBy triple for the resource + Resource + - + - Class for generating RDF/XML Concrete Syntax + Adds a label for the resource - - - This is a fast writer based on the fast writing technique used in the other non-RDF/XML Writers. - - - Note: If the Graph to be serialized makes heavy use of collections it may result in a StackOverflowException. 
To address this set the CompressionLevel property to < 5 - - + Label + - + - Creates a new RDF/XML Writer + Adds a label in a specific language for a resource + Label + Language + - + - Creates a new RDF/XML Writer + Clears all labels for a resource - Compression Level + - + - Creates a new RDF/XML Writer + Removes a specific label for a resource - Compression Level - Whether to use DTDs to further compress output + Label + - + - Creates a new RDF/XML Writer + Removes a label for a resource - Compression Level - Whether to use DTDs to further compress output - Whether to use attributes to encode triples with simple literal objects where possible + Label + - + - Gets/Sets Pretty Print Mode for the Writer + Removes a label in a specific language for a resource + Label + Language + - + - Gets/Sets the Compression Level in use + Adds a new owl:sameAs triple for the resource - - - Compression Level defaults to High - if Compression Level is set to below More i.e. < 5 then Collections will not be compressed into more compact syntax - - + Resource + - + - Gets/Sets whether DTDs are used in the output + Adds a new owl:sameAs triple for the resource + Resource + - + - Gets/Sets whether triples which have a literal object will be expressed as attributes rather than elements where possible (defaults to true) + Adds a new owl:sameAs triple for the resource + Resource + + + This overload also adds this resource as an owl:sameAs triple for the given resource + - + - Gets/Sets the Default Namespaces that are always available + Removes all owl:sameAs triples for the resource + - + - Gets the type of the Triple Formatter used by the writer + Removes a owl:sameAs triple for the resource + Resource + - + - Saves a Graph in RDF/XML syntax to the given File + Removes a owl:sameAs triple for the resource - Graph to save - Filename to save to + Resource + - + - Saves a Graph to an arbitrary output stream + Removes a owl:sameAs triple for the resource - Graph to save - Stream to save to + 
Resource + + + This overload also removes the owl:sameAs triple for the given resource + - + - Internal method which generates the RDF/Json Output for a Graph + Adds a new rdfs:seeAlso triple for the resource - Graph to save - Stream to save to + Resource + - + - Internal Helper method for raising the Warning event + Adds a new rdfs:seeAlso triple for the resource - Warning Message + Resource + - + - Event which is raised when there is a non-fatal issue with the RDF being output + Adds a new rdfs:seeAlso triple for the resource + Resource + - + - Gets the String representation of the writer which is a description of the syntax it produces + Removes all rdfs:seeAlso triples for the resource - + - HTML Schema Writer is a HTML Writer which writes a human readable description of a Schema/Ontology + Removes a rdfs:seeAlso triple for the resource + Resource + - + - Saves the Graph to the given File as an XHTML Table with embedded RDFa + Removes a rdfs:seeAlso triple for the resource - Graph to save - File to save to + Resource + - + - Saves the Result Set to the given Stream as an XHTML Table with embedded RDFa + Removes a rdfs:seeAlso triple for the resource - Graph to save - Stream to save to + Resource + - + - Internal method which generates the HTML Output for the Graph + Adds a new rdf:type triple for the resource - Writer Context + Resource + - + - Helper method for raising the Warning event + Adds a new rdf:type triple for the resource - Warning Message + Resource + - + - Event which is raised if there is a non-fatal error with the RDF being output + Adds a new rdf:type triple for the resource + Resource + - + - Gets the String representation of the writer which is a description of the syntax it produces + Removes all rdf:type triples for the resource - + - Class for generating RDF/XML Concrete Syntax + Removes a rdf:type triple for the resource - - - This is a fast writer based on the fast writing technique used in the other non-RDF/XML Writers. 
- - - Note: If the Graph to be serialized makes heavy use of collections it may result in a StackOverflowException. To address this set the CompressionLevel property to < 5 - - + Resource + - + - Creates a new RDF/XML Writer + Removes a rdf:type triple for the resource + Resource + - + - Creates a new RDF/XML Writer + Removes a rdf:type triple for the resource - Compression Level + Resource + - + - Creates a new RDF/XML Writer + Adds version information for the resource - Compression Level - Whether to use DTDs to further compress output + Version Information + - + - Gets/Sets Pretty Print Mode for the Writer + Clears version information for the resource + - + - Gets/Sets the Compression Level in use + Remove version information for the resource - - - Compression Level defaults to High - if Compression Level is set to below More i.e. < 5 then Collections will not be compressed into more compact syntax - - + Version Information + - + - Gets/Sets whether DTDs are used in the output + Remove version information for the resource + Version Information + - + - Gets/Sets the Default Namespaces that are always available + Gets the values for a property which is restricted to literals + Property URI + - + - Gets the type of the Triple Formatter used by this writer + Gets the values for a property which is restricted to literals + Property URI + - + - Saves a Graph in RDF/XML syntax to the given File + Gets the values for a property which can be any node type - Graph to save - Filename to save to + Property URI + - + - Saves a Graph to an arbitrary output stream + Gets the values for a property which can be any node type - Graph to save - Stream to save to + Property URI + - + - Internal method which generates the RDF/Json Output for a Graph + Gets the Version Information for the Resource - Graph to save - Stream to save to - + - Internal Helper method for raising the Warning event + Gets the Comment(s) for the Resource - Warning Message - + - Event which is raised when 
there is a non-fatal issue with the RDF being output + Gets the Label(s) for the Resource - + - Gets the String representation of the writer which is a description of the syntax it produces + Gets the See Also(s) for the Resource - - + - Class for saving RDF Graphs to a XHTML Table format with the actual Triples embedded as RDFa + Gets the Same As('s) for the Resource - - - Since not all Triples can be embedded into XHTML those Triples will not have RDFa generated for them but all Triples will be shown in a human readable format. Triples that can be serialized are roughly equivalent to anything that can be serialized in Turtle i.e. URI/BNode subject, URI predicates and URI/BNode/Literal object. - - - If you encode Triples which have values datatyped as XML Literals with this writer then round-trip Graph equality is not guaranteed as the RDFa parser will add appropriate Namespace declarations to elements as required by the specification - - - + - Gets/Sets the Default Namespaces to use for writers + Gets the Is Defined By(s) for the Resource - + - Saves the Graph to the given File as an XHTML Table with embedded RDFa + Gets the Different From(s) for the Resource - Graph to save - File to save to - + - Saves the Result Set to the given Stream as an XHTML Table with embedded RDFa + Gets the rdf:type's for the Resource - Graph to save - Stream to save to - + - Internal method which generates the HTML Output for the Graph + Gets all the Triples from the Graph where the Resource occurs as the Subject - Writer Context - + - Generates Output for a given Node + Gets all the Triples from the Graph where the Resource occurs as the Object - Writer Context - Node - + - Generates Output for a given Node + Gets all the Triples from the Graph where the Resource occurs as the Predicate - Writer Context - Node - Triple being written - + - Helper method for raising the Warning event + Gets all the Triples where the Resource occurs in any position - Warning Message - + - Event which 
is raised if there is a non-fatal error with the RDF being output + Gets the String representation of the Resource + + + This is either the first label (if any are declared) or the string representation of the INode that this resource wraps + - + - Gets the String representation of the writer which is a description of the syntax it produces + Casts a Resource into an Ontology Class + + Anything may be cast to a regardless of whether it actually represents a class in the ontology + - + - Class for generating CSV output from RDF Graphs + Casts a Resource into an Ontology Property + + + Anything may be cast to a regardless of whether it actually represents a property in the ontology + - + - Gets the type of the Triple Formatter used by the writer + Casts a Resource into a Graph + Resource + + + Equivalent to doing a SPARQL DESCRIBE query on this resource + - + - Saves a Graph to CSV format + Represents a Graph with a reasoner attached - Graph - File to save to + + + This class wraps an existing Graph and applies the given reasoner to it materialising the Triples in this Graph. 
The original Graph itself is not modified but can be accessed if necessary using the BaseGraph property + + + Any changes to this Graph (via Assert() and Retract()) affect this Graph - specifically the set of materialised Triples - rather than the original Graph around which this Graph is a wrapper + + + See Using the Ontology API for some informal documentation on the use of the Ontology namespace + + - + - Saves a Graph to CSV format + Creates a new Reasoner Graph which is a wrapper around an existing Graph with a reasoner applied and the resulting Triples materialised Graph - Writer to save to + Reasoner - + - Generates Node Output for the given Node + Creates a new Reasoner Graph which is a wrapper around an existing Graph with multiple reasoners applied and the resulting Triples materialised - Text Writer - Node - Triple Segment + Graph + Reasoner - + - Event which is raised if the Writer detects a non-fatal error while outputting CSV + Internal method which initialises the Graph by applying the reasoners and setting the Node and Triple collections to be union collections - + - Gets the String representation of the writer which is a description of the syntax it produces + Gets the Base Graph which the reasoning is based upon - - + - Class for generating CSV output from RDF Datasets + Namespace containing classes implementing the Simple Knowledge Organization System (SKOS) - + - Gets the type of the Triple Formatter used by the writer + Represents a labelled group of SKOS concepts - + - Saves a Triple Store to CSV Format + Creates a new collection for the given resource - Triple Store to save - File to save to + Resource representing the collection - + - Saves a Triple Store to CSV Format + Gets the members of the collection - Triple Store to save - Writer to save to - + - Delegate for the SaveGraphs method + Represents a SKOS concept - Context for writing the Store - + - Thread Worker method which writes Graphs to the output + Creates a new concept for the 
given resource - Context for writing the Store + Resource representing the concept - + - Generates the Output for a Graph as a String in CSV syntax + Gets the concept schemes the concept is contained in - Context for writing the Store - Context for writing the Graph - - + - Generates Output for the given Node + Get the concept schemes the concept is the top concept of - Writer Context - Node - Triple Segment - + - Event which is raised when a non-fatal error occurs while outputting CSV + Gets the preferred labels of the concept - + - Gets the String representation of the writer which is a description of the syntax it produces + Gets the alternative labels of the concept - - + - Class for generating Notation 3 Concrete RDF Syntax which provides varying levels of Syntax Compression + Gets the hidden labels of the concept - Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Creates a new Notation 3 Writer which uses the Default Compression Level + Gets a unique identifiers of the concept in a given concept scheme - + - Creates a new Notation 3 Writer which uses the given Compression Level + Gets the general notes of the concept - Desired Compression Level - See Remarks for this classes CompressionLevel property to see what effect different compression levels have - + - Gets/Sets whether Pretty Printing is used + Gets the modification notes of the concept - + - Gets/Sets whether High Speed Write Mode should be allowed + Gets the formal explanation of the concept - + - Gets/Sets the Compression Level to be used + Gets the editorial notes the concept - - - If the Compression Level is set to None then High Speed mode will always be used regardless of the input Graph and the HighSpeedMorePermitted property. - - - If the Compression Level is set to Minimal or above then full Predicate Object lists will be used for Triples. 
- - - If the Compression Level is set to More or above then Blank Node Collections and Collection syntax will be used if the Graph contains Triples that can be compressed in that way. - - + - Gets/Sets the Default Namespaces that are always available + Gets examples of the use of the concept - + - Gets the type of the Triple Formatter used by this writer + Gets notes about the past of the concept - + - Saves a Graph to a file using Notation 3 Syntax + Gets notes that help to clarify the meaning and/or the use of the concept - Graph to save - File to save to - + - Saves a Graph to the given Stream using Notation 3 Syntax + Gets concepts related by meaning - Graph to save - Stream to save to - + - Generates the Notation 3 Syntax for the Graph + Gets more general concepts - + - Generates Output for Triples as a single "s p o." Triple + Gets more specific concepts - Writer Context - Triple to output - - Used only in High Speed Write Mode - + - Generates Output for Nodes in Notation 3 syntax + Gets associated concepts - Writer Context - Node to generate output for - Segment of the Triple being output - Indent to use for pretty printing - - + - Internal Helper method which converts a Collection into Notation 3 Syntax + Gets more general concepts (transitive) - Writer Context - Collection to convert - Indent to use for pretty printing - - + - Helper method for generating Parser Warning Events + Gets more specific concepts (transitive) - Warning Message - + - Event which is raised when there is a non-fatal issue with the Graph being written + Gets concepts with comparable meaning from other concept schemes - + - Gets the String representation of the writer which is a description of the syntax it produces + Gets confidently interchangeable concepts from other concept schemes - - + - Class for serializing a Triple Store in the NQuads (NTriples plus context) syntax + Gets interchangeably similar concepts from other concept schemes - + - Creates a new writer + Gets more 
general concepts from other concept schemes - + - Creates a new writer + Gets more specific concepts from other concept schemes - NQuads Syntax mode to use - + - Controls whether Pretty Printing is used + Gets associated concepts from other concept schemes - - For NQuads this simply means that Graphs in the output are separated with Whitespace and comments used before each Graph - - + - Gets/Sets whether Multi-Threaded Writing + Represents an aggregation of one or more SKOS concepts - + - Gets the type of the Triple Formatter used by this writer + Creates a new concept schemes for the given resource + Resource representing the concept scheme - + - Gets/Sets the NQuads syntax mode + Gets the top concepts of the concept scheme - + - Saves a Store in NQuads format + Represents a wrapper around a SKOS graph providing convenient access to concepts, schemes, and collections - Store to save - File to save to - + - Saves a Store in NQuads format + Creates a new SKOS graph - Store to save - Writer to save to - + - Converts a Triple into relevant NQuads Syntax + Creates a new SKOS graph for the given graph - Writer Context - Triple to convert - Graph URI - + The graph this SKOS graph wraps - + - Converts a Node into relevant NTriples Syntax + Gets concept schems contained in the graph - Node to convert - Writer Context - Triple Segment being written - - + - Delegate for the SaveGraphs method + Gets concepts contained in the graph - Context for writing the Store - + - Thread Worker method which writes Graphs to the output + Gets collections contained in the graph - Context for writing the Store - + - Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of + Gets ordered collections contained in the graph - + - Internal Helper method which raises the Warning event only if there is an Event Handler registered + Static Helper class for the SKOS API - Warning Message - + - Gets the String 
representation of the writer which is a description of the syntax it produces + SKOS namespace and prefix - - + - Class for generating RDF/Json Concrete Syntax + SKOS namespace and prefix - -

- Uses the Json.Net library by James Newton-King to output RDF/Json according to the specification located on the Talis n2 Wiki -

-
- Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue
- + - Gets/Sets Pretty Print Mode for the Writer + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Saves a Graph in RDF/Json syntax to the given File + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Graph to save - Filename to save to - + - Saves a Graph to an arbitrary output stream + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Graph to save - Stream to save to - + - Internal method which generates the RDF/Json Output for a Graph + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Graph to save - Stream to save to - + - Internal Helper method for raising the Warning event + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Warning Message - + - Event which is raised when there is a non-fatal issue with the RDF being output + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Gets the String representation of the writer which is a description of the syntax it produces + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - - + - Class for saving SPARQL Result Sets to CSV format (not a standardised format) + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Saves a SPARQL Result Set to CSV format + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - File to save to - + - Saves a SPARQL Result Set to CSV format + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - Writer to save to - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Warning Message - + - Event 
raised when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Gets the String representation of the writer which is a description of the syntax it produces + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - - + - Class for saving SPARQL Result Sets in the RDF serialization in the RDF format of your choice (default Turtle) + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Creates a new SPARQL RDF Writer which will save Result Sets in the RDF serialization using Turtle syntax + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Creates a new SPARQL RDF Writer which will save Result Sets in the RDF serialization in your chosen RDF Syntax + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - RDF Writer to use - + - Saves the SPARQL Result Set to the given File + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set to save - File to save to - + - Saves the SPARQL Result Set to the given Stream + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set to save - Stream to save to - + - Method which generates the RDF Graph of a SPARQL Result Set + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Gets the String representation of 
the writer which is a description of the syntax it produces + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - - + - Class for saving SPARQL Result Sets to TSV format (not a standardised format) + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Saves a SPARQL Result Set to TSV format + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - File to save to - + - Saves a SPARQL Result Set to TSV format + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - Writer to save to - + - Helper Method which raises the Warning event when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Warning Message - + - Event raised when a non-fatal issue with the SPARQL Results being written is detected + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Gets the String representation of the writer which is a description of the syntax it produces + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - - + - Class for generating arbitrary XML Output from SPARQL Result Sets by transforming the XML Results Format via an XSLT stylesheet + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - + - Creates a new SPARQL XSLT Writer + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Stylesheet URI - + - Creates a new SPARQL XSLT Writer + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Stylesheet URI - + - Saves a SPARQL Result Set to the given File + Constant URIs for classes and properties exposed by the SKOS API and its derived classes - Result Set - File to save to - + - Saves a SPARQL Result Set 
to the given Text Writer + Represents SKOS resources that can be members of collections (concepts and collections) - Result Set - Text Writer to write to - + - Gets the String representation of the writer which is a description of the syntax it produces + Represents an ordered group of SKOS concepts - - + - Class for serialzing Triple Stores in the TriX format + Creates a new ordered collection for the given resource + Resource representing the ordered collection - + - Saves a Store in TriX format + Gets the ordered list of members of the collection - Store to save - File to save to - + - Saves a Store in TriX format + Represents a SKOS resource - Store to save - Writer to save to - + - Event which is raised when there is an issue with the Graphs being serialized that doesn't prevent serialization but the user should be aware of + Gets the original resource underlying the SKOS resource - + - Internal Helper method which raises the Warning event only if there is an Event Handler registered + Class for representing errors that occur in RDF Storage - Warning Message - + - Gets the String representation of the writer which is a description of the syntax it produces + Creates a new RDF Storage Exception - + Error Message - + - Class for generating TSV files from RDF Graphs + Creates a new RDF Storage Exception + Error Message + Exception which caused this Exception - + - Gets the type of the Triple Formatter used by this writer + + Namespace for storage classes which provide support for using arbitrary backing Stores + + + Storage is managed via the IStorageProvider interface, see the Triple Store Integration documentation on the main website for more detail. + +

Data Provider Libraries

+ + From the 0.5.0 release onwards any triple store integration that requires additional dependencies are provided with their own library to reduce dependencies in the Core library and allow that functionality to be optional. The following stores are currently provided in separate libraries: + +
    +
  • Virtuoso - Virtuoso support can be found in the dotNetRDF.Data.Virtuoso.dll library and requires one additional dependency.
  • +
- + - Saves a Graph to TSV format + + Namespace for storage classes which provide support for managing servers that provide multiple backing Stores + + + Servers are managed via the interface, a server can provide lists of available stores, retrieve a reference to a store, create new stores and delete existing stores. The exact capabilites may depend on the implementation and may be inspected via the property. + - Graph - File to save to - + - Saves a Graph to TSV format + + Namespace for storage classes which provide support for creating new stores in conjunction with a + + + In order for an to create a new store it requires an instance of the interface from this namespace. The basic interface provides only a Store ID, specific implementations may provide many more customizable properties to allow new stores to be created that take advantage of the capabilties of the server the store is being created on. A provides methods to generate the basic templates that it accepts and should be used in preference to creating any of the implementations directly. + - Graph - Writer to save to - + - Event which is raised if the Writer detects a non-fatal error with the RDF being output + + Namespace containing implementations of which provide templates for creating new stores on Sesame servers + - + - Gets the String representation of the writer which is a description of the syntax it produces + Abstract base class for templates for creating new stores in Sesame - + + + Sesame templates generate a configuration graph like the one mentioned in the Sesame documentation, this graph is POSTed to the SYSTEM repository causing a new store to be created. 
+ + - + - Class for generating TSV output from RDF Datasets + Constants for Sesame repository configuration namespaces - + - Gets the type of the Triple Formatter used by this writer + Constants for Sesame repository configuration namespaces - + - Saves a Triple Store to TSV format + Constants for Sesame repository configuration namespaces - Triple Store to save - File to save to - + - Saves a Triple Store to TSV format + Constants for Sesame repository configuration namespaces - Triple Store to save - Writer to save to - + - Delegate for the SaveGraphs method + Constants for Sesame repository configuration namespaces - Context for writing the Store - + - Thread Worker method which writes Graphs to the output + Constants for Sesame repository configuration namespaces - Context for writing the Store - + - Generates the Output for a Graph as a String in TSV syntax + Creates a new Sesame template - Context for writing the Store - Context for writing the Graph - + ID + Template name + Template description - + - Generates Output for the given Node + Gets/Sets the descriptive label for a Sesame store - Writer Context - Node - Triple Context - + - Event which is raised if the Writer detects a non-fatal error with the RDF being output + Gets a Graph representing the RDF that must be inserted into Sesame's SYSTEM repository in order to create the desired store + - + - Gets the String representation of the writer which is a description of the syntax it produces + Gets the basic template graph which is a graph with all the required namespaces registered and the ID and label filled in - + - Class for generating RDF in Turtle Syntax + Gets the Node used to refer to the store configuration context + + + + + Templates for creating remote Sesame stores - Similar in speed to the CompressingTurtleWriter but doesn't use the full Blank Node and Collection syntax compressions + + This template generates a Sesame repository config graph like the following, depending on exact options the 
graph may differ: + + + @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. + @prefix rep: <http://www.openrdf.org/config/repository#>. + @prefix hr: <http://www.openrdf.org/config/repository/http#>. + + [] a rep:Repository ; + rep:repositoryImpl [ + rep:repositoryType "openrdf:HTTPRepository" ; + hr:repositoryURL <{%Sesame server location|http://localhost:8080/openrdf-sesame%}/repositories/{%Remote repository ID|SYSTEM%}> + ]; + rep:repositoryID "{this.ID}" ; + rdfs:label "{this.Label}" . + + + The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. + - Designed to be Thread Safe - should be able to call the Save() method from multiple threads on different Graphs without issue - + - Creates a new Turtle Writer + Creates a new Template + Store ID - + - Creates a new Turtle Writer + Gets/Sets the remote Sesame server to connect to - Turtle Syntax - + - Gets/Sets whether Pretty Printing is used + Gets/Sets the ID of the remote repository to connect to - + - Gets/Sets whether the Writer is allowed to use High Speed write mode + Gets the template graph - High Speed Write Mode is engaged when the Writer determines that the contents of the Graph are not well suited to Turtle syntax compressions. Usually the writer compresses triples into groups by Subject using Predicate-Object lists to output the Triples relating to each Subject. If the number of distinct Subjects is greater than 75% of the Triples in the Graph then High Speed write mode will be used, in High Speed mode all Triples are written fully and no grouping of any sort is done. + - + - Gets the type of the Triple Formatter used by this writer + Template for creating Sesame memory stores + + + + This template generates a Sesame repository config graph like the following, depending on exact options the graph may differ: + + + @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. 
+ @prefix rep: <http://www.openrdf.org/config/repository#>. + @prefix sr: <http://www.openrdf.org/config/repository/sail#>. + @prefix sail: <http://www.openrdf.org/config/sail#>. + @prefix ms: <http://www.openrdf.org/config/sail/memory#>. + + [] a rep:Repository ; + rep:repositoryID "{this.ID}" ; + rdfs:label "{this.Label}" ; + rep:repositoryImpl [ + rep:repositoryType "openrdf:SailRepository" ; + sr:sailImpl [ + sail:sailType "openrdf:MemoryStore" ; + ms:persist {this.Persist} ; + ms:syncDelay {this.SyncDelay} + ] + ]. + + + The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. + + + + + + Creates a new memory store template + Store ID - + - Saves a Graph to a File + Gets the template graph used to create the store - Graph to save - Filename to save to + - + - Saves a Graph using an arbitrary TextWriter + Gets/Sets whether to persist the store - Graph to save - Writer to save using - + - Generates the Output for a Graph + Gets/Sets the sync delay - Context for writing the Graph - + - Generates the Output for a Node in Turtle Syntax + Gets/Sets whether to enable direct type hierarchy inferencing - Context for writing the Graph - Node to generate Output for - Segment of the Triple being written - - + - Helper method for raising the Warning event + Gets/Sets whether to enable RDF Schema Inferencing - Warning Message - + - Event which is raised when a non-fatal issue with the Graph being serialized is encountered + Sesame Native index modes - + - Gets the String representation of the writer which is a description of the syntax it produces + SPOC indexes - - + - Valued Node representing a Time Span value + POSC indexes - + - Creates a new Time span node + Template for creating Sesame Native stores - Graph - Time Span + + + This template generates a Sesame repository config graph like the following, depending on exact options the graph may 
differ: + + + @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. + @prefix rep: <http://www.openrdf.org/config/repository#>. + @prefix sr: <http://www.openrdf.org/config/repository/sail#>. + @prefix sail: <http://www.openrdf.org/config/sail#>. + @prefix ns: <http://www.openrdf.org/config/sail/native#>. + + [] a rep:Repository ; + rep:repositoryID "{this.ID}" ; + rdfs:label "{this.Label}" ; + rep:repositoryImpl [ + rep:repositoryType "openrdf:SailRepository" ; + sr:sailImpl [ + sail:sailType "openrdf:NativeStore" ; + ns:tripleIndexes "{this.IndexMode}" + ] + ]. + + + The placeholders of the form {this.Property} represent properties of this class whose values will be inserted into the repository config graph and used to create a new store in Sesame. + + - + - Creates a new Time span node + Creates a Sesame Native store template - Graph - Time Span - Lexical value + Store ID - + - Creates a new Time span node + Gets the template graph used to specify the configuration of a Sesame repository - Graph - Time Span - Lexical value - Data type URI + Template Graph - + - Gets the date time value as a string + Gets/Sets the Indexing Mode - - + - Throws an error as date times cannot be converted to integers + Gets/Sets whether to enable direct type hierarchy inferencing - - + - Throws an error as date times cannot be converted to decimals + Gets/Sets whether to enable RDF Schema Inferencing - - + - Throws an error as date times cannot be converted to floats + + Namespace containing implementations of which provide templates for creating new stores on Stardog servers + - - + - Throws an error as date times cannot be converted to doubles + Abstract base implementation of a Store Template for creating Stardog Stores - - + - Throws an error as date times cannot be converted to booleans + Creates a new Stardog Template - + Store ID + Template Name + Template Description + Stardog Database Type - + - Gets the date time value of the node + Gets the Database Type - - + - Gets the 
date time value of the node + Gets/Sets the minimum differential index limit - - + - Gets the time span value of the node + Gets/Sets the maximum differential merge limit - - + - Gets the URI of the datatype this valued node represents as a String + Gets/Sets whether the database should canonicalise literals - + - Gets the numeric type of the node + Gets/Sets whether to optimize indexes for named graph queries - + - Namespace for specialised node implementations and the interface, this is an extension of the interface that provides strongly typed access to the value of a node. - - These implementations are primarily used internally in the SPARQL engine, however as these all derive from the standard implementations they can be used interchangeably with those if desired. - + Gets/Sets whether to persist indexes - + - Valued Node representing boolean values + Gets/Sets whether to persist indexes synchronously - + - Creates a new boolean valued node + Gets/Sets whether to automatically update statistics - Graph the node belong to - Boolean Value - Lexical Value - + - Creates a new boolean valued node + Gets/Sets the active graphs for ICV - Graph the node belongs to - Boolean Value - + - Gets the string value of the boolean + Enables/Disables ICV - - + - Throws an error as booleans cannot be cast to integers + Gets/Sets the reasoning mode for ICV - - + - Throws an error as booleans cannot be cast to decimals + Gets/Sets whether to perform automatic consistency checking on transactions - - + - Throws an error as booleans cannot be cast to floats + Enables/Disables punning - - + - Throws an error as booleans cannot be cast to doubles + Gets/Sets the graphs that contain the schema (TBox) that are used for reasoning - - + - Gets the boolean value + Enables/Disables Full Text search - - + - Throws an error as booleans cannot be cast to date times + Gets/Sets the Search re-indexing mode - - + - Throws an error as booleans cannot be cast to date times + Gets/Sets whether to 
use durable transactions - - + - Throws an error as booleans cannot be cast to a time span + Validates that the template is filled out such that a store can be created from it - + Enumeration of errors that occurred - + - Gets the URI of the datatype this valued node represents as a String + Does any additional validation a derived template may require + Error collection to add to - + - Gets the numeric type of the node + Gets the JSON Template for creating a store + - + - Valued node representing a byte (8-bit unsigned integer) + Template for creating Stardog Disk stores - + - Creates a new byte valued node + Creates a new template - Graph the node belongs to - Byte value - Lexical value + Store ID - + - Creates a new byte valued node + Template for creating Stardog in-memory stores - Graph the node belongs to - Byte value - + - Gets the integer value of the byte + Creates a new template - + Store ID - + - Gets the decimal value of the byte + Interface for templates for the provisioning of new stores - + + + This interface is intentionally very limited, the generic type constraints on the interface allow for specific implementations of that interface to futher constrain their implementation to accept only relevant implementations of this interface when provisioning new stores. + + + Specific implementations will likely add various properties that allow end users to configure implementation specific parameters. It is suggested that implementors include System.ComponentModel attributes on their implementations. 
+ + - + - Gets the float value of the byte + Gets/Sets the ID for the Store - - + - Gets the float value of the double + Gets the name of the type of store the template will create - - + - Value node representing a signed byte (8-bit signed integer) + Gets the description of the type of store the template will create - + - Creates a new signed byte node + Validates the template returning an enumeration of error messages - Graph the node belongs to - Signed Byte value - Lexical value + - + - Creates a new signed byte node + A basic store template where the only parameter is the Store ID - Graph the node belongs to - Signed Byte value - + - Gets the integer value of the signed byte + Creates a new template - + Store ID - + - Gets the decimal value of the signed byte + Creates a new template - + Store ID + Template Name + Template Description - + - Gets the float value of the signed byte + Gets/Sets the Store ID - - + - Gets the double value of the signed byte + Gets the name of the type of store the template will create - - + - Valued Node representing a Date Time value + Gets the description of the type of store the template will create - + - Creates a new Date Time valued node + Validates the template - Graph the node belongs to - Date Time value - Lexical Value - Datatype URI + + + This default implementation does no validation, derived classes must override this to add their required validation + - + - Creates a new Date Time valued node + Gets the string representation of the template which is the Template Name - Graph the node belongs to - Date Time value - Lexical Value - Datatype URI + - + - Creates a new Date Time valued node + Represents an AllegroGraph server, may be used to access and manage stores within a catalog on the server - Graph the node belongs to - Date Time value - Datatype URI - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store - Graph the node belongs to - Date Time value - Datatype URI + Base URI for 
the Store + Catalog ID - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - Graph the node belongs to - Date Time value + Base Uri for the Store - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store - Graph the node belongs to - Date Time value + Base Uri for the Store + Catalog ID + Username for connecting to the Store + Password for connecting to the Store - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - Graph the node belongs to - Date Time value - Lexical Value + Base Uri for the Store + Username for connecting to the Store + Password for connecting to the Store - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store - Graph the node belongs to - Date Time value - Lexical Value + Base Uri for the Store + Catalog ID + Proxy Server - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - Graph the node belongs to - Date Time value - Date Time offset value - Lexical Value + Base Uri for the Store + Proxy Server - + - Creates a new Date Time valued node + Creates a new Connection to an AllegroGraph store - Graph the node belongs to - Date Time value - Date Time offset value - Lexical Value - Data Type URI + Base Uri for the Store + Catalog ID + Username for connecting to the Store + Password for connecting to the Store + Proxy Server - + - Gets the String form of the Date Time + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - Value - Datatype URI - + Base Uri for the Store + Username for connecting to the Store + Password for connecting to the Store + Proxy Server - + - Gets the String form of the Date Time + Gets a default template for creating a new Store - 
Value - Datatype URI + Store ID - + - Gets the date time value as a string + Gets all available templates for creating a new Store + Store ID - + - Throws an error as date times cannot be converted to integers + Creates a new Store (if it doesn't already exist) - + Template for creating the new Store - + - Throws an error as date times cannot be converted to decimals + Requests that AllegroGraph deletes a Store - + Store ID - + - Throws an error as date times cannot be converted to floats + Get the lists of stores available on the Server - + - Throws an error as date times cannot be converted to doubles + Gets a Store within the current catalog + Store ID + + AllegroGraph groups stores by catalogue, you may only use this method to obtain stores within your current catalogue + - + - Throws an error as date times cannot be converted to booleans + Gets the List of Stores available on the server within the current catalog asynchronously - + Callback + State to pass to callback - + - Gets the date time value of the node + Gets a default template for creating a new Store + Store ID + Callback + State to pass to callback - + - Gets the date time value of the node + Gets all available templates for creating a new Store + Store ID + Callback + State to pass to callback - + - Throws an error as date times cannot be cast to a time span + Creates a new Store on the server within the current catalog asynchronously - + Template to create the store from + Callback + State to pass to callback - + - Gets the URI of the datatype this valued node represents as a String + Deletes a Store from the server within the current catalog asynchronously + Store ID + Callback + State to pass to callback - + - Gets the numeric type of the node + Gets a Store within the current catalog asynchronously + Store ID + Callback + State to pass to call back + + + AllegroGraph groups stores by catalog, you may only use this method to obtain stores within your current catalogue + - + - Valued Node 
representing a Date value + Helper method for creating HTTP Requests to the Store + Path to the Service requested + Acceptable Content Types + HTTP Method + Querystring Parameters + - + - Creates a new Date valued node + Serializes the connection's configuration - Graph the node belongs to - Date Time value + Configuration Serialization Context - + - Creates a new Date valued node + Interface for storage servers which are systems capable of managing multiple stores which are exposed as instances - Graph the node belongs to - Date Time value - Lexical Value + + This interface may be implemented either separately or alongside . It is quite acceptable for an implementation of that provides a connection to a store sitting on a server that manages multiple stores to also provide an implementation of this interface in order to allow access to other stores on the server. + - + - Creates a new Date valued node + Returns information on the IO behaviour of a Server - Graph the node belongs to - Date Time value - + - Creates a new Date valued node + Gets the list of available stores - Graph the node belongs to - Date Time value - Lexical Value + - + - A Valued Node representing decimal nodes + Gets a default template for creating a store with the given ID + ID + - + - Creates a new decimal valued node + Gets all possible templates for creating a store with the given ID - Graph the node belongs to - Decimal value - Lexical value + ID + - + - Creates a new decimal valued node + Creates a new Store with the given ID - Graph the node belongs to - Decimal value + Template for the new store + Whether creation succeeded - + - Gets the integer value of the decimal + Deletes the Store with the given ID - + Store ID + + Whether attempting to delete the Store that you are accessing is permissible is up to the implementation + - + - Gets the decimal value + Gets the Store with the given ID + Store ID - + - Gets the float value of the decimal + Interface for storage providers which are 
capable of managing multiple stores asynchronously - - + - Gets the double value of the decimal + Gets information on the IO Behaviour of the Server - - + - A Valued Node representing double values + Lists the available stores asynchronously + Callback + State to pass to the callback - + - Creates a new double valued node + Gets a default template for creating a store with the given ID - Graph the node belongs to - Double value - Lexical value + ID + Callback + State to pass to the callback + - + - Creates a new double valued node + Gets all available templates for creating a store with the given ID - Graph the node belongs to - Double value + ID + Callback + State to pass to the callback - + - Gets the integer value of the double + Creates a store asynchronously - + Template for the store to be created + Callback + State to pass to the callback + + Behaviour with regards to whether creating a store overwrites an existing store with the same ID is at the discretion of the implementation and SHOULD be documented in an implementations comments + - + - Gets the decimal value of the double + Deletes a store asynchronously - + ID of the store to delete + Callback + State to pass to the callback - + - Gets the float value of the double + Gets a store asynchronously - + Store ID + Callback + State to pass to the callback - + - Gets the double value + Represents a connection to a Sesame Server - - + - Extension Methods related to valued nodes + System Repository ID - + - Takes a INode and converts it to a IValuedNode if it is not already an instance that implements the interface + Base Uri for the Server - Node - Valued Node - + - Tries to get the result of calling AsBoolean() on a node throwing an error if the node is null + Username for accessing the Server - Node - Thrown if the input is null of the specific valued node cannot be cast to a boolean - - + - A Valued Node representing float values + Password for accessing the Server - + - Creates a new Float valued node + 
Whether the User has provided credentials for accessing the Server using authentication - Graph the node belongs to - Float value - Lexical value - + - Creates a new Float valued node + Repositories Prefix - Graph the node belongs to - Float value - + - Gets the integer value of the float + Available Sesame template types - - + - Gets the decimal value of the float + Creates a new connection to a Sesame HTTP Protocol supporting Store - + Base Uri of the Store - + - Gets the float value + Creates a new connection to a Sesame HTTP Protocol supporting Store - + Base Uri of the Store + Username to use for requests that require authentication + Password to use for requests that require authentication - + - Gets the double value of the float + Creates a new connection to a Sesame HTTP Protocol supporting Store - + Base Uri of the Store + Proxy Server - + - Interface for Valued Nodes + Creates a new connection to a Sesame HTTP Protocol supporting Store - - - This interface extends the basic INode interface with methods related to turning the lexical value into a strongly typed .Net value. It is intended primarily for use within SPARQL expression evaluation where we need to do a lot of value conversions and currently waste a lot of effort (and thus performance) doing that. 
- - + Base Uri of the Store + Username to use for requests that require authentication + Password to use for requests that require authentication + Proxy Server - + - Gets the String value of the Node + Gets the IO Behaviour of the server - - - This is distinct from ToString() because that method will typically include additional information like language specifier/datatype as appropriate whereas this method is used to produce a string as would be produced by applying the STR() function from SPARQL - - + - Gets the Long value of the Node + Gets a default template for creating a store + Store ID - Thrown if the Node cannot be converted to a Long - + - Gets the Decimal value of the Node + Gets all available templates for creating a store + Store ID - Thrown if the Node cannot be converted to a Decimal - + - Gets the Float value of the Node + Creates a new Store based on the given template + Template - Thrown if the Node cannot be converted to a Float + + + Templates must inherit from + + - + - Gets the Double value of the Node + Gets the Store with the given ID + Store ID - Thrown if the Node cannot be converted to a Double + + If the Store ID requested represents the current instance then it is acceptable for an implementation to return itself. Consumers of this method should be aware of this and if necessary use other means to create a connection to a store if they want a unique instance of the provider. 
+ - + - Gets the Boolean value of the Node + Deletes the Store with the given ID - - Thrown if the Node cannot be converted to a Boolean + Store ID + + Whether attempting to delete the Store that you are accessing is permissible is up to the implementation + - + - Gets the Date Time value of the Node + Gets the list of available stores - Thrown if the Node cannot be converted to a Date Time - + - Gets the Date Time Offset value of the Node + Gets a default template for creating a store + Store ID + Callback + State to pass to the callback - Thrown if the Node cannot be converted to a Date Time Offset - + - Gets the Time Span value of the Node + Gets all available templates for creating a store + Store ID + Callback + State to pass to the callback - + - Gets the URI of the datatype this valued node represents as a String + Creates a new store based on the given template + Template + Callback + State to pass to the callback - Either String.Empty if no type or the string form of the type URI + + Template must inherit from + - + - Gets the Numeric Type of the Node + Gets a store asynchronously + Store ID + Callback + State to pass to the callback + + If the store ID requested matches the current instance an instance MAY invoke the callback immediately returning a reference to itself + - + - A Valued Node with a Long value + Deletes a store asynchronously + ID of the store to delete + Callback + State to pass to the callback - + - Creates a new long valued node + Lists the available stores asynchronously - Graph the node belongs to - Long value - Lexical Value + Callback + State to pass to the callback - + - Creates a new long valued node + Helper method for creating HTTP Requests to the Store - Graph the node belongs to - Long value - Lexical Value - Datatype URI + Path to the Service requested + Acceptable Content Types + HTTP Method + Querystring Parameters + - + - Creates a new long valued node + Ensures the connection to the Sesame SYSTEM repository is prepared if 
it isn't already - Graph the node belongs to - Long value - + - Gets the long value + Disposes of the server - - + - Gets the decimal value of the long + Serializes the connection's configuration - + Configuration Serialization Context - + - Gets the float value of the long + Abstract implementation of a management connection to a Stardog server using the HTTP protocol - - + - Gets the double value of the long + The base URI of the Stardog server - - + - A Valued Node with a unsigned long value + The URI of the admin API - + - Creates a new unsigned long valued node + The username to use for the connection - Graph the node belongs to - Unsigned Long value - Lexical Value - + - Creates a new unsigned long valued node + The password to use for the connection - Graph the node belongs to - Unsigned Long value - Lexical Value - Datatype URI - + - Creates a new usigned long valued node + True if a user name and password are specified, false otherwise - Graph the node belongs to - Unsigned Long value - + - Gets the long value of the ulong + Available Stardog template types - - + - Gets the decimal value of the ulong + Creates a new connection to a Stardog Server - + Base Uri of the Server - + - Gets the float value of the ulong + Creates a new connection to a Stardog Server - + Base Uri of the Server + Username + Password - + - Gets the double value of the ulong + Creates a new connection to a Stardog Server - + Base Uri of the Server + Proxy Server - + - A Valued Node with a numeric value + Creates a new connection to a Stardog Server + Base Uri of the Server + Username + Password + Proxy Server - + - Creates a new numeric valued node + Gets the IO Behaviour of the server - Graph the node belongs to - Lexical Value - Datatype URI - SPARQL Numeric Type - + - Gets the string value of the node + Lists the database available on the server - + - Gets the integer value + Gets a default template for creating a new Store + Store ID - + - Gets the decimal value + Gets all 
available templates for creating a new Store + Store ID - + - Gets the float value + Creates a new Store based off the given template + Template + + + Templates must inherit from + + + Uses some code based off on answers here to help do the multipart form data request. + + - + - Gets the double value + Deletes a Store with the given ID - + Store ID - + - Gets the boolean value + Gets a provider for the Store with the given ID + Store ID - + - Throws an error as numerics cannot be converted to date times + Lists all databases available on the server - + Callback + State to pass to the callback - + - Throws an error as numerics cannot be converted to date times + Gets a default template for creating a new Store + Store ID + Callback + State to pass to the callback - + - Throws an error as numerics cannot be cast to a time span + Gets all available templates for creating a new Store + Store ID + Callback + State to pass to the callback - - - Gets the URI of the datatype this valued node represents as a String - - - + - Gets the numeric type of the node + Creates a new store based on the given template + Template + Callback + State to pass to the callback + + + Template must inherit from + + - + - Valued node whose value is a string or can only be converted to a string + Deletes a database from the server + Store ID + Callback + State to pass to the callback - + - Creates a new String Node + Gets a database from the server - Graph the Node belongs to - String value - Datatype URI + Store ID + Callback + State to pass to the callback - + - Creates a new String Node + Create a request to the Stardog server's Admin API - Graph the Node belongs to - String value - Language Specifier + The admin API service path + Accept header content + HTTP method to use + Additional request parameters + - + - Creates a new String Node + Disposes of the server - Graph the Node belongs to - String value - + - Gets the string value + Serializes the connection's configuration - + 
Configuration Serialization Context - + - Throws an error as the string cannot be cast to an integer + Static Class containing constants relevant to provisioning new Stardog stores - - + - Throws an error as the string cannot be cast to a decimal + Constants for valid Stardog Options - - + - Throws an error as the string cannot be cast to a float + Constants for valid Stardog Options - - + - Throws an error as the string cannot be cast to a double + Constants for valid Stardog Options - - + - Gets the boolean value of the string + Constants for valid Stardog Options - - + - Throws an error as the string cannot be cast to a date time + Constants for valid Stardog Options - - + - Throws an error as the string cannot be cast to a date time + Constants for valid Stardog Options - - + - Throws an error as the string cannot be cast to a time span + Constants for valid Stardog Options - - + - Gets the URI of the datatype this valued node represents as a String + Constants for valid Stardog Options - + - Gets the numeric type of the expression + Constants for valid Stardog Options - + - Abstract Base Class for Blank Nodes + Constants for valid Stardog Options - + - Internal Only Constructor for Blank Nodes + Constants for valid Stardog Options - Graph this Node belongs to - + - Internal Only constructor for Blank Nodes + Constants for valid Stardog Options - Graph this Node belongs to - Custom Node ID to use - + - Internal Only constructor for Blank Nodes + Constants for valid Stardog Options - Node Factory from which to obtain a Node ID - + - Unparameterized Constructor for deserialization usage only + Constants for valid Stardog Options - + - Deserialization Constructor + Constants for valid Stardog Options - Serialization Information - Streaming Context - + - Returns the Internal Blank Node ID this Node has in the Graph + Constants for valid Stardog Options - - Usually automatically assigned and of the form autosXXX where XXX is some number. 
If an RDF document contains a Blank Node ID of this form that clashes with an existing auto-assigned ID it will be automatically remapped by the Graph using the BlankNodeMapper when it is created. - - + - Indicates whether this Blank Node had its ID assigned for it by the Graph + Constants for valid Stardog Options - + - Implementation of Equals for Blank Nodes + Constants for valid Stardog Options - Object to compare with the Blank Node - - - Blank Nodes are considered equal if their internal IDs match precisely and they originate from the same Graph - - + - Implementation of Equals for Blank Nodes + Constants for valid Stardog Database types - Object to compare with the Blank Node - - - Blank Nodes are considered equal if their internal IDs match precisely and they originate from the same Graph - - + - Determines whether this Node is equal to another + Constants for valid Stardog Database types - Other Blank Node - - + - Determines whether this Node is equal to a Graph Literal Node (should always be false) + Constanst for valid Search Re-Index Modes - Graph Literal Node - - + - Determines whether this Node is equal to a Literal Node (should always be false) + Constanst for valid Search Re-Index Modes - Literal Node - - + - Determines whether this Node is equal to a URI Node (should always be false) + Constants for special named graph URIs - URI Node - - + - Determines whether this Node is equal to a Variable Node (should always be false) + Constants for special named graph URIs - Variable Node - - + - Determines whether this Node is equal to a Blank Node + Constants for various Stardog reasoning settings - Blank Node - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constant for various Stardog integer settings - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constant for various Stardog integer settings - Node to test against - - + - Returns an Integer 
indicating the Ordering of this Node compared to another Node + Constants for various Stardog boolean flags - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constants for various Stardog boolean flags - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constants for various Stardog boolean flags - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constants for various Stardog boolean flags - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Constants for various Stardog boolean flags - Node to test against - - + - Returns a string representation of this Blank Node in QName form + Constants for various Stardog boolean flags - - + - Gets the data for serialization + Constants for various Stardog boolean flags - Serialization Information - Streaming Context - + - Reads the data for XML deserialization + Constants for various Stardog boolean flags - XML Reader - + - Writes the data for XML serialization + Constants for various Stardog boolean flags - XML Writer - + - Throws an error as a Blank Node cannot be cast to a String + Constants for various Stardog boolean flags - - + - Throws an error as a Blank Node cannot be cast to an integer + Pattern for valid Stardog database names - - + - Throws an error as a Blank Node cannot be cast to a decimal + Validates whether a Database Name is valid + Database Name - + - Throws an error as a Blank Node cannot be cast to a float + Validates whether a Database Type is valid + Database Type - + - Throws an error as a Blank Node cannot be cast to a double + Validates whether a Search Re-Index Mode is valid + Mode - + - Throws an error as a Blank Node cannot be cast to a boolean + Validates whether a Named Graph URI is valid + URI - + - Throws an error as a Blank Node cannot be cast to 
a date time + Management connection for Stardog 1.* servers - - + - Throws an error as a Blank Node cannot be cast to a date time offset + Creates a new connection to a Stardog Server - + Base Uri of the Server - + - Throws an error as a Blank Node cannot be case to a time span + Creates a new connection to a Stardog Server - + Base Uri of the Server + Username + Password - + - Gets the URI of the datatype this valued node represents as a String + Creates a new connection to a Stardog Server + Base Uri of the Server + Proxy Server - + - Gets the Numeric Type of the Node + Creates a new connection to a Stardog Server + Base Uri of the Server + Username + Password + Proxy Server - + - Class for representing Blank RDF Nodes + Gets a provider for the Store with the given ID + Store ID + - + - Internal Only Constructor for Blank Nodes + Gets a database from the server - Graph this Node belongs to + Store ID + Callback + State to pass to the callback - + - Internal Only constructor for Blank Nodes + Management connection for Stardog 2.* servers - Graph this Node belongs to - Custom Node ID to use - + - Internal Only constructor for Blank Nodes + Creates a new connection to a Stardog Server - Node Factory from which to obtain a Node ID + Base Uri of the Server - + - Constructor for deserialization usage only + Creates a new connection to a Stardog Server + Base Uri of the Server + Username + Password - + - Deserialization Constructor + Creates a new connection to a Stardog Server - Serialization Information - Streaming Context + Base Uri of the Server + Proxy Server - + - Implementation of Compare To for Blank Nodes + Creates a new connection to a Stardog Server - Blank Node to Compare To - - - Simply invokes the more general implementation of this method - + Base Uri of the Server + Username + Password + Proxy Server - + - Determines whether this Node is equal to a Blank Node + Gets a provider for the Store with the given ID - Blank Node + Store ID - + - Class for 
representing RDF Graphs + Gets a database from the server - Safe for multi-threaded read-only access but unsafe if one/more threads may modify the Graph by using the Assert, Retract or Merge methods + Store ID + Callback + State to pass to the callback - + - Creates a new instance of a Graph + Management connection for Stardog 3.* servers - + - Creates a new instance of a Graph with an optionally empty Namespace Map + Creates a new connection to a Stardog Server - Whether the Namespace Map should be empty + Base Uri of the Server - + - Creates a new instance of a Graph using the given Triple Collection + Creates a new connection to a Stardog Server - Triple Collection + Base Uri of the Server + Username + Password - + - Creates a new instance of a Graph using the given Triple Collection and an optionally empty Namespace Map + Creates a new connection to a Stardog Server - Triple Collection - Whether the Namespace Map should be empty + Base Uri of the Server + Proxy Server - + - Deserialization Constructor + Creates a new connection to a Stardog Server - Serialization Information - Streaming Context + Base Uri of the Server + Username + Password + Proxy Server - + - Asserts a Triple in the Graph + Gets a provider for the Store with the given ID - The Triple to add to the Graph + Store ID + - + - Asserts a List of Triples in the graph + Gets a database from the server - List of Triples in the form of an IEnumerable + Store ID + Callback + State to pass to the callback - + - Retracts a Triple from the Graph + Management connection for Stardog servers running the latest version, current this is 3.* - Triple to Retract - Current implementation may have some defunct Nodes left in the Graph as only the Triple is retracted - + - Retracts a enumeration of Triples from the graph + Creates a new connection to a Stardog Server - Enumeration of Triples to retract + Base Uri of the Server - + - Returns the UriNode with the given Uri if it exists + Creates a new connection to a 
Stardog Server - The Uri of the Node to select - Either the UriNode Or null if no Node with the given Uri exists + Base Uri of the Server + Username + Password - + - Returns the UriNode with the given QName if it exists + Creates a new connection to a Stardog Server - The QName of the Node to select - + Base Uri of the Server + Proxy Server - + - Returns the LiteralNode with the given Value if it exists + Creates a new connection to a Stardog Server - The literal value of the Node to select - Either the LiteralNode Or null if no Node with the given Value exists - The LiteralNode in the Graph must have no Language or DataType set + Base Uri of the Server + Username + Password + Proxy Server - + - Returns the LiteralNode with the given Value in the given Language if it exists + Class for connecting to an AllegroGraph Store - The literal value of the Node to select - The Language Specifier for the Node to select - Either the LiteralNode Or null if no Node with the given Value and Language Specifier exists + + + Connection to AllegroGraph is based on their new HTTP Protocol which is an extension of the Sesame 2.0 HTTP Protocol. 
The specification for the AllegroGraph protocol can be found here + + + If you wish to use a Store which is part of the Root Catalog on an AllegroGraph 4.x and higher server you can either use the constructor overloads that omit the catalogID parameter or pass in null as the value for that parameter + + - + - Returns the LiteralNode with the given Value and given Data Type if it exists + Creates a new Connection to an AllegroGraph store - The literal value of the Node to select - The Uri for the Data Type of the Literal to select - Either the LiteralNode Or null if no Node with the given Value and Data Type exists + Base URI for the Store + Catalog ID + Store ID - + - Returns the Blank Node with the given Identifier + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - The Identifier of the Blank Node to select - Either the Blank Node or null if no Node with the given Identifier exists + Base Uri for the Store + Store ID - + - Gets all the Triples involving the given Node + Creates a new Connection to an AllegroGraph store - The Node to find Triples involving - Zero/More Triples + Base Uri for the Store + Catalog ID + Store ID + Username for connecting to the Store + Password for connecting to the Store - + - Gets all the Triples involving the given Uri + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - The Uri to find Triples involving - Zero/More Triples + Base Uri for the Store + Store ID + Username for connecting to the Store + Password for connecting to the Store - + - Gets all the Triples with the given Node as the Subject + Creates a new Connection to an AllegroGraph store - The Node to find Triples with it as the Subject - Zero/More Triples + Base Uri for the Store + Catalog ID + Store ID + Proxy Server - + - Gets all the Triples with the given Uri as the Subject + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and 
higher) - The Uri to find Triples with it as the Subject - Zero/More Triples + Base Uri for the Store + Store ID + Proxy Server - + - Gets all the Triples with the given Node as the Predicate + Creates a new Connection to an AllegroGraph store - The Node to find Triples with it as the Predicate - + Base Uri for the Store + Catalog ID + Store ID + Username for connecting to the Store + Password for connecting to the Store + Proxy Server - + - Gets all the Triples with the given Uri as the Predicate + Creates a new Connection to an AllegroGraph store in the Root Catalog (AllegroGraph 4.x and higher) - The Uri to find Triples with it as the Predicate - Zero/More Triples + Base Uri for the Store + Store ID + Username for connecting to the Store + Password for connecting to the Store + Proxy Server - + - Gets all the Triples with the given Node as the Object + Gets the Catalog under which the repository you are connected to is located - The Node to find Triples with it as the Object - - + - Gets all the Triples with the given Uri as the Object + Makes a SPARQL Update request to the Allegro Graph server - The Uri to find Triples with it as the Object - Zero/More Triples + SPARQL Update - + - Selects all Triples with the given Subject and Predicate + Makes a SPARQL Update request to the Allegro Graph server - Subject - Predicate - + SPARQL Update + Callback + State to pass to the callback - + - Selects all Triples with the given Subject and Object + Does nothing as AllegroGraph does not require the same query escaping that Sesame does - Subject - Object + Query to escape - + - Selects all Triples with the given Predicate and Object + Helper method for creating HTTP Requests to the Store - Predicate - Object + Path to the Service requested + Acceptable Content Types + HTTP Method + Querystring Parameters - + + + + - Class for representing RDF Graphs when you don't want Indexing + Gets a String which gives details of the Connection - - Gives better load performance but 
poorer lookup performance - + - + - Creates a new Graph which is not indexed + Serializes the connection's configuration + Configuration Serialization Context - + - Wrapper class for Graph Collections + Static Helper class containing internal extensions methods used to support the BaseAsyncSafeConnector class - + - Internal Constant used as the Hash Code for the default graph + Loads a Graph asynchronously + Storage Provider + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Dictionary of Graph Uri Enhanced Hash Codes to Graphs + Loads a Graph asynchronously - See GetEnhancedHashCode() + Storage Provider + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Creates a new Graph Collection + Saves a Graph aynchronously + Storage Provider + Graph to save + Callback + State to pass to the callback - + - Checks whether the Graph with the given Uri exists in this Graph Collection + Updates a Graph asynchronously - Graph Uri to test - + Storage Provider + URI of the Graph to update + Triples to add + Triples to remove + Callback + State to pass to the callback - + - Adds a Graph to the Collection + Deletes a Graph asynchronously - Graph to add - Sets whether the Graph should be merged with an existing Graph of the same Uri if present - Throws an RDF Exception if the Graph has no Base Uri or if the Graph already exists in the Collection and the parameter was not set to true + Storage Provider + URI of the Graph to delete + Callback + State to pass to the callback - + - Removes a Graph from the Collection + Lists Graphs in the store asynchronously - Uri of the Graph to remove + Storage Provider + Callback + State to pass to the callback - + - Gets the number of Graphs in the Collection + Queries a store asynchronously + Storage Provider + SPARQL Query + Callback + State to pass to the callback - + - Provides access to the Graph URIs of Graphs in the Collection + Queries a store 
asynchronously + Storage Provider + SPARQL Query + RDF Handler + Results Handler + Callback + State to pass to the callback - + - Gets a Graph from the Collection + Updates a store asynchronously - Graph Uri - + Storage Provider + SPARQL Update + Callback + State to pass to the callback - + - Gets the Enumerator for the Collection + Abstract Base Class for IStorageProvider implementations for which it is safe to do the IAsyncStorageProvider implementation simply by farming out calls to the synchronous methods onto background threads (i.e. non-HTTP based connectors) - - + - Gets the Enumerator for this Collection + Gets the parent server (if any) - - + - Disposes of the Graph Collection + Gets the parent server (if any) - Invokes the Dispose() method of all Graphs contained in the Collection - + - Thread Safe decorator around a Graph collection + Loads a Graph from the Store - - Dependings on your platform this either provides MRSW concurrency via a or exclusive access concurrency via a - + Graph to load into + URI of the Graph to load - + - Creates a new Thread Safe decorator around the default + Loads a Graph from the Store + Graph to load into + URI of the Graph to load - + - Creates a new Thread Safe decorator around the supplied graph collection + Loads a Graph from the Store - Graph Collection + Handler to load with + URI of the Graph to load - + - Enters the write lock + Loads a Graph from the Store + Handler to load with + URI of the Graph to load - + - Exits the write lock + Saves a Graph to the Store + Graph to save - + - Enters the read lock + Updates a Graph in the Store + URI of the Graph to update + Triples to be added + Triples to be removed - + - Exits the read lock + Updates a Graph in the Store + URI of the Graph to update + Triples to be added + Triples to be removed - + - Checks whether the Graph with the given Uri exists in this Graph Collection + Deletes a Graph from the Store - Graph Uri to test - + URI of the Graph to delete - + - Adds a 
Graph to the Collection + Deletes a Graph from the Store - Graph to add - Sets whether the Graph should be merged with an existing Graph of the same Uri if present - Throws an RDF Exception if the Graph has no Base Uri or if the Graph already exists in the Collection and the parameter was not set to true + URI of the Graph to delete - + - Removes a Graph from the Collection + Lists the Graphs in the Store - Uri of the Graph to remove + - + - Gets the number of Graphs in the Collection + Indicates whether the Store is ready to accept requests - + - Gets the Enumerator for the Collection + Gets whether the Store is read only - - + - Provides access to the Graph URIs of Graphs in the Collection + Gets the IO Behaviour of the Store - + - Gets a Graph from the Collection + Gets whether the Store supports Triple level updates via the UpdateGraph() method - Graph Uri - - + - Disposes of the Graph Collection + Gets whether the Store supports Graph deletion via the DeleteGraph() method - Invokes the Dispose() method of all Graphs contained in the Collection - + - Abstract Base Class for Graph Literal Nodes + Gets whether the Store supports listing graphs via the ListGraphs() method - + - Creates a new Graph Literal Node in the given Graph which represents the given Subgraph + Diposes of the Store - Graph this node is in - Sub Graph this node represents - + - Creates a new Graph Literal Node whose value is an empty Subgraph + Loads a Graph from the Store asynchronously - Graph this node is in + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Deserializer Constructor + Loads a Graph from the Store asynchronously - Serialization Information - Streaming Context + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Deserialization Only constructor + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - 
Gets the Subgraph that this Node represents + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Implementation of the Equals method for Graph Literal Nodes. Graph Literals are considered Equal if their respective Subgraphs are equal + Saves a Graph to the Store asynchronously - Object to compare the Node with - + Graph to save + Callback + State to pass to the callback - + - Implementation of the Equals method for Graph Literal Nodes. Graph Literals are considered Equal if their respective Subgraphs are equal + Updates a Graph in the Store asychronously - Object to compare the Node with - + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Determines whether this Node is equal to a Blank Node (should always be false) + Updates a Graph in the Store asychronously - Blank Node - + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Determines whether this Node is equal to a Graph Literal Node + Deletes a Graph from the Store - Graph Literal Node - + URI of the Graph to delete + Callback + State to pass to the callback - + - Determines whether this Node is equal to a Literal Node (should always be false) + Deletes a Graph from the Store - Literal Node - + URI of the Graph to delete + Callback + State to pass to the callback - + - Determines whether this Node is equal to a URI Node (should always be false) + Lists the Graphs in the Store asynchronously - URI Node - + Callback + State to pass to the callback - + - Determines whether this Node is equal to a Variable Node (should always be false) + Abstract Base Class for HTTP based Storage API implementations - Variable Node - + + + As .NET Core does not currently provide an implementaton of the System.Net.WebProxy class, this implementation + is restricted in functionality. 
Many of the methods it exposes will raise a PlatformNotSupported exception, + but are currently required to enable dependent classes to compile. As such it is strongly recommended that + developers targeting .NET Core should NOT plan on making any use of the proxy capabilities of this class. + + - + - Determines whether this Node is equal to a Graph Literal Node + Creates a new connector - Graph Literal Node - - + - Implementation of ToString for Graph Literals which produces a String representation of the Subgraph in N3 style syntax + Whether the User has provided credentials for accessing the Store using authentication - - + - Implementation of CompareTo for Graph Literals + Sets a Proxy Server to be used - Node to compare to - - - Graph Literal Nodes are greater than Blank Nodes, Uri Nodes, Literal Nodes and Nulls - + Proxy Address - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Sets a Proxy Server to be used - Node to test against - + Proxy Address - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets/Sets a Proxy Server to be used - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Clears any in-use credentials so subsequent requests will not use a proxy server - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Sets Credentials to be used for Proxy Server - Node to test against - + Username + Password - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Sets Credentials to be used for Proxy Server - Node to test against - + Username + Password + Domain - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets/Sets Credentials to be used for Proxy Server - Node to test against - - + - Gets the Serialization Information + Clears the in-use proxy credentials so subsequent requests still use 
the proxy server but without credentials - Serialization Information - Streaming Context - + - Reads the data for XML deserialization + Gets/Sets the HTTP Timeouts used specified in milliseconds - XML Reader + + + Defaults to 30 seconds (i.e. the default value is 30,000) + + + It is important to understand that this timeout only applies to the HTTP request portions of any operation performed and that the timeout may apply more than once if a POST operation is used since the timeout applies separately to obtaining the request stream to POST the request and obtaining the response stream. Also the timeout does not in any way apply to subsequent work that may be carried out before the operation can return so if you need a hard timeout on an operation you should manage that yourself. + + + When set to a zero/negative value then the standard .Net timeout of 100 seconds will apply, use if you want the maximum possible timeout i.e. if you expect to launch extremely long running operations. + + + Not supported under Silverlight, Windows Phone and Portable Class Library builds + + - + - Writes the data for XML serialization + Password for accessing the Store - XML Writer - + - Throws an error as Graph Literals cannot be cast to a string + Password for accessing the Store - - + - Throws an error as Graph Literals cannot be cast to an integer + Helper method which applies standard request options to the request, these currently include proxy settings and HTTP timeout - + HTTP Web Request + HTTP Web Request with standard options applied - + - Throws an error as Graph Literals cannot be cast to a decimal + Helper method which adds standard configuration information (proxy and timeout settings) to serialized configuration - + Object Node representing the IStorageProvider whose configuration is being serialized + Serialization Context - + - Throws an error as Graph Literals cannot be cast to a float + Abstract Base Class for HTTP Based IAsyncStorageProvider implementations - + + + 
It is expected that most classes extending from this will also then implement separately for their synchronous communication, this class purely provides partial helper implementations for the asynchronous communication + + - + - Throws an error as Graph Literals cannot be cast to a double + Creates a new Base Async HTTP Connector - - + - Throws an error as Graph Literals cannot be cast to a boolean + Gets the parent server (if any) - - + - Throws an error as Graph Literals cannot be cast to a date time + Gets the parent server (if any) - - + - Throws an error as Graph Literals cannot be cast to a date time + Loads a Graph from the Store asynchronously - + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Throws an error as Graph Literals cannot be cast to a time span + Loads a Graph from the Store asynchronously - + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Gets the URI of the datatype this valued node represents as a String + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Gets the numeric type of the node + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Class for representing Graph Literal Nodes which are supported in highly expressive RDF syntaxes like Notation 3 + Helper method for doing async load operations, callers just need to provide an appropriately prepared HTTP request + HTTP Request + Handler to load with + Callback + State to pass to the callback - + - Creates a new Graph Literal Node in the given Graph which represents the given Subgraph + Saves a Graph to the Store asynchronously - Graph this node is in + Graph to save + Callback + State to pass to the callback - + - Creates a new Graph Literal Node whose value is an empty Subgraph + Helper method for doing async 
save operations, callers just need to provide an appropriately perpared HTTP requests and a RDF writer which will be used to write the data to the request body - Graph this node is in - Sub-graph this node represents + HTTP request + RDF Writer + Graph to save + Callback + State to pass to the callback - + - Deserialization Constructor + Updates a Graph in the Store asychronously - Serialization Information - Streaming Context + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Deserialization Only Constructor + Updates a Graph in the Store asychronously + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Implementation of Compare To for Graph Literal Nodes + Helper method for doing async update operations, callers just need to provide an appropriately prepared HTTP request and a RDF writer which will be used to write the data to the request body - Graph Literal Node to Compare To - - - Simply invokes the more general implementation of this method - + HTTP Request + RDF writer + URI of the Graph to update + Triples + Callback + State to pass to the callback - + - Determines whether this Node is equal to a Graph Literal Node + Deletes a Graph from the Store - Graph Literal Node - + URI of the Graph to delete + Callback + State to pass to the callback - + - Implementation of which wraps the standard System.Web + Deletes a Graph from the Store + URI of the Graph to delete + Callback + State to pass to the callback - + - Creates a new Web Context + Helper method for doing async delete operations, callers just need to provide an appropriately prepared HTTP request - HTTP Context + HTTP request + Whether a 404 response counts as success + URI of the Graph to delete + Callback + State to pass to the callback - + - Gets the HTTP Request + Lists the Graphs in the Store asynchronously + Callback + State to pass to the callback - + - 
Gets the HTTP Response + Indicates whether the Store is ready to accept requests - + - Gets the User + Gets whether the Store is read only - + - Implementation of which wraps the standard System.Web + Gets the IO Behaviour of the Store - + - Implementation of which wraps the standard System.Web + Gets whether the Store supports Triple level updates via the UpdateGraph() method - + - Interface which represents the context of some request to a HTTP server + Gets whether the Store supports Graph deletion via the DeleteGraph() method - - Abstraction which allows us to reuse code for request and response processing across different HTTP server environments - - + - Gets the HTTP Request + Gets whether the Store supports listing graphs via the ListGraphs() method - + - Gets the HTTP Response + Diposes of the Store - + - Gets the User + Helper method for doing async operations where a sequence of HTTP requests must be run + HTTP requests + Callback + State to pass to the callback - + - Interface which represents a HTTP request + Structure for representing Triples that are waiting to be Batch written to the Database - - Abstraction which allows us to reuse code for request processing across different HTTP server environments - - + - Gets the MIME Types specified in the Accept header + Creates a new Batch Triple + Triple + Graph ID to store Triple for - + - Gets the Content Length + Triple - + - Gets the Content Type + Graph ID - + - Gets the Headers + Equality for Batch Triples + Object to test + - + - Gets the HTTP Method + Hash Code for Batch Triples + - + - Gets the Input Stream + Allows you to treat an RDF Dataset File - NQuads, TriG or TriX - as a read-only generic store - + - Gets the Querystring parameters + Creates a new Dataset File Manager + File to load from + Whether to load asynchronously - + - Gets the URL + Internal helper method for loading the data + File to load from - + - Gets the Users Host Address + Makes a query against the in-memory copy of the Stores 
data + SPARQL Query + - + - Interface which represents a HTTP response + Makes a query against the in-memory copy of the Stores data processing the results with one of the given handlers - - Abstraction which allows us to reuse code for response processing across different HTTP server environments - + RDF Handler + Results Handler + SPARQL Query - + - Adds a Header to the resposne + Loads a Graph from the Dataset - Name - Value + Graph to load into + URI of the Graph to load - + - Clears the Response + Loads a Graph from the Dataset with the given Handler + RDF Handler + URI of the Graph to load - + - Writes a String to the response body + Loads a Graph from the Dataset - Data to write + Graph to load into + URI of the Graph to load - + - Gets/Sets the Content Encoding for the response + Loads a Graph from the Dataset with the given Handler + RDF Handler + URI of the Graph to load - + - Gets/Sets the Content Type for the response + Throws an error since this Manager is read-only + Graph to save + Always thrown since this Manager provides a read-only connection - + - Gets the Headers for the response + Gets the Save Behaviour of the Store - + - Gets the output stream + Throws an error since this Manager is read-only + Graph URI + Triples to be added + Triples to be removed - + - Gets/Sets the HTTP Status Code for the response + Throws an error since this Manager is read-only + Graph URI + Triples to be added + Triples to be removed - + - - Namespace for Classes designed to aid the deployment of Linked Data, SPARQL Endpoints and other Semantic Web technologies as part of ASP.Net web applications. - - - The ASP.Net support leverages the Configuration API heavily and so only requires only 1 <appSetting> like so: - - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - - This setting provides a pointer to an RDF configuration graph that uses the Configuration Vocabulary to express the configuration of HTTP Handlers for your ASP.Net application. 
We also now provide a command line tool rdfWebDeploy which can be used to automate the testing and deployment of this configuration. See documentation on the Configuration API for more detail. Individual handler documentation gives basic examples of Handler configurations. - + Returns that Updates are not supported since this is a read-only connection - + - - Namespace for Configuration classes which are used to load and store the configuration settings for HTTP Handlers provided as part of the Web namespace. - + Throws an error since this connection is read-only + URI of the Graph to delete + Thrown since you cannot delete a Graph from a read-only connection - + - - Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Graph Store HTTP Protocol - + Throws an error since this connection is read-only + URI of the Graph to delete + Thrown since you cannot delete a Graph from a read-only connection - + - Abstract Base Class for representing SPARQL Graph Store HTTP Protocol for Graph Management Handler configurations + Returns that deleting graphs is not supported - + - Protocol processor + Returns that the Manager is ready if the underlying file has been loaded - + - Service Description Graph + Returns that the Manager is read-only - + - Creates a new Protocol Handler Configuration + Gets the list of URIs of Graphs in the Store - HTTP Context - Configuration Graph - Object Node + - + - Gets the SPARQL Graph Store HTTP Protocol for Graph Management processor which is to be used + Returns that listing graphs is supported - + - Gets the Service Description Graph + Gets the Source File this manager represents a read-only view of - + - Adds Description of Features for the given Handler Configuration + Gets the String representation of the Connection - Service Description Graph - Description Node for the Service + - + - A basic Protocol Handler Configuration implentation + Disposes of the Manager - + - Creates a new 
Protocol Handler Configuration + Serializes the connection's configuration - HTTP Context - Configuration Graph - Object Node + Configuration Serialization Context - + + Class for connecting to 4store + + - Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Query handlers + Depending on the version of RASQAL used for your 4store instance and the options it was built with some kinds of queries may not suceed or return unexpected results. - + + Prior to the 1.x releases 4store did not permit the saving of unamed Graphs to the Store or Triple level updates. There was a branch of 4store that supports Triple level updates and you could tell the connector if your 4store instance supports this when you instantiate it. From the 0.4.0 release of the library onwards this support was enabled by default since the 1.x builds of 4store have this feature integrated into them by default. + + - + - Abstract Base class for SPARQL Query Handlers + Creates a new 4store connector which manages access to the services provided by a 4store server + Base Uri of the 4store + + Note: As of the 0.4.0 release 4store support defaults to Triple Level updates enabled as all recent 4store releases have supported this. You can still optionally disable this with the two argument version of the constructor + - + - Query Processor to be used + Creates a new 4store connector which manages access to the services provided by a 4store server + Base Uri of the 4store + Indicates to the connector that you are using a 4store instance that supports Triple level updates + + If you enable Update support but are using a 4store instance that does not support Triple level updates then you will almost certainly experience errors while using the connector. 
+ - + - Default Graph Uri for queries + Creates a new 4store connector which manages access to the services provided by a 4store server + Base Uri of the 4store + Proxy Server + + Note: As of the 0.4.0 release 4store support defaults to Triple Level updates enabled as all recent 4store releases have supported this. You can still optionally disable this with the two argument version of the constructor + - + - Default Timeout for Queries + Creates a new 4store connector which manages access to the services provided by a 4store server + Base Uri of the 4store + Indicates to the connector that you are using a 4store instance that supports Triple level updates + Proxy Server + + If you enable Update support but are using a 4store instance that does not support Triple level updates then you will almost certainly experience errors while using the connector. + - + - Default Partial Results on Timeout behaviour + Returns whether this connector has been instantiated with update support or not + + If this property returns true it does not guarantee that the 4store instance actually supports updates it simply indicates that the user has enabled updates on the connector. If Updates are enabled and the 4store server being connected to does not support updates then errors will occur. 
+ - + - Whether the Handler supports Timeouts + Returns that the Connection is ready - + - Whether the Handler supports Partial Results on Timeout + Returns that the Connection is not read-only - + - Querystring Field name for the Timeout setting + Gets the IO Behaviour of 4store - + - Querystring Field name for the Partial Results setting + Returns that deleting Graph is supported - + - Whether a Query Form should be shown to the User + Returns that Listing Graphs is supported - + - Default Sparql Query + Loads a Graph from the 4store instance + Graph to load into + Uri of the Graph to load - + - SPARQL Describe Algorithm to use (null indicates default is used) + Loads a Graph from the 4store instance using an RDF Handler + RDF Handler + URI of the Graph to load - + - SPARQL Syntax to use (defaults to library default which is SPARQL 1.1 unless changed) + Loads a Graph from the 4store instance + Graph to load into + URI of the Graph to load - + - Service Description Graph + Loads a Graph from the 4store instance + RDF Handler + URI of the Graph to load - + - Query Optimiser to use (null indicates default is used) + Saves a Graph to a 4store instance (Warning: Completely replaces any existing Graph with the same URI) + Graph to save + + + Completely replaces any existing Graph with the same Uri in the store + + + Attempting to save a Graph which doesn't have a Base Uri will result in an error + + + Thrown if you try and save a Graph without a Base Uri or if there is an error communicating with the 4store instance - + - Algebra Optimisers to use (empty list means only standard optimisers apply) + Updates a Graph in the store + Uri of the Graph to Update + Triples to be added + Triples to be removed + + May throw an error since the default builds of 4store don't support Triple level updates. 
There are builds that do support this and the user can instantiate the connector with support for this enabled if they wish, if they do so and the underlying 4store doesn't support updates errors will occur when updates are attempted. + - + - Creates a new Query Handler Configuration + Updates a Graph in the store - HTTP Context - Configuration Graph - Object Node + Uri of the Graph to Update + Triples to be added + Triples to be removed + + May throw an error since the default builds of 4store don't support Triple level updates. There are builds that do support this and the user can instantiate the connector with support for this enabled if they wish, if they do so and the underlying 4store doesn't support updates errors will occur when updates are attempted. + - + - Creates a new Query Handler Configuration + Makes a SPARQL Query against the underlying 4store Instance - Configuration Graph - Object Node + SPARQL Query + A Graph or a SparqlResultSet + + Depending on the version of RASQAL used and the options it was built with some kinds of queries may not suceed or return unexpected results. 
+ - + - Gets the Processor used to evaluate queries + Makes a SPARQL Query against the underlying 4store Instance processing the results with the appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query - + - Gets the Default Graph Uri + Deletes a Graph from the 4store server + Uri of Graph to delete - + - Whether the Remote Endpoint supports specifying Query Timeout as a querystring parameter + Deletes a Graph from the 4store server + Uri of Graph to delete - + - Gets the Default Query Execution Timeout + Lists the Graphs in the Store + - + - Querystring field name for the Query Timeout for Remote Endpoints which support it + Applies a SPARQL Update against 4store + SPARQL Update + + Note: Please be aware that some valid SPARQL Updates may not be accepted by 4store since the SPARQL parser used by 4store does not support some of the latest editors draft syntax changes. + - + - Whether the Remote Endpoint supports specifying Partial Results on Timeout behaviour as a querystring parameter + Saves a Graph to the Store asynchronously + Graph to save + Callback + State to pass to the callback - + - Gets the Default Partial Results on Timeout behaviour + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Querystring field name for the Partial Results on Timeout setting for Remote Endpoints which support it + Updates a Graph in the Store asychronously + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Gets whether the Query Form should be shown to users + Deletes a Graph from the Store + URI of the Graph to delete + Callback + State to pass to the callback - + - Gets the Default Query for the Query Form + Updates the store asynchronously + SPARQL Update + Callback + State to pass to the callback - + - Gets the SPARQL Describe Algorithm to be used + Queries the store asynchronously 
+ SPARQL Query + Callback + State to pass to the callback - + - Gets the SPARQL Query Syntax to use + Queries the store asynchronously + SPARQL Query + RDF Handler + Results Handler + Callback + State to pass to the callback - + - Gets the Service Description Graph + Disposes of a 4store connection - + - Gets the Query Optimiser associated with the Configuration + Gets a String which gives details of the Connection + - + - Gets the Algebra Optimisers associated with the Configuration + Serializes the connection's configuration + - + - Adds Description of Features for the given Handler Configuration + Class for connecting to any dataset that can be exposed via Fuseki - Service Description Graph - Description Node for the Service + + + Uses all three Services provided by a Fuseki instance - Query, Update and HTTP Update + + - + - Basic implementation of a Query Handler Configuration + Creates a new connection to a Fuseki Server + The /data URI of the Fuseki Server - + - Creates a new Query Handler Configuration + Creates a new connection to a Fuseki Server - HTTP Context - Configuration Graph - Object Node + The /data URI of the Fuseki Server - + - Creates a new Query Handler Configuration + Creates a new connection to a Fuseki Server - Configuration Graph - Object Node + The /data URI of the Fuseki Server + Proxy Server - + - - Namespace for Configuration classes which are used to load and store the configuration settings for handlers which serve resources such as Graphs - + Creates a new connection to a Fuseki Server + The /data URI of the Fuseki Server + Proxy Server - + - Abstract Base Class for Dataset Handler configurations + Returns that Listing Graphs is supported - + - Creates a new Dataset Handler configuration + Gets the IO Behaviour of the Store - HTTP Context - Configuration Graph - Object Node - + - Gets the Dataset + Returns that Triple level updates are supported using Fuseki - + - Basic implementation of a Dataset Handler Configuration + Gets the 
List of Graphs from the store + - + - Creates a new Dataset Handler configuration + Updates a Graph in the Fuseki store - HTTP Context - Configuration Graph - Object Node + URI of the Graph to update + Triples to be added + Triples to be removed - + - Abstract Base class for Graph Handler configurations + Updates a Graph in the Fuseki store + URI of the Graph to update + Triples to be added + Triples to be removed - + - Creates a new Graph Handler Configuration + Executes a SPARQL Query on the Fuseki store - HTTP Context - Configuration Graph - Object Node + SPARQL Query + - + - Gets the Graph being served + Executes a SPARQL Query on the Fuseki store processing the results using an appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query + - + - Gets/Sets the cached ETag for the Graph + Executes SPARQL Updates against the Fuseki store + SPARQL Update - + - Basic implementation of a Graph Handler configuration + Makes a SPARQL Query against the underlying store + SPARQL Query + Callback + State to pass to the callback + SparqlResultSet or a Graph depending on the Sparql Query - + - Creates a new Graph Handler Configuration + Executes a SPARQL Query on the Fuseki store processing the results using an appropriate handler from those provided - HTTP Context - Configuration Graph - Object Node + RDF Handler + Results Handler + SPARQL Query + Callback + State to pass to the callback + - + - - Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Servers - + Executes SPARQL Updates against the Fuseki store + SPARQL Update + Callback + State to pass to the callback - + - Abstract Base class for Handler Configuration for SPARQL Servers + Lists the graph sin the Store asynchronously + Callback + State to pass to the callback - + - Query processor + Updates a Graph on the Fuseki Server + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the 
callback - + - Update processor + Gets a String which gives details of the Connection + - + - Protocol processor + Serializes the connection's configuration + Configuration Serialization Context - + - Service Description Graph + Provides a wrapper around an in-memory store + + + Useful if you want to test out some code using temporary in-memory data before you run the code against a real store or if you are using some code that requires an IStorageProvider interface but you need the results of that code to be available directly in-memory. + + - + - Default Graph Uri for queries + Creates a new In-Memory Manager which is a wrapper around a new empty in-memory store - + - Default Timeout for Queries + Creates a new In-Memory Manager which is a wrapper around an in-memory store + Triple Store - + - Default Partial Results on Timeout behaviour + Creates a new In-Memory Manager which is a wrapper around a SPARQL Dataset + Dataset - + - Whether the Handler supports Timeouts + Loads a Graph from the Store + Graph to load into + Graph URI to load - + - Whether the Handler supports Partial Results on Timeout + Loads a Graph from the Store + RDF Handler + Graph URI to load - + - Querystring Field name for the Timeout setting + Loads a Graph from the Store + Graph to load into + Graph URI to load - + - Querystring Field name for the Partial Results setting + Loads a Graph from the Store + RDF Handler + Graph URI to load - + - Whether a Query Form should be shown to the User + Saves a Graph to the Store + Graph - + - Default Sparql Query + Gets the IO Behaviour for In-Memory stores - + - SPARQL Describe Algorithm to use (null indicates default is used) + Updates a Graph in the Store + URI of the Graph to Update + Triples to be added + Triples to be removed - + - SPARQL Syntax to use (defaults to library default which is SPARQL 1.1 unless changed) + Updates a Graph in the Store + URI of the Graph to Update + Triples to be added + Triples to be removed - + - Query Optimiser to 
use (null indicates default is used) + Returns that Triple level updates are supported - + - Algebra Optimisers to use (empty list means only standard optimisers apply) + Deletes a Graph from the Store + URI of the Graph to delete - + - Gets the Default Graph Uri + Deletes a Graph from the Store + URI of the Graph to delete - + - Whether the Remote Endpoint supports specifying Query Timeout as a querystring parameter + Returns that Graph Deletion is supported - + - Gets the Default Query Execution Timeout + Lists the URIs of Graphs in the Store + - + - Querystring field name for the Query Timeout for Remote Endpoints which support it + Returns that listing graphs is supported - + - Whether the Remote Endpoint supports specifying Partial Results on Timeout behaviour as a querystring parameter + Returns that the Store is ready - + - Gets the Default Partial Results on Timeout behaviour + Returns that the Store is not read-only - + - Querystring field name for the Partial Results on Timeout setting for Remote Endpoints which support it + Makes a SPARQL Query against the Store + SPARQL Query + - + - Gets whether the Query Form should be shown to users + Makes a SPARQL Query against the Store processing the results with the appropriate processor from those given + RDF Handler + Results Handler + SPARQL Query + - + - Gets the Default Query for the Query Form + Applies SPARQL Updates to the Store + SPARQL Update - + - Gets the SPARQL Describe Algorithm to be used + Queries the store asynchronously + SPARQL Query + Callback + State to pass to the callback - + - Gets the SPARQL Query Syntax to use + Queries the store asynchronously + SPARQL Query + RDF Handler + Results Handler + Callback + State to pass to the callback - + - Gets the Query Optimiser associated with the Configuration + Updates the store asynchronously + SPARQL Update + Callback + State to pass to the callback - + - Gets the Algebra Optimisers associated with the Configuration + Disposes of the Manager - + - 
Whether Update Form should be shown + Gets a String representation of the Manager + - + - Default Update Text for the Update Form + Serializes the Configuration of the Manager + Configuration Serialization Context - + - Gets whether to show the Update Form if no update is specified + Flags Enumeration which is used to express the IO Behaviours of a specific Store - + - Gets the Default Update for the Update Form + Indicates whether the Store is Read Only i.e. Saving is not supported - + - Gets the Service Description Graph + Indicates that the Store is a Triple Store - + - Creates a new Base SPARQL Server Configuration based on information from a Configuration Graph + Indicates that the Store is a Quad (Graph) Store - HTTP Context - Configuration Graph - Object Node - + - Creates a new Base SPARQL Server Configuration based on information from a Configuration Graph + Indicates whether the Store has an explicit unnamed default graph - Configuration Graph - Object Node - + - Gets the SPARQL Query Processor + Indicates whether the Store has named graphs - + - Gets the SPARQL Update Processor + Indicates that a Triple Store appends Triples when the SaveGraph() method is used - + - Gets the SPARQL Graph Store HTTP Protocol Processor + Indicates that a Triple Store overwrites Triples when the SaveGraph() method is used - + - Adds Description of Features for the given Handler Configuration + Indicates that Graph data written to the Default Graph is always appended when the SaveGraph() method is used - Service Description Graph - Node for the SPARQL Query service - Node for the SPARQL Update service - Node for the SPARQL Graph Store HTTP Protocol service - + - Concrete implementation of a Handler Configuration for SPARQL Servers + Indicates that Graph data written to the Default Graph overwrites existing data when the SaveGraph() method is used - + - Creates a new SPARQL Server Configuration from information in a Configuration Graph + Indicates that Graph data written to 
Named Graphs is always appended when the SaveGraph() method is used - HTTP Context - Configuration Graph - Object Node - + - Creates a new SPARQL Server Configuration from information in a Configuration Graph + Indicates that Graph data written to Named Graphs overwrites existing data when the SaveGraph() method is used - Configuration Graph - Object Node - + - - Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Update handlers - + Indicates a Store that can do Triple Level additions on existing Graphs using the UpdateGraph() method - + - Abstract Base class for SPARQL Update Handler configurations + Indicates a Store that can do Triple Level removals on existing Graphs using the UpdateGraph() method - + - Update Processor to be used + Indicates that a Store has a notion of explicit empty graphs + + For some quad stores the existence of a graph may only be defined in terms of one/more quads being stored in that graph + - + - Whether Update Form should be shown + Indicates that the Store is from a system which provides access to multiple stores (such an implementation will usually implement the IStorageServer interface) - at a minimum this usually means the store will allow you to list other available stores. More complex abilities like creating and deleting stores are indicated by other flags. 
- + - Default Update Text for the Update Form + Indicates that the Store provides the means to create additional Stores - + - Service Description Graph + Indicates that the Store provides the means to delete Stores - + - Creates a new Update Handler Configuration + Indicates a Store that can do Triple Level additions and removals on existing Graphs using the UpdateGraph() method - HTTP Context - Configuration Graph - Object Node - + - Gets the SPARQL Update processor which is to be used + Default Behaviour for Read Only Triple Stores - + - Gets whether to show the Update Form if no update is specified + Default Behaviour for Read Only Quad (Graph) Stores - + - Gets the Default Update for the Update Form + Default Behaviour for Triple Stores + + Default Behaviour is considered to be a Triple Store where data is appended + - + - Gets the Service Description Graph + Default Behaviour for Quad (Graph) Stores + + Default Behaviour is considered to be Quad Store with Default and Named Graphs, data is appended to the default graph and overwrites named graphs + - + - Adds Description of Features for the given Handler Configuration + Behaviour for fully fledged storage servers i.e. 
multiple stores are supported and can be created and deleted as desired - Service Description Graph - Description Node for the Service - + - Basic implementation of a Update Handler Configuration + Interface which describes the capabilities of some storage provider - + - Creates a new Update Handler Configuration + Gets whether the connection with the underlying Store is ready for use - HTTP Context - Configuration Graph - Object Node - + - Abstract Base Class for Handler Configuration + Gets whether the connection with the underlying Store is read-only + + Any Manager which indicates it is read-only should also return false for the UpdatedSupported property and should throw a RdfStorageException if the SaveGraph() or UpdateGraph() methods are called + - + - Minimum Cache Duration setting permitted + Gets the Save Behaviour the Store uses - + - Maximum Cache Duration setting permitted + Gets whether the triple level updates are supported + + Some Stores do not support updates at the Triple level and may as designated in the interface defintion throw a NotSupportedException if the UpdateGraph() method is called. This property allows for calling code to check in advance whether Updates are supported + - + - Whether errors are shown to the User + Gets whether the deletion of graphs is supported + + Some Stores do not support the deletion of Graphs and may as designated in the interface definition throw a NotSupportedException if the DeleteGraph() method is called. This property allows for calling code to check in advance whether Deletion of Graphs is supported. 
+ - + - Stylesheet for formatting the Query Form and HTML format results + Gets whether the Store supports Listing Graphs - + - Introduction Text for the Query Form + Interface for storage providers which provide the read/write functionality to some arbitrary storage layer + + Designed to allow for arbitrary Triple Stores to be plugged into the library as required by the end user + - + - List of Custom Expression Factories which have been specified in the Handler Configuration + Gets the Parent Server on which this store is hosted (if any) + + + For storage backends which support multiple stores this is useful because it provides a way to access all the stores on that backend. For stores which are standalone they should simply return null + + - + - List of Property Function Factories which have been specified in the Handler Configuration + Loads a Graph from the Store + Graph to load into + Uri of the Graph to load + + + If the Graph being loaded into is Empty then it's Base Uri should become the Uri of the Graph being loaded, otherwise it should be merged into the existing non-empty Graph whose Base Uri should be unaffected. + + + Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. + + - + - Sets whether CORS headers are output + Loads a Graph from the Store + Graph to load into + URI of the Graph to load + + + If the Graph being loaded into is Empty then it's Base Uri should become the Uri of the Graph being loaded, otherwise it should be merged into the existing non-empty Graph whose Base Uri should be unaffected. + + + Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. 
+ + - + - Writer Compression Level + Loads a Graph from the Store using the RDF Handler + RDF Handler + URI of the Graph to load + + + Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. + + - + - Writer Pretty Printing Mode + Loads a Graph from the Store using the RDF Handler + RDF Handler + URI of the Graph to load + + + Behaviour of this method with regards to non-existent Graphs is up to the implementor, an empty Graph may be returned or an error thrown. Implementors should state in the XML comments for their implementation what behaviour is implemented. + + - + - Writer High Speed Mode permitted? + Saves a Graph to the Store + Graph to Save + + Uri of the Graph should be taken from the BaseUri property +

+ Behaviour of this method with regards to whether it overwrites/updates/merges with existing Graphs of the same Uri is up to the implementor and may be dependent on the underlying store. Implementors should state in the XML comments for their implementations what behaviour is implemented. +
- + - XML Writers can use DTDs? + Updates a Graph in the Store + Uri of the Graph to update + Triples to add to the Graph + Triples to remove from the Graph + + + Note: Not all Stores are capable of supporting update at the individual Triple level and as such it is acceptable for such a Store to throw a NotSupportedException if the Store cannot provide this functionality + + + Behaviour of this method with regards to non-existent Graph is up to the implementor, it may create a new empty Graph and apply the updates to that or it may throw an error. Implementors should state in the XML comments for their implementation what behaviour is implemented. + + + Implementers MUST allow for either the additions or removals argument to be null + + + May be thrown if the underlying Store is not capable of doing Updates at the Triple level + May be thrown if the underlying Store is not capable of doing Updates at the Triple level or if some error occurs while attempting the Update - + - Multi-threaded writers can write multi-threaded? + Updates a Graph in the Store + Uri of the Graph to update + Triples to add to the Graph + Triples to remove from the Graph + + + Note: Not all Stores are capable of supporting update at the individual Triple level and as such it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality + + + Behaviour of this method with regards to non-existent Graph is up to the implementor, it may create a new empty Graph and apply the updates to that or it may throw an error. Implementors should state in the XML comments for their implementation what behaviour is implemented. 
+ + + Implementers MUST allow for either the additions or removals argument to be null + + + May be thrown if the underlying Store is not capable of doing Updates at the Triple level + May be thrown if the underlying Store is not capable of doing Updates at the Triple level or if some error occurs while attempting the Update - + - XML Writers can compress literal objects to attributes? + Deletes a Graph from the Store + URI of the Graph to be deleted + May be thrown if the underlying Store is not capable of doing Deleting a Graph + May be thrown if the underlying Store is not capable of Deleting a Graph or an error occurs while performing the delete + + + Note: Not all Stores are capable of Deleting a Graph so it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality + + - + - Default Namespaces for appropriate writers + Deletes a Graph from the Store + URI of the Graph to be deleted + May be thrown if the underlying Store is not capable of doing Deleting a Graph + May be thrown if the underlying Store is not capable of Deleting a Graph or an error occurs while performing the delete + + + Note: Not all Stores are capable of Deleting a Graph so it is acceptable for such a Store to throw a NotSupportedException or an RdfStorageException if the Store cannot provide this functionality + + - + - Creates a new Base Handler Configuration which loads common Handler settings from a Configuration Graph + Gets a List of Graph URIs for the graphs in the store - HTTP Context - Configuration Graph - Object Node + - It is acceptable for the context parameter to be null + Implementations should implement this method only if they need to provide a custom way of listing Graphs. If the Store for which you are providing a manager can efficiently return the Graphs using a SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } query then there should be no need to implement this function. 
- + - Creates a new Base Handler Configuration which loads common Handler settings from a Configuration Graph + Interface for storage providers which allow SPARQL Queries to be made against them - Configuration Graph - Object Node - + - Gets the User Groups for the Handler + Makes a SPARQL Query against the underlying store + SPARQL Query + SparqlResultSet or a Graph depending on the Sparql Query + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Gets whether Error Messages should be shown to users + Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice + RDF Handler + SPARQL Results Handler + SPARQL Query + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Gets whether CORS (Cross Origin Resource Sharing) headers are sent to the client in HTTP responses + Interface for storage providers which allow SPARQL Queries to be made against them with reasoning set by query - + - Gets the Stylesheet for formatting HTML Results + Makes a SPARQL Query against the underlying store + SPARQL Query + rReasoning On demand by query + SparqlResultSet or a Graph depending on the Sparql Query + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request 
succeeds but the store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Gets the Introduction Text for the Query Form + Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice + RDF Handler + SPARQL Results Handler + SPARQL Query + rReasoning On demand by query + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Gets the Cache Duration in minutes to use + Interface for storage providers which allow SPARQL Updates to be made against them - - The SPARQL Handlers use the ASP.Net Cache object to cache information and they specify the caching duration as a Sliding Duration by default. This means that each time the cache is accessed the expiration time increases again. Set the CacheSliding property to false if you'd prefer an absolute expiration - - - This defaults to 15 minutes and the Handlers will only allow you to set a value between the MinimumCacheDuration and MaximumCacheDuration. We think that 15 minutes is a good setting and we use this as the default setting unless a duration is specified explicitly. 
- - + + + Processes a SPARQL Update command against the underlying Store + + SPARQL Update + + + + Interface for storage providers which provide asynchronous read/write functionality to some arbitrary storage layer + + + Designed to allow for arbitrary Triple Stores to be plugged into the library as required by the end user + + + - Gets whether Sliding Cache expiration is used + Gets the Parent Server on which this store is hosted (if any) - The SPARQL Handlers use the ASP.Net Cache object to cache information and they specify the cache duration as a Sliding Duration by default. Set this property to false if you'd prefer absolute expiration + For storage backends which support multiple stores this is useful because it provides a way to access all the stores on that backend. For stores which are standalone they should simply return null - + - Gets whether any Custom Expression Factories are registered in the Config for this Handler + Loads a Graph from the Store asynchronously + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Gets the Custom Expression Factories which are in the Config for this Handler + Loads a Graph from the Store asynchronously + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Gets whether there are any custom property function factories registered for this Handler + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Gets the custom property function factories registered for this Handler + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Gets the Writer Compression Level to use + Saves a Graph to the Store asynchronously + Graph to save + Callback + State to pass to the callback - + - Gets whether XML Writers can use DTDs + Updates a Graph in the Store asychronously + URI 
of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Gets whether XML Writers can compress literal objects as attributes + Updates a Graph in the Store asychronously + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Gets whether some writers can use high-speed mode when they detect that Graphs are ill-suited to syntax compression + Deletes a Graph from the Store + URI of the Graph to delete + Callback + State to pass to the callback - + - Gets whether multi-threaded writers are allowed to use multi-threaded mode + Deletes a Graph from the Store + URI of the Graph to delete + Callback + State to pass to the callback - + - Gets whether Pretty Printing is enabled + Lists the Graphs in the Store asynchronously + Callback + State to pass to the callback - + - Gets the Default Namespaces used for writing + Interface for storage providers which allow SPARQL Queries to be made against them asynchronously - + - Static Helper class for Configuration loading for use in ASP.Net applicatons + Queries the store asynchronously + SPARQL Query + Callback + State to pass to the callback + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Base Cache Key for Configuration Graph caching + Queries the store asynchronously + SPARQL Query + RDF Handler + Results Handler + Callback + State to pass to the callback + Thrown if an error occurs performing the query + Thrown if an error occurs performing the query + Thrown if the query is invalid when validated by dotNetRDF prior to passing the query request to the store or if the request succeeds but the 
store returns malformed results + Thrown if the store returns results in a format dotNetRDF does not understand - + - Cache Duration for Configuration Graph caching + Interface for storage providers which allow SPARQL Updates to be made against them asynchronously - + - Gets the Configuration Graph with the given Filename returns it + Updates the store asynchronously - HTTP Context - Configuration File - + SPARQL Update + Callback + State to pass to the callback - + - Finds whether there is any Handler Configuration for a wildcard path that the current request path matches + Interface for storage providers which have controllable transactions - Configuration Graph - Request URI - The resulting matched path - + + + It is up to the implementation whether transactions are per-thread or global and how transactions interact with operations performed on the storage provider. Please see individual implementations for notes on how transactions are implemented. + + - + - Path Resolver for Web Configuration loading + Begins a transaction - + - Creates a new Web Configuration Path Resolver + Commits a transaction - HTTP Server Utility - + - Resolves a Path by calling MapPath() where appropriate + Rolls back a transaction - Path - - + - Abstract Base Class for HTTP Handlers which serve SPARQL Datasets + Interface for storage providers which have controllable transactions which can be managed asynchronously - + - Holds the Configuration for this HTTP Handler + Begins a transaction asynchronously + Callback + State to pass to the callback - + - Returns that the Handler is reusable + Commits a transaction asynchronously + Callback + State to pass to the callback - + - Processes the request by loading the Configuration in order to obtain the Dataset to be served and then serving it to the client + Rolls back a transaction asynchronously - HTTP Context + Callback + State to pass to the callback - + - Serves the Dataset to the Client + Provides a Read-Only wrapper that can be placed 
around another IStorageProvider instance - HTTP Context - Dataset to serve - Implementations should override this if they wish to override the default behaviour of outputting the entire dataset using the HandlerHelper.SendToClient() method e.g. to use a custom writer or server only portions of the dataset + This is useful if you want to allow some code read-only access to a mutable store and ensure that it cannot modify the store via the manager instance - + - Abstract method in which concrete implementations must load and return their Configuration + Creates a new Read-Only connection which is a read-only wrapper around another store - HTTP Context - + Manager for the Store you want to wrap as read-only - + - Abstract method in which concrete implementations may update their Configuration post-request processing if necessary + Gets the parent server (if any) - HTTP Context - + - Abstract base class for HTTP Handlers for serving Graphs in ASP.Net applications + Loads a Graph from the underlying Store + Graph to load into + URI of the Graph to load - + - Handler Configuration + Loads a Graph from the underlying Store + Graph to load into + URI of the Graph to load - + - Gets that the Handler is reusable + Loads a Graph from the underlying Store + RDF Handler + URI of the Graph to load - + - Processes the request by loading the Configuration in order to obtain the Graph to be served and then serving it to the client + Loads a Graph from the underlying Store - HTTP Context + RDF Handler + URI of the Graph to load - + - Selects the Writer to use for sending the Graph to the Client + Throws an exception since you cannot save a Graph using a read-only connection - Selected MIME Type Definition - - - - Implementations may override this if they wish to substitute in an alternative writer for certain MIME types (e.g. 
as done by the SchemaGraphHandler) - - + Graph to save + Thrown since you cannot save a Graph using a read-only connection - + - Method which can be used to alter the Graph before it is served + Gets the IO Behaviour of the read-only connection taking into account the IO Behaviour of the underlying store - Graph - - + - Method which computes an ETag for a Graph + Throws an exception since you cannot update a Graph using a read-only connection - Graph - - - Method may return null if no ETag can be computed or you do not wish to serve ETag Headers - + URI of the Graph + Triples to be added + Triples to be removed + Thrown since you cannot update a Graph using a read-only connection - + - Abstract method in which concrete implementations must load and return their Configuration + Throws an exception since you cannot update a Graph using a read-only connection - HTTP Context - + URI of the Graph + Triples to be added + Triples to be removed + Thrown since you cannot update a Graph using a read-only connection - + - Abstract method in which concrete implementations may update their Configuration post-request processing if necessary + Returns that Update is not supported - HTTP Context - + - Abstract Base Class for creating SPARQL Graph Store HTTP Protocol Handler implementations + Throws an exception as you cannot delete a Graph using a read-only connection + URI of the Graph to delete + Thrown since you cannot delete a Graph using a read-only connection - + - Handler Configuration + Throws an exception as you cannot delete a Graph using a read-only connection + URI of the Graph to delete + Thrown since you cannot delete a Graph using a read-only connection - + - Base Path of the Handler as determined by the implementing class when loading Configuration using the LoadConfig() method + Returns that deleting graphs is not supported - + - Indicates that the Handler is reusable + Gets the list of graphs in the underlying store + - + - Processes requests made to the Graph 
Store HTTP Protocol endpoint and invokes the appropriate methods on the Protocol Processor that is in use + Returns whether listing graphs is supported by the underlying store - HTTP Context - - - Implementations may override this if necessary - if the implementation is only providing additional logic such as authentication, ACLs etc. then it is recommended that the override applies its logic and then calls the base method since this base method will handle much of the error handling and sending of appropriate HTTP Response Codes. - - - + - Loads the Handler Configuration + Returns whether the Store is ready + + + + + Returns that the Store is read-only + + + + + Disposes of the Store + + + + + Gets the String representation of the Manager - HTTP Context - Base Path of the Handler to be determined by an implementing class - + - Updates the Handler Configuration + Serializes the Configuration of the Manager - HTTP Context + Configuration Serialization Context - + - Abstract Base class for Handlers which provide SPARQL Query endpoints + Provides a Read-Only wrapper that can be placed around another IQueryableStorage instance + + + This is useful if you want to allow some code read-only access to a mutable store and ensure that it cannot modify the store via the manager instance + + - + - Handler Configuration + Creates a new Queryable Read-Only connection which is a read-only wrapper around another store + Manager for the Store you want to wrap as read-only - + - Returns that the Handler is reusable + Executes a SPARQL Query on the underlying Store + SPARQL Query + - + - Processes a SPARQL Query Request + Executes a SPARQL Query on the underlying Store processing the results with an appropriate handler from those provided - HTTP Context + RDF Handler + Results Handler + SPARQL Query + - + - Loads the Handler Configuration + Lists the Graphs in the Store - HTTP Context - + - Processes a Query + Returns that listing Graphs is supported - Query - - - - Implementations 
should override this method if their behaviour requires more than just invoking the configured Query processor - - - + - Processes the Results and returns them to the Client in the HTTP Response + Abstract Base Class for connecting to any Store that supports the Sesame 2.0 HTTP Communication protocol - HTTP Context - Result Object - Implementations should override this if they do not want to use the default results processing behaviour provided by HandlerHelper.SendToClient() + See here for the protocol specification, this base class supports Version 5 of the protocol which does not include SPARQL Update support - + - Updates the Handler Configuration + Base Uri for the Store - HTTP Context - + - Handles errors in processing SPARQL Query Requests + Store ID - Context of the HTTP Request - Error title - Sparql Query - Error - + - Handles errors in processing SPARQL Query Requests + Repositories Prefix - Context of the HTTP Request - Error title - Sparql Query - Error - HTTP Status Code to return - + - Generates a Sparql Query Form + Query Path Prefix - - + - Determines the Permission Action for a SPARQL Query + Update Path Prefix - Query - - + - Abstract Base Class for creating SPARQL Update Handler implementations + Whether to do full encoding of contexts - + - Handler Configuration + Whether queries should always be posted - + - Gets that the Handler is reusable + Server the store is hosted on - + - Processes SPARQL Update requests + Creates a new connection to a Sesame HTTP Protocol supporting Store - HTTP Context + Base Uri of the Store + Store ID - + - Loads the Handler Configuration + Creates a new connection to a Sesame HTTP Protocol supporting Store - HTTP Context - + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication - + - Processes SPARQL Updates + Creates a new connection to a Sesame HTTP Protocol supporting Store - Update Command Set - - - Implementations 
should override this method if their behaviour requires more than just invoking the configured Update processor - - + Base Uri of the Store + Store ID + Proxy Server - + - Updates the Handler Configuration + Creates a new connection to a Sesame HTTP Protocol supporting Store - HTTP Context + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication + Proxy Server - + - Generates a SPARQL Update Form + Gets the Base URI to the repository - HTTP Context - + - Handles errors in processing SPARQL Update Requests + Gets the Repository Name that is in use - Context of the HTTP Request - Error title - SPARQL Update - Error - + - Handles errors in processing SPARQL Update Requests + Gets the Save Behaviour of Stores that use the Sesame HTTP Protocol - Context of the HTTP Request - Error title - SPARQL Update - Error - HTTP Status Code - + - Gets the Permission action for the SPARQL Update Command + Returns that Updates are supported on Sesame HTTP Protocol supporting Stores - Update Command - - + - Abstract Base class for SPARQL Servers which provide combined SPARQL Query, Update and Graph Store HTTP Protocol endpoints + Returns that deleting graphs from the Sesame store is supported - + - Handler Configuration + Returns that listing Graphs is supported - + - Returns that the Handler is reusable + Returns that the Connection is ready - + - Processes requests + Returns that the Connection is not read-only - HTTP Context - + - Processes Query requests + Gets the parent server - HTTP Context - + - Processes Update requests + Makes a SPARQL Query against the underlying Store - HTTP Context + SPARQL Query + - + - Processes Protocol requests + Makes a SPARQL Query against the underlying Store processing the results with an appropriate handler from those provided - HTTP Context + RDF Handler + Results Handler + SPARQL Query + - + - Processes Service Description requests + Escapes a Query 
to avoid a character encoding issue when communicating a query to Sesame - HTTP Context + Query + - + - Loads the Handler Configuration + Gets the Content Type used to save data to the store i.e. the MIME type to use for the Content-Type header - HTTP Context - Base Path of the Handler - + - Updates the Handler Configuration + Creates an RDF Writer to use for saving data to the store - HTTP Context + - + - Processes SPARQL Queries + Loads a Graph from the Store - Query - - - - Implementations should override this method if their behaviour requires more than just invoking the configured Query processor - - + Graph to load into + Uri of the Graph to load + If a Null Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Processes SPARQL Updates + Loads a Graph from the Store - Update Command Set - - - Implementations should override this method if their behaviour requires more than just invoking the configured Update processor - - + RDF Handler + Uri of the Graph to load + If a Null Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Internal Helper function which returns the Results back to the Client in one of their accepted formats + Loads a Graph from the Store - Context of the HTTP Request - Results of the Sparql Query - - - Implementations should override this if they want to control how results are sent to the client rather than using the default behaviour provided by HandlerHelper.ProcessResults() - - + Graph to load into + Uri of the Graph to load + If a Null/Empty Uri is specified then the default graph (statements with no context in Sesame parlance) will be loaded - + - Handles errors in processing SPARQL Query Requests + Loads a Graph from the Store - Context of the HTTP Request - Error title - Sparql Query - Error + RDF Handler + Uri of the Graph to load + If a Null/Empty Uri is specified then the default graph (statements with no context in 
Sesame parlance) will be loaded - + - Handles errors in processing SPARQL Query Requests + Saves a Graph into the Store (Warning: Completely replaces any existing Graph with the same URI unless there is no URI - see remarks for details) - Context of the HTTP Request - Error title - Sparql Query - Error - HTTP Status Code to return + Graph to save + + If the Graph has no URI then the contents will be appended to the Store, if the Graph has a URI then existing data associated with that URI will be replaced + - + - Handles errors in processing SPARQL Update Requests + Updates a Graph - Context of the HTTP Request - Error title - SPARQL Update - Error + Uri of the Graph to update + Triples to be added + Triples to be removed - + - Handles errors in processing SPARQL Update Requests + Updates a Graph - Context of the HTTP Request - Error title - SPARQL Update - Error - HTTP Status Code + Uri of the Graph to update + Triples to be added + Triples to be removed - + - Generates a SPARQL Query Form + Deletes a Graph from the Sesame store - HTTP Context + URI of the Graph to delete - + - Generates a SPARQL Update Form + Deletes a Graph from the Sesame store - HTTP Context + URI of the Graph to delete - + - Gets the Permission action for a SPARQL Query + Gets the list of Graphs in the Sesame store - Query - + - Gets the Permission action for a SPARQL Update Command + Gets the parent server - Update Command - - + - HTTP Handler for serving RDF Datasets in ASP.Net applications + Saves a Graph to the Store asynchronously - - - Used to serve a Dataset at a specific fixed URL. The Dataset being served to the user in one of their acceptable MIME types if possible, if they don't accept any MIME type we can serve then they get a 406 Not Acceptable - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. 
This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Dataset to be served: - - <dotnetrdf:/folder/dataset> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.DatasetHandler" ; - dnr:usingDataset _:dataset . - - _:dataset a dnr:SparqlDataset ; - dnr:type "VDS.RDF.Query.Datasets.InMemoryDataset" . - - - + Graph to save + Callback + State to pass to the callback - + - Loads the Handlers configuration + Loads a Graph from the Store asynchronously - HTTP Context - + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Updates the Handlers configuration + Updates a Graph in the Store asychronously - HTTP Context + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - A HTTP Module that attempts to allow content negotiation by file extension wherever applicable + Deletes a Graph from the Store + URI of the Graph to delete + Callback + State to pass to the callback - + - Disposes of the Module + Makes a SPARQL Query against the underlying store + SPARQL Query + Callback + State to pass to the callback + SparqlResultSet or a Graph depending on the Sparql Query - + - Intialises the Module + Makes a SPARQL Query against the underlying store processing the resulting Graph/Result Set with a handler of your choice - HTTP Application + RDF Handler + SPARQL Results Handler + SPARQL Query + Callbakc + State to pass to the callback - + - Handles the start of requests by doing conneg wherever applicable + Helper method for creating HTTP Requests to the Store - Sender of the Event - Event Arguments + Path to the Service requested + Acceptable Content Types + HTTP Method + Querystring Parameters + - + - HTTP Handler for serving Graphs in ASP.Net 
applications + Disposes of the Connector - - - Configured exactly in the same way as GraphHandler - only difference in functionality is that if the requested Content Type (based on the Accept: header) is HTML then the HtmlSchemaWriter will be used to provide a human readable schema document rather than the standard HtmlWriter which justs creates a table of Triples. Remember though that this means that the HTML output will not contain embedded RDFa as the HtmlSchemaWriter does not embed any as opposed to the standard HtmlWriter which does. - - - + - Overrides writer Selection to use the HtmlSchemaWriter whenever the HtmlWriter would normally have been used + Gets a String which gives details of the Connection - MIME Type Definition selected based on the Requests Accept header - + - Type of Service Description to return + Serializes the connection's configuration + Configuration Serialization Context - + - Description of the Query Service + Connector for connecting to a Store that supports the Sesame 2.0 HTTP Communication protocol + + Acts as a synonym for whatever the latest version of the Sesame HTTP Protocol that is supported by dotNetRDF might be. 
Currently this is Version 6 which includes SPARQL Update support (Sesame 2.4+ required) + - + - Description of the Update Service + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID - + - Description of the Protocol Service + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication - + - Description of all Services (this will produce an invalid Service Description document as defined by the current Specification Drafts) + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Proxy Server - + - Static Helper class responsible for generating SPARQL Service Description Graphs based on a given Configuration object + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication + Proxy Server - + - Namespace URI for SPARQL Service Description 1.1 + Connector for connecting to a Store that supports the Sesame 2.0 HTTP Communication Protocol version 5 (i.e. 
no SPARQL Update support) - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Proxy Server - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication + Proxy Server - + - Constants for SPARQL Service Description Classes + Connector for connecting to a Store that supports the Sesame 2.0 HTTP Communication Protocol version 6 (i.e. 
includes SPARQL Update support) - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Proxy Server - + - Constants for SPARQL Service Description Classes + Creates a new connection to a Sesame HTTP Protocol supporting Store + Base Uri of the Store + Store ID + Username to use for requests that require authentication + Password to use for requests that require authentication + Proxy Server - + - Constants for SPARQL Service Description Classes + Makes a SPARQL Update request to the Sesame server + SPARQL Update - + - Constants for SPARQL Service Description Instances + Makes a SPARQL Update request to the Sesame server + SPARQL Update + Callback + State to pass to the callback - + - Constants for SPARQL Service Description Instances + Controls how the SparqlConnector loads Graphs from the Endpoint - + - Constants for SPARQL Service Description Instances + Graphs are loaded by issuing a DESCRIBE query using the Graph URI - + - Constants for SPARQL Service Description Instances + Graphs are loaded by issuing a CONSTRUCT FROM query using the Graph URI - + - Constants for SPARQL Service Description Instances + Class for connecting to any SPARQL Endpoint as a read-only Store + + + This class is effectively a read-only wrapper around a SparqlRemoteEndpoint using it with it's default settings, if you only need to query an endpoint and require more control over the settings used to access the endpoint you should use that class directly or 
use the constructors which allow you to provide your own pre-configure SparqlRemoteEndpoint instance + + + Unlike other HTTP based connectors this connector does not derive from BaseHttpConnector - if you need to specify proxy information you should do so on the SPARQL Endpoint you are wrapping either by providing a SparqlRemoteEndpoint instance pre-configured with the proxy settings or by accessing the endpoint via the Endpoint property and programmatically adding the settings. + + - + - Constants for SPARQL Service Description Instances + Underlying SPARQL query endpoint - + - Constants for SPARQL Service Description Instances + Method for loading graphs - + - Constants for SPARQL Service Description Properties + Whether to skip local parsing - + - Constants for SPARQL Service Description Properties + Timeout for endpoints - + - Constants for SPARQL Service Description Properties + Creates a new SPARQL Connector which uses the given SPARQL Endpoint + Endpoint - + - Constants for SPARQL Service Description Properties + Creates a new SPARQL Connector which uses the given SPARQL Endpoint + Endpoint + Load Method to use - + - Constants for SPARQL Service Description Properties + Creates a new SPARQL Connector which uses the given SPARQL Endpoint + Endpoint URI - + - Constants for SPARQL Service Description Properties + Creates a new SPARQL Connector which uses the given SPARQL Endpoint + Endpoint URI + Load Method to use - + - Constants for SPARQL Service Description Properties + Gets the parent server (if any) - + - Constants for SPARQL Service Description Properties + Controls whether the Query will be parsed locally to accurately determine its Query Type for processing the response + + If the endpoint you are connecting to provides extensions to SPARQL syntax which are not permitted by the libraries parser then you may wish to enable this option as otherwise you will not be able to execute such queries + - + - Constants for SPARQL Service Description Properties + 
Gets/Sets the HTTP Timeout in milliseconds used for communicating with the SPARQL Endpoint - + - Constants for SPARQL Service Description Properties + Gets the underlying SparqlRemoteEndpoint which this class is a wrapper around - + - Constants for SPARQL Service Description Properties + Makes a Query against the SPARQL Endpoint + SPARQL Query + - + - Constants for SPARQL Service Description Properties + Makes a Query against the SPARQL Endpoint processing the results with an appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query + - + - Constants for SPARQL Service Description Properties + Loads a Graph from the SPARQL Endpoint + Graph to load into + URI of the Graph to load - + - Constants for SPARQL Service Description Properties + Loads a Graph from the SPARQL Endpoint + RDF Handler + URI of the Graph to load - + - Constants for SPARQL Service Description Properties + Loads a Graph from the SPARQL Endpoint + Graph to load into + URI of the Graph to load - + - Constants for SPARQL Service Description Properties + Loads a Graph from the SPARQL Endpoint + RDF Handler + URI of the Graph to load - + - Constants for SPARQL Service Description Properties + Throws an error since this Manager is read-only + Graph to save + Always thrown since this Manager provides a read-only connection - + - Generates a SPARQL Service Description Graph for the given Query Handler Configuration or uses the configuration supplied Description Graph + Gets the IO Behaviour of SPARQL Connections - Query Handler Configuration - Base URI of the Description - - + - Generates a SPARQL Service Description Graph for the specified portion of the SPARQL Server Handler Configuration or uses the configuration supplied Description Graph + Throws an error since this Manager is read-only - SPARQL Server Configuration - Base URI of the Description - Portion of the SPARQL Server to describe - + Graph URI + Triples to be added + Triples to be removed - + - Generates a SPARQL 
Service Description Graph for the given Update Handler Configuration or uses the configuration supplied Description Graph + Throws an error since this Manager is read-only - Update Handler Configuration - Base URI of the Description - + Graph URI + Triples to be added + Triples to be removed - + - Generates a SPARQL Service Description Graph for the given Protocol Handler Configuration or uses the configuration supplied Description Graph + Returns that Updates are not supported since this connection is read-only - Protocol Handler Configuration - Base URI of the Description - - + - HTTP Handler for adding SPARQL Graph Store HTTP Protocol for RDF Graph Management endpoints to ASP.Net applications + Throws an exception as this connector provides a read-only connection - - - Used to create a Protocol endpoint at a Base URL with any URL under this handled by this Handler - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Protocol Endpoint: - - <dotnetrdf:/folder/protocol/*> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.WildcardProtocolHandler" ; - dnr:protocolProcessor _:proc . - - _:proc a dnr:SparqlHttpProtocolProcessor ; - dnr:type "VDS.RDF.Update.Protocol.LeviathanProtocolProcessor" ; - dnr:usingStore _:store . - - _:store a dnr:TripleStore ; - dnr:type "VDS.RDF.TripleStore" . 
- - - + URI of this Graph to delete + Thrown since this connection is read-only so you cannot delete graphs using it - + - Loads the Handler Configuration + Throws an exception as this connector provides a read-only connection - HTTP Context - Base Path of the Handler which this method will determine - + URI of this Graph to delete + Thrown since this connection is read-only so you cannot delete graphs using it - + - Updates the Handler Configuration + Returns that deleting graphs is not supported - HTTP Context - + - HTTP Handler for serving Graphs in ASP.Net applications + Lists the Graphs in the Store - - - Used to serve a Graph at a specific fixed URL. The Graph being served to the user in one of their acceptable MIME types if possible, if they don't accept any MIME type we can serve then they get a 406 Not Acceptable - - - If you have a Graph where you use slash URIs under this URL and you want those URIs to resolve to the same Graph then you should use the WildcardGraphHandler instead - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Graph to be served: - - <dotnetrdf:/folder/graph> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.GraphHandler" ; - dnr:usingGraph _:graph . - - _:graph a dnr:Graph ; - dnr:type "VDS.RDF.Graph" ; - dnr:fromFile "yourGraph.rdf" . 
- - - + - + - Loads the Handlers configuration + Returns that listing graphs is supported - HTTP Context - - + - Updates the Handlers configuration + Returns that the Connection is ready - HTTP Context - + - Static Helper class for HTTP Handlers + Returns that the Connection is read-only - + - Gets the Username of the User for the HTTP Request provided that they are authenticated + Disposes of the Connection - HTTP Context - - - Note: Unauthenticated Users are treated as guests - - + - Checks whether a User is authenticated (or guests are permitted) + Gets a String which gives details of the Connection - HTTP Context - User Groups to test against - + - Checks whether a User is authenticated (or guests are permitted) and the given action is allowed + Serializes the connection's configuration - HTTP Context - User Groups to test against - Action to check for permission for - + Configuration Serialization Context - + - Retrieves the Accept Types to be used to determine the content format to be used in responding to requests + Class for connecting to any SPARQL server that provides both a query and update endpoint - HTTP Context - - This method was added in 0.4.1 to allow for the NegotiateByFileExtension module to work properly. Essentially the module may rewrite the Accept header of the HTTP request but this will not be visible directly via the AcceptTypes property of the HTTP request as that is fixed at the time the HTTP request is parsed and enters the ASP.Net pipeline. This method checks whether the Accept header is present and if it has been modified from the AcceptTypes property uses the header instead of the property + This class is a wrapper around a and a . The former is used for the query functionality while the latter is used for the update functionality. As updates happen via SPARQL the behaviour with respects to adding and removing blank nodes will be somewhat up to the underlying SPARQL implementation. 
This connector is not able to carry out operations which attempt to delete blank nodes and cannot guarantee that added blank nodes bear any relation to existing blank nodes in the store. + + + Unlike other HTTP based connectors this connector does not derive from BaseHttpConnector - if you need to specify proxy information you should do so on the SPARQL Endpoint you are wrapping either by providing endpoint instance pre-configured with the proxy settings or by accessing the endpoint via the Endpoint and UpdateEndpoint properties and programmatically adding the settings. - + + + Creates a new connection + + Query Endpoint + Update Endpoint + Method for loading graphs + + + + Creates a new connection + + Query Endpoint + Update Endpoint + + - Helper function which returns the Results (Graph/Triple Store/SPARQL Results) back to the Client in one of their accepted formats + Creates a new connection - Context of the HTTP Request - Results of the Sparql Query + Query Endpoint + Update Endpoint + Method for loading graphs - + - Helper function which returns the Results (Graph/Triple Store/SPARQL Results) back to the Client in one of their accepted formats + Creates a new connection - Context of the HTTP Request - Results of the Sparql Query - Handler Configuration + Query Endpoint + Update Endpoint - + - Applies the Writer Options from a Handler Configuration to a Writer + Gets the underlying SparqlRemoteUpdateEndpoint which this class is a wrapper around - Writer - Handler Configuration - + - Handles errors in processing SPARQL Query Requests + Gets/Sets the HTTP Timeout in milliseconds used for communicating with the SPARQL Endpoint - Context of the HTTP Request - Handler Configuration - Error title - Sparql Query - Error - + - Handles errors in processing SPARQL Query Requests + Gets that deleting graphs is supported - Context of the HTTP Request - Handler Configuration - Error title - Sparql Query - Error - HTTP Status Code to return - + - Handles errors in processing 
SPARQL Update Requests + Gets that the store is not read-only - Context of the HTTP Request - Handler Configuration - Error title - SPARQL Update - Error - + - Handles errors in processing SPARQL Update Requests + Gets the IO behaviour for the store - Context of the HTTP Request - Handler Configuration - Error title - SPARQL Update - Error - HTTP Status Code to return - + - Computes the ETag for a Graph + Gets that triple level updates are supported, see the remarks section of the for exactly what is and isn't supported - Graph - - + - Checks whether the HTTP Request contains caching headers that means a 304 Modified response can be sent + Deletes a graph from the store - HTTP Context - ETag - Last Modified - True if a 304 Not Modified can be sent + URI of the graph to delete - + - Adds ETag and/or Last-Modified headers as appropriate to a response + Deletes a graph from the store - HTTP Context - ETag - Last Modified + URI of the graph to delete - + - Adds the Standard Custom Headers that dotNetRDF attaches to all responses from it's Handlers + Saves a graph to the store - HTTP Context - Handler Configuration + Graph to save - + - Adds CORS headers which are needed to allow JS clients to access RDF/SPARQL endpoints powered by dotNetRDF + Updates a graph in the store - HTTP Context + URI of the graph to update + Triples to add + Triples to remove - + - Converts a DateTime to RFC 2822 format + Updates a graph in the store - - + URI of the graph to update + Triples to add + Triples to remove - + - HTTP Handler for adding SPARQL Graph Store HTTP Protocol for RDF Graph Management endpoints to ASP.Net applications + Makes a SPARQL Update against the store - - - Used to create a Protocol endpoint at a specific fixed URL, if you want to allow URIs relative to this URI to be used to refer to Graphs then use the WildcardProtocolHandler instead - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. 
This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Protocol Endpoint: - - <dotnetrdf:/folder/protocol> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.ProtocolHandler" ; - dnr:protocolProcessor _:proc . - - _:proc a dnr:SparqlHttpProtocolProcessor ; - dnr:type "VDS.RDF.Update.Protocol.LeviathanProtocolProcessor" ; - dnr:usingStore _:store . - - _:store a dnr:TripleStore ; - dnr:type "VDS.RDF.TripleStore" . - - - + SPARQL Update - + - Loads the Handler Configuration + Gets a String which gives details of the Connection - HTTP Context - Base Path of the Handler which this method will determine - + - Updates the Handler Configuration + Serializes the connection's configuration - HTTP Context + Configuration Serialization Context - + - HTTP Handler for adding SPARQL Query endpoints to ASP.Net applications + Class for connecting to any store that implements the SPARQL Graph Store HTTP Protocol for Managing Graphs - Used to create a Query endpoint at a specific fixed URL + The SPARQL Graph Store HTTP Protocol is defined as part of SPARQL 1.1 and is currently a working draft so implementations are not guaranteed to be fully compliant with the draft and the protocol may change in the future. - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Query Endpoint: - - <dotnetrdf:/folder/query> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.QueryHandler" ; - dnr:queryProcessor _:proc . 
- - _:proc a dnr:SparqlQueryProcessor ; - dnr:type "VDS.RDF.Query.LeviathanQueryProcessor" ; - dnr:usingStore _:store . - - _:store a dnr:TripleStore ; - dnr:type "VDS.RDF.TripleStore" . - + Note: While this connector supports the update of a Graph the Graph Store HTTP Protocol only allows for the addition of data to an existing Graph and not the removal of data, therefore any calls to UpdateGraph() that would require the removal of Triple(s) will result in an error. - - - Loads the Handler Configuration - - HTTP Context - - - + - Updates the Handler Configuration + URI of the Protocol Server - HTTP Context - + - HTTP Handler for adding SPARQL Servers to ASP.Net applications - SPARQL Servers provide combined Query, Update and Graph Store HTTP Protocol for RDF Graph Management endpoints + Creates a new SPARQL Graph Store HTTP Protocol Connector - - - Used to create a SPARQL Server with a base URI where any URI under this URI is handled by this Server - - - For example given a Base URI of http://example.org/server/ then the Handler would treat requests to http://example.org/server/query as SPARQL Query requests, requests to http://example.org/server/update as SPARQL Update requests and requests to any other URL covered by this base URL as SPARQL Graph Store HTTP Protocol requests - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Update Endpoint: - - <dotnetrdf:/folder/server/*> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.SparqlServer" ; - dnr:queryProcessor _:qProc ; - dnr:updateProcessor _:uProc ; - dnr:protocolProcessor _:pProc . 
- - _:qProc a dnr:SparqlQueryProcessor ; - dnr:type "VDS.RDF.Query.LeviathanQueryProcessor" ; - dnr:usingStore _:store . - - _:uProc a dnr:SparqlUpdateProcessor ; - dnr:type "VDS.RDF.Update.LeviathanUpdateProcessor" ; - dnr:usingStore _:store . - - _:pProc a dnr:SparqlHttpProtocolProcessor ; - dnr:type "VDS.RDF.Update.Protocol.LeviathanProtocolProcessor" ; - dnr:usingStore _:store . - - _:store a dnr:TripleStore ; - dnr:type "VDS.RDF.TripleStore" . - - - + URI of the Protocol Server - + - Loads the Handler Configuration + Creates a new SPARQL Graph Store HTTP Protocol Connector - HTTP Context - Base Path for the Server - + URI of the Protocol Server - + - Updates the Handler Configuration + Creates a new SPARQL Graph Store HTTP Protocol Connector - HTTP Context + URI of the Protocol Server + Proxy Server - + - HTTP Handler for adding SPARQL Update endpoints to ASP.Net applications + Creates a new SPARQL Graph Store HTTP Protocol Connector - - - Used to create an Update endpoint at a specific fixed URL - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Update Endpoint: - - <dotnetrdf:/folder/update> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.UpdateHandler" ; - dnr:updateProcessor _:proc . - - _:proc a dnr:SparqlUpdateProcessor ; - dnr:type "VDS.RDF.Update.LeviathanUpdateProcessor" ; - dnr:usingStore _:store . - - _:store a dnr:TripleStore ; - dnr:type "VDS.RDF.TripleStore" . 
- - - + URI of the Protocol Server + Proxy Server - + - Loads the Handler Configuration + Gets the IO Behaviour of SPARQL Graph Store protocol based stores - HTTP Context - - + - Updates the Handler Configuration + Gets that Updates are supported - HTTP Context - + - HTTP Handler for serving Graphs in ASP.Net applications + Returns that deleting Graphs is supported - - - Used to serve a Graph at a base URL with any URL under that being handled by this Handler. The Graph is served to the user in one of their acceptable MIME types if possible, if they don't accept any MIME type we can serve then they get a 406 Not Acceptable - - - This Handler is configured using the new Configuration API introduced in the 0.3.0 release. This requires just one setting to be defined in the <appSettings> section of your Web.config file which points to a Configuration Graph like so: - <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> - The Configuration Graph must then contain Triples like the following to specify a Graph to be served: - - <dotnetrdf:/folder/graph/*> a dnr:HttpHandler ; - dnr:type "VDS.RDF.Web.WildcardGraphHandler" ; - dnr:usingGraph _:graph . - - _:graph a dnr:Graph ; - dnr:type "VDS.RDF.Graph" ; - dnr:fromFile "yourGraph.rdf" . - - - - + - Loads the Handler Configuration + Returns that listing Graphs is not supported - HTTP Context - - + - Updates the Handlers configuration + Gets that the Store is ready - HTTP Context - + - Interface for RDF Graphs + Gets that the Store is not read-only - - - Most implementations will probably want to inherit from the abstract class BaseGraph since it contains reference implementations of various algorithms (Graph Equality/Graph Difference/Sub-Graph testing etc) which will save considerable work in implementation and ensure consistent behaviour of some methods across implementations. 
- - - + - Gets/Sets the Base Uri for the Graph + Loads a Graph from the Protocol Server + Graph to load into + URI of the Graph to load - + - Gets whether a Graph is Empty + Loads a Graph from the Protocol Server + RDF Handler + URI of the Graph to load - + - Gets the Namespace Map for the Graph + Loads a Graph from the Protocol Server + Graph to load into + URI of the Graph to load - + - Gets the Nodes of the Graph + Loads a Graph from the Protocol Server + RDF Handler + URI of the Graph to load - + - Gets the Triple Collection for the Graph + Sends a HEAD Command to the Protocol Server to determine whether a given Graph exists + URI of the Graph to check for - + - Asserts a Triple in the Graph + Sends a HEAD Command to the Protocol Server to determine whether a given Graph exists - A Triple + URI of the Graph to check for - + - Asserts an Enumerable of Triples in the Graph + Saves a Graph to the Protocol Server - An Enumerable of Triples + Graph to save - + - Retracts a Triple from the Graph + Updates a Graph on the Protocol Server - A Triple + URI of the Graph to update + Triples to be added + Triples to be removed + + Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. If you attempt to remove Triples then an RdfStorageException will be thrown + - + - Retracts an Enumerable of Triples from the Graph + Updates a Graph on the Protocol Server - Enumerable of Triples + URI of the Graph to update + Triples to be added + Triples to be removed + + Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. 
If you attempt to remove Triples then an RdfStorageException will be thrown + - + - Retracts all Triples from the Graph + Deletes a Graph from the store - - - The Graph should raise the ClearRequested event at the start of the Clear operation and abort the operation if the operation is cancelled by an event handler. On completing the Clear the Cleared event should be raised. - - + URI of the Graph to delete - + - Creates a URI Node that corresponds to the Base URI of the Graph + Deletes a Graph from the store - + URI of the Graph to delete - + - Creates a URI Node for the given QName using the Graphs NamespaceMap to resolve the QName + Throws an exception as listing graphs in a SPARQL Graph Store HTTP Protocol does not support listing graphs - QName + Thrown since SPARQL Graph Store HTTP Protocol does not support listing graphs - + - Selects the Blank Node with the given ID if it exists in the Graph, returns null otherwise + Loads a Graph from the Protocol Server - Node ID - The Node if it exists in the Graph or null + Graph to load into + URI of the Graph to load + Callback + State to pass to the callback - + - Selects the Literal Node with the given Value and Language if it exists in the Graph, returns null otherwise + Loads a Graph from the Protocol Server - Value of the Literal - Language Specifier of the Literal - The Node if it exists in the Graph or null + RDF Handler + URI of the Graph to load + Callback + State to pass to the callback - + - Selects the Literal Node with the given Value if it exists in the Graph, returns null otherwise + Saves a Graph to the Protocol Server - Value of the Literal - The Node if it exists in the Graph or null + Graph to save + Callback + State to pass to the callback - + - Selects the Literal Node with the given Value and DataType if it exists in the Graph, returns otherwise + Updates a Graph on the Protocol Server - Value of the Literal - Data Type of the Literal - The Node if it exists in the Graph or null + URI of the 
Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback + + Note: The SPARQL Graph Store HTTP Protocol for Graph Management only supports the addition of Triples to a Graph and does not support removal of Triples from a Graph. If you attempt to remove Triples then an RdfStorageException will be thrown + - + - Selects all Triples which have a Uri Node with the given Uri + Lists the Graphs in the Store asynchronously - Uri - + Callback + State to pass to the callback - + - Selects all Triples which contain the given Node + Deletes a Graph from the store asynchronously - Node - + URI of the graph to delete + Callback + State to pass to the callback - + - Selects all Triples where the Object is a Uri Node with the given Uri + Disposes of the Connection - Uri - - + - Selects all Triples where the Object is a given Node + Gets a String representation of the connection - Node - + - Selects all Triples where the Predicate is a given Node + Serializes the connection's configuration - Node - + Configuration Serialization Context - + - Selects all Triples where the Predicate is a Uri Node with the given Uri + Reasoning modes supported by Stardog - Uri - - + - Selects all Triples where the Subject is a given Node + No Reasoning (default) - Node - - + - Selects all Triples where the Subject is a Uri Node with the given Uri + OWL-QL Reasoning - Uri - - + - Selects all Triples with the given Subject and Predicate + OWL-EL Reasoning - Subject - Predicate - - + - Selects all Triples with the given Subject and Object + OWL-RL Reasoning - Subject - Object - - + - Selects all Triples with the given Predicate and Object + OWL-DL Reasoning - Predicate - Object - - + - Selects the Uri Node with the given QName if it exists in the Graph, returns null otherwise + RDFS Reasoning - QName - The Node if it exists in the Graph or null - + - Selects the Uri Node with the given Uri if it exists in the Graph, returns null otherwise + RDFS, QL, RL, 
and EL axioms, plus SWRL rules - Uri - The Node if it exists in the Graph or null - + - Gets whether a given Triple is in this Graph + As of Stardog 3.x the reasoning mode is no longer a connection property and is instead managed at the database level - Triple to test - - + - Merges the given Graph into this Graph + Abstract implementation of a connector for Stardog that connects using the HTTP protocol - Graph to merge - The Graph should raise the MergeRequested event at the start of the Merge operation and abort the operation if the operation is cancelled by an event handler. On completing the Merge the Merged event should be raised. + Has full support for Stardog Transactions, connection is in auto-commit mode by default i.e. all write operations (Delete/Save/Update) will create and use a dedicated transaction for their operation, if the operation fails the transaction will automatically be rolled back. You can manage Transactions using the Begin(), Commit() and Rollback() methods. - - - - - Merges the given Graph into this Graph - - Graph to merge - Indicates that the Merge should preserve the Graph URIs of Nodes - - The Graph should raise the MergeRequested event at the start of the Merge operation and abort the operation if the operation is cancelled by an event handler. On completing the Merge the Merged event should be raised. + The connector maintains a single transaction which is shared across all threads since Stardog is currently provides only MRSW (Multiple Reader Single Writer) concurrency and does not permit multiple transactions to occur simultaneously. 
- - - Checks whether a Graph is equal to another Graph and if so returns the mapping of Blank Nodes - - Graph to compare with - Mapping of Blank Nodes - - - - - Checks whether this Graph is a sub-graph of the given Graph - - Graph - - - + - Checks whether this Graph is a sub-graph of the given Graph + Constant for the default Anonymous user account and password used by Stardog if you have not supplied a shiro.ini file or otherwise disabled security - Graph - Mapping of Blank Nodes - - + - Checks whether this Graph has the given Graph as a sub-graph + The underlying server connection - Graph - - + - Checks whether this Graph has the given Graph as a sub-graph + Creates a new connection to a Stardog Store - Graph - Mapping of Blank Nodes - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Reasoning Mode - + - Calculates the difference between this Graph and the given Graph + Creates a new connection to a Stardog Store - Graph - - - - Produces a report which shows the changes that must be made to this Graph to produce the given Graph - - + Base Uri of the Server + Knowledge Base (i.e. Database) ID - + - Converts the Graph into a DataTable + Creates a new connection to a Stardog Store - - - Warning: Not available under builds which remove the Data Storage layer from dotNetRDF e.g. Silverlight - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password - + - Resolves a QName into a URI using the Namespace Map and Base URI of this Graph + Creates a new connection to a Stardog Store - QName - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Reasoning Mode - + - Event which is raised when a Triple is asserted in the Graph + Creates a new connection to a Stardog Store - - Whenever this event is raised the Changed event should also be raised - + Base Uri of the Server + Knowledge Base (i.e. 
Database) ID + Reasoning Mode + Proxy Server - + - Event which is raised when a Triple is retracted from the Graph + Creates a new connection to a Stardog Store - - Whenever this event is raised the Changed event should also be raised - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Reasoning Mode + Proxy Server - + - Event which is raised when the Graph contents change + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Proxy Server - + - Event which is raised just before the Graph is cleared of its contents + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Proxy Server - + - Event which is raised after the Graph is cleared of its contents + Gets the Base URI of the Stardog server - + - Event which is raised just before a Merge operation begins on the Graph + Gets the knowledge base ID being used by this connector - + - Event which is raised when a Merge operation is completed on the Graph + Gets/Sets the reasoning mode to use for queries - + - Interface for RDF Graphs which provide Transactions i.e. 
changes to them can be Flushed (committed) or Discard (rolled back) as desired + Gets the IO Behaviour of Stardog - + - Flushes any changes to the Graph + Returns that listing Graphs is supported - + - Discards any changes to the Graph + Returns that the Connection is ready - + - Node Type Values + Returns that the Connection is not read-only - + - A Blank Node + Returns that Updates are supported on Stardog Stores - + - A Uri Node + Returns that deleting graphs from the Stardog store is not yet supported (due to a .Net specific issue) - + - A Literal Node + Gets the parent server - + - A Graph Literal Node + Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use + Sparql Query + - + - A Variable Node (currently only used in N3) + Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use, the reasoning can be set by query + Sparql Query + + - + - Interface for Nodes + Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use processing the results using an appropriate handler from those provided + RDF Handler + Results Handler + SPARQL Query + - + - Nodes have a Type + Makes a SPARQL Query against the underlying Store using whatever reasoning mode is currently in-use processing the results using an appropriate handler from those provided, the reasoning can be set by query - Primarily provided so can do quick integer comparison to see what type of Node you have without having to do actual full blown Type comparison + RDF Handler + Results Handler + SPARQL Query + + - + - Gets the Graph the Node belongs to + Loads a Graph from the Store + Graph to load into + URI of the Graph to load + + If an empty/null URI is specified then the Default Graph of the Store will be loaded + - + - Gets/Sets the Graph URI associated with a Node + Loads a Graph from the Store + RDF Handler + URI of the Graph to load + + If an empty/null URI is specified then 
the Default Graph of the Store will be loaded + - + - Gets the String representation of the Node + Loads a Graph from the Store - + Graph to load into + Uri of the Graph to load + + If an empty/null Uri is specified then the Default Graph of the Store will be loaded + - + - Gets the String representation of the Node formatted with the given Node formatter + Loads a Graph from the Store - Formatter - + RDF Handler + URI of the Graph to load + + If an empty/null URI is specified then the Default Graph of the Store will be loaded + - + - Gets the String representation of the Node formatted with the given Node formatter + Saves a Graph into the Store (see remarks for notes on merge/overwrite behaviour) - Formatter - Triple Segment - + Graph to save + + + If the Graph has no URI then the contents will be appended to the Store's Default Graph. If the Graph has a URI then existing Graph associated with that URI will be replaced. To append to a named Graph use the UpdateGraph() method instead + + - + - Interface for URI Nodes + Updates a Graph in the Stardog Store + Uri of the Graph to update + Triples to be added + Triples to be removed + + Removals happen before additions + - + - Gets the URI the Node represents + Updates a Graph in the Stardog store + Uri of the Graph to update + Triples to be added + Triples to be removed - + - Interface for Blank Nodes + Deletes a Graph from the Stardog store + URI of the Graph to delete - + - Gets the Internal ID of the Blank Node + Deletes a Graph from the Stardog store + URI of the Graph to delete - + - Interface for Literal Nodes + Gets the list of Graphs in the Stardog store + - + - Gets the Lexical Value of the Literal + Gets the parent server - + - Gets the Language specifier (if any) of the Literal or the Empty String + Saves a Graph to the Store asynchronously + Graph to save + Callback + State to pass to the callback - + - Gets the DataType URI (if any) of the Literal or null + Saves a Graph to the Store asynchronously + 
Graph to save + Callback + State to pass to the callback - + - Interface for Graph Literal Nodes + Save a graph to the database asynchronously within the context of an open transaction + The ID of the transaction to use for the update + True to commit the transaction on completion + The graph to write + Callback invoked on completion + State parameter to pass to the callback - + - Gets the Sub-graph the Graph Literal represents + Loads a Graph from the Store asynchronously + Handler to load with + URI of the Graph to load + Callback + State to pass to the callback - + - Interface for Variable Nodes + Updates a Graph in the Store asychronously + URI of the Graph to update + Triples to be added + Triples to be removed + Callback + State to pass to the callback - + - Gets the Variable Name + Apply an update to a graph + The URI of the graph to be updated + The triples to insert + The triples to delete + Callback invoked on completion + Additional state passed to the callback + If a transaction is currently in progress, the update is applied + as part of that transaction. Otherwise a new transaction is started and committed by this method. 
- + - Interface to be implemented by RDF Readers which parse Concrete RDF Syntax + Apply an update to a graph as part of a transaction + The ID of the open transaction to use + True to commit the transaction at the end of the update, false otherwise + The URI of the graph to be updated + The triples to inser + The triples to remove + A callback to be invoked on completion + Additional state to pass to the callback - + - Method for Loading a Graph from some Concrete RDF Syntax via some arbitrary Stream + Deletes a Graph from the Store - Graph to load RDF into - The reader to read input from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the Stream + URI of the Graph to delete + Callback + State to pass to the callback - + - Method for Loading a Graph from some Concrete RDF Syntax via some arbitrary Input + Delete a graph as part of an open transaction - Graph to load RDF into - The reader to read input from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the Stream + The ID of the transaction to use + True to commit the transaction at the end of the delete operation, false to leave the transaction open + The URI of the graph to delete + Callback to invoked on completion of the operation + Additional state to pass into the callback - + - Method for Loading a Graph from some Concrete RDF Syntax from a given File + Queries the store asynchronously - Graph to load RDF into - The Filename of the File to read from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the File + SPARQL Query + Callback + State to pass to the callback - + - Method for Loading 
RDF using a RDF Handler from some Concrete RDF Syntax via some arbitrary Stream + Queries the store asynchronously - RDF Handler to use - The reader to read input from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the Stream + SPARQL Query + RDF Handler + Results Handler + Callback + State to pass to the callback - + - Method for Loading RDF using a RDF Handler from some Concrete RDF Syntax via some arbitrary Stream + Helper method for creating HTTP Requests to the Store - RDF Handler to use - The reader to read input from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the Stream + Path to the Service requested + Acceptable Content Types + HTTP Method + Querystring Parameters + - + - Method for Loading RDF using a RDF Handler from some Concrete RDF Syntax from a given File + Adds Stardog specific request headers; reasoning needed for < 2.2 - RDF Handler to use - The Filename of the File to read from - Thrown if the Parser tries to output something that is invalid RDF - Thrown if the Parser cannot Parse the Input - Thrown if the Parser encounters an IO Error while trying to access/parse the Stream + - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Get the query parameter string that specifies the current reasoning mode + - + - Interface to be implemented by RDF Writers which generate RDF Concrete Syntax + Start a transaction + A transaction ID for the new transaction - + - Method for Saving a Graph to a Concrete RDF Syntax in a file based format + Commit an open transaction - The Graph to Save - The filename to save the Graph in - Thrown if the RDF in the Graph is not representable by the Writer - Thrown if the 
Writer is unable to write to the File + The ID of the transaction to commit - + - Method for Saving a Graph to a Concrete RDF Syntax via some arbitrary TextWriter + Rollback an open transaction - The Graph to Save - The TextWriter to save the Graph to - Thrown if the RDF in the Graph is not representable by the Writer - Thrown if the Writer is unable to write to the underlying storage of the TextWriter specified in the + The ID of the transaction to rollback - + - Event which writers can raise to indicate possible ambiguities or issues in the syntax they are producing + Begins a new Transaction + + A single transaction + - + - Interface to be implemented by Triple Store Readers + Commits the active Transaction + Thrown if there is not an active Transaction on the current Thread + + Transactions are scoped to Managed Threads + - + - Loads a RDF dataset into a Triple Store + Rolls back the active Transaction - Triple Store - File to load from + Thrown if there is not an active Transaction on the current Thread + + Transactions are scoped to Managed Threads + - + - Loads a RDF dataset into a Triple Store + Begins a transaction asynchronously - Triple Store - Input to load from + Callback + State to pass to the callback - + - Loads a RDF dataset using a RDF Handler + Commits a transaction asynchronously - RDF Handler to use - File to load from + Callback + State to pass to the callback - + - Loads a RDF dataset using a RDF Handler + Rolls back a transaction asynchronously - RDF Handler to use - Input to load from + Callback + State to pass to the callback - + - Event which Readers can raise when they notice syntax that is ambigious/deprecated etc which can still be parsed + Disposes of the Connector - + - Interface to be implemented by Triple Store Writers + Gets a String which gives details of the Connection + - + - Method for saving data to a Triple Store + Serializes the connection's configuration - Triple Store - File to save to + Configuration Serialization 
Context - + - Method for saving data to a Triple Store + A Stardog Connector for connecting to Stardog version 1.* servers - Triple Store - Write to save to - + - Event which writers can raise to indicate possible ambiguities or issues in the syntax they are producing + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Reasoning Mode - + - An Interface for classes which provide Context Information for Triples thus allowing you to create Quads with arbitrary extra information attached to Triples via your Context Objects + Creates a new connection to a Stardog Store - - A Triple Context is simply a name-value pair collection of arbitrary data that can be attached to a Triple. Internal representation of this is left to the implementor. - + Base Uri of the Server + Knowledge Base (i.e. Database) ID - + - A Method which will indicate whether the Context contains some arbitrary property + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password - + - A Property which exposes the arbitrary properties of the Context as an Key Based Index + Creates a new connection to a Stardog Store - Name of the Property - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Reasoning Mode - + - Class which implements a very basic Triple Context + Creates a new connection to a Stardog Store - - The Name Value collection is represented internally as a Dictionary - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Reasoning Mode + Proxy Server - + - Creates a new Basic Triple Context without a Source + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. 
Database) ID + Username + Password + Reasoning Mode + Proxy Server - + - Checks whether a given property is defined in this Context object + Creates a new connection to a Stardog Store - Name of the Property - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Proxy Server - + - Gets/Sets the value of a Property + Creates a new connection to a Stardog Store - Name of the Property - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Proxy Server - + - Interface for Triple Stores + A Stardog Connector for connecting to Stardog version 2.* servers - A Triple Store may be a representation of some storage backed actual store or just a temporary collection of Graphs created for working with. Note that an implementation is not required to provide a definitive view of a Triple Store and may only provide a limited/partial snapshot of the underlying store. Check the documentation for the various implementations to see what type of view of a Triple Store they actually provide. - + - Gets whether a TripleStore is Empty + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Reasoning Mode - + - Gets the Graph Collection of Graphs in this Triple Store + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID - + - Gets all the Triples in the Triple Store which are currently loaded in memory (see remarks) + Creates a new connection to a Stardog Store - Since a Triple Store object may represent only a snapshot of the underlying Store evaluating this enumerator may only return some of the Triples in the Store and may depending on specific Triple Store return nothing. + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password - + - Adds a Graph into the Triple Store + Creates a new connection to a Stardog Store - Graph to add + Base Uri of the Server + Knowledge Base (i.e. 
Database) ID + Username + Password + Reasoning Mode - + - Adds a Graph into the Triple Store + Creates a new connection to a Stardog Store - Graph to add - Controls whether the Graph should be merged with an existing Graph of the same Uri if it already exists in the Triple Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Reasoning Mode + Proxy Server - + - Adds a Graph into the Triple Store by dereferencing the Graph Uri to get the RDF and then load the resulting Graph into the Triple Store + Creates a new connection to a Stardog Store - Uri of the Graph to be added + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Reasoning Mode + Proxy Server - + - Adds a Graph into the Triple Store by dereferencing the Graph Uri to get the RDF and then load the resulting Graph into the Triple Store + Creates a new connection to a Stardog Store - Uri of the Graph to be added - Controls whether the Graph should be merged with an existing Graph of the same Uri if it already exists in the Triple Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Proxy Server - + - Removes a Graph from the Triple Store + Creates a new connection to a Stardog Store - Graph Uri of the Graph to remove + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Proxy Server - + - Checks whether the Graph with the given Uri is in this Triple Store + Adds Stardog specific request headers - Graph Uri - + - + - Gets a Graph from the Triple Store; + Get the query string parameter that specifies the current reasoning mode - Graph URI - + - Event which is raised when a Graph is added + Executes a SPARQL Update against the Stardog store + SPARQL Update + + Stardog executes SPARQL update requests in their own self contained transactions which do not interact with normal Stardog transactions that may be managed via this API. 
In some cases this can lead to unexpected behaviour, for example if you call , make an update and then call the updates will not be rolled back. + - + - Event which is raised when a Graph is removed + Executes a SPARQL Update against the Stardog store + SPARQL Update + Callback + State to pass to callback + + Stardog executes SPARQL update requests in their own self contained transactions which do not interact with normal Stardog transactions that may be managed via this API. In some cases this can lead to unexpected behaviour, for example if you call , make an update and then call the updates will not be rolled back. + - + - Event which is raised when a Graphs contents changes + A Stardog Connector for connecting to Stardog version 3.* servers - + - Event which is raised when a Graph is cleared + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID - + - Event which is raised when a Graph has a merge operation performed on it + Creates a new connection to a Stardog Store + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password - + - Interface for Triple Stores which can be queried in memory using method calls or the SPARQL implementation contained in this library + Creates a new connection to a Stardog Store - - - An in memory Triple Store will typically load most of the Graphs and consequently Triples contained within it into Memory as the in memory SPARQL implementation only operates over the part of the Triple Store loaded in memory. This being said there is no reason why an in memory store can't provide a Snapshot view of an underlying store to allow only the relevant parts of Store to be loaded and queried. - - - All the Selection Methods which do not specify a subset of Graphs on such a Triple Store should operate over the entire store - - + Base Uri of the Server + Knowledge Base (i.e. 
Database) ID + Proxy Server - + - Returns whether a given Triple is contained anywhere in the Query Triples + Creates a new connection to a Stardog Store - Triple to check for existence of - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password + Proxy Server - + + + + - Selects all Triples which have a Uri Node with the given Uri from all the Query Triples + Adds Stardog specific request headers - Uri - + - + - Selects all Triples which contain the given Node from all the Query Triples + A Stardog connector for connecting to Stardog servers running the latest version, currently this is version 3.* - Node - - + - Selects all Triples where the Object is a Uri Node with the given Uri from all the Query Triples + Creates a new connection to a Stardog Store - Uri - + Base Uri of the Server + Knowledge Base (i.e. Database) ID - + - Selects all Triples where the Object is a given Node from all the Query Triples + Creates a new connection to a Stardog Store - Node - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Username + Password - + - Selects all Triples where the Predicate is a given Node from all the Query Triples + Creates a new connection to a Stardog Store - Node - + Base Uri of the Server + Knowledge Base (i.e. Database) ID + Proxy Server - + - Selects all Triples where the Predicate is a Uri Node with the given Uri from all the Query Triples + Creates a new connection to a Stardog Store - Uri - + Base Uri of the Server + Knowledge Base (i.e. 
Database) ID + Username + Password + Proxy Server - + - Selects all Triples where the Subject is a given Node from all the Query Triples + Possible Async Storage API Actions - Node - - + - Selects all Triples where the Subject is a Uri Node with the given Uri from all the Query Triples + Loaded a Graph - Uri - - + - Selects all the Triples with the given Subject-Predicate pair from all the Query Triples + Loaded data with a RDF Handler - Subject - Predicate - - + - Selects all the Triples with the given Predicate-Object pair from all the Query Triples + Saved a Graph - Predicate - Object - - + - Selects all the Triples with the given Subject-Object pair from all the Query Triples + Updates a Graph - Subject - Object - - + - Selects all Triples which have a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Deleted a Graph - List of the Graph URIs of Graphs you want to select over - Uri - - + - Selects all Triples which contain the given Node from a Subset of Graphs in the Triple Store + Listed Graphs - List of the Graph URIs of Graphs you want to select over - Node - - + - Selects all Triples where the Object is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Made a SPARQL Query - List of the Graph URIs of Graphs you want to select over - Uri - - + - Selects all Triples where the Object is a given Node from a Subset of Graphs in the Triple Store + Made a SPARQL Query with a handler - List of the Graph URIs of Graphs you want to select over - Node - - + - Selects all Triples where the Predicate is a given Node from a Subset of Graphs in the Triple Store + Made a SPARQL Update - List of the Graph URIs of Graphs you want to select over - Node - - + - Selects all Triples where the Predicate is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Began a Transaction - List of the Graph URIs of Graphs you want to select over - Uri - - + - Selects all Triples where the Subject is a given Node from a 
Subset of Graphs in the Triple Store + Committed a Transaction - List of the Graph URIs of Graphs you want to select over - Node - - + - Selects all Triples where the Subject is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Rolled back a Transaction - List of the Graph URIs of Graphs you want to select over - Uri - - + - Executes a SPARQL Query on the Triple Store + Gettting a new store template - SPARQL Query as an unparsed string - - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. - - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - - + - Executes a SPARQL Query on the Triple Store + Getting all available templates - SPARQL Query as a SparqlQuery instance - - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. 
- - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - - + - Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided + Created a Store - RDF Handler - Results Handler - SPARQL Query as an unparsed string - - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. - - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - - + - Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided + Deleted a Store - RDF Handler - Results Handler - Parsed SPARQL Query - - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. 
- - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - - + - Interface for Triple Stores which can be queried natively i.e. the Stores provide their own SPARQL implementations + Retrieved a reference to a Store - - A Natively Queryable store will typically not load its Graphs and Triples into memory as this is generally unecessary. - - + - Executes a SPARQL Query on the Triple Store + Got the list of Stores - Sparql Query as unparsed String - - - This assumes that the Store has access to some native SPARQL query processor on/at the Store which will be used to return the results. Implementations should parse the returned result into a SparqlResultSet or Graph. - - + - Executes a SPARQL Query on the Triple Store processing the results using an appropriate handler from those provided + Unknown Action - RDF Handler - Results Handler - SPARQL Query as unparsed String - + - Interface for Triple Stores which support SPARQL Update as per the SPARQL 1.1 specifications + Represents arguments passed to callbacks on success/failure of a async storage API call - A Store which supports this may implement various access control mechanisms which limit what operations are actually permitted - - - It is the responsibility of the Store class to ensure that commands are permissible before invoking them + Primarily used to provide simple method signatures on the async storage API callbacks - - - Executes an Update against the Triple Store - - SPARQL Update Command(s) - - As per the SPARQL 1.1 Update specification the command string may be a sequence of commands - - - - - Executes a single Update Command against the Triple Store - - SPARQL Update Command - - + - Executes a set of Update Commands against the Triple Store + Creates new callback arguments - SPARQL Update Command Set + Operation - + - Interface for Triple Stores which can have a 
IInferenceEngine attached to them + Creates new callback arguments + Operation + Error that occurred - + - Adds an Inference Engine to the Triple Store + Creates new callback arguments - Reasoner to add + Operation + Graph to return - + - Removes an Inference Engine from the Triple Store + Creates new callback arguments - Reasoner to remove + Operation + Graph to return + Error that occurred - + - Clears all Inference Engines from the Triple Store + Creates new callback arguments + Operation + URI of the affected Graph - + - Applies Inference to the given Graph + Creates new callback arguments - Graph to apply inference to - - Allows you to apply Inference to a Graph even if you're not putting that Graph into the Store - + Operation + URI of the affected Graph + Error that occurred - + - Interface for Triple Stores which are backed by some storage layer that may delay persistence and thus require flushing to ensure changes are persisted to the backing store, as a by product such stores will typically have some notion of transactionality + Creates new callback arguments + Operation + Enumeration of Graph URIs - + - Flushes any outstanding changes to the underlying store + Creates new callback arguments + Operation + Handler to return - + - Discards any outstanding changes to the underlying store + Creates new callback arguments + Operation + Handler to return + Error that occurred - + - Abstract Base Class for Literal Nodes + Creates new callback arguments + Operation + SPARQL Query + RDF Handler to return + Results Handler to return - + - Constants used to add salt to the hashes of different Literal Nodes + Creates new callback arguments + Operation + SPARQL Query + RDF Handler + Results Handler + Error that occurred - + - Constants used to add salt to the hashes of different Literal Nodes + Creates new callback arguments + Operation + SPARQL Query + Results to return - + - Constants used to add salt to the hashes of different Literal Nodes + Creates new callback 
arguments + Operation + SPARQL Query + Results to return + Error that occurred - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal + Operation + Data to return - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal - Whether to Normalize the Literal Value + Operation + Data to return + Error that occurred - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal - String value for the Language Specifier for the Literal + Operation + Enumeration of Store IDs - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal - String value for the Language Specifier for the Literal - Whether to Normalize the Literal Value + Operation + Enumeration of Store IDs + Error that occurred - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal - Uri for the Literals Data Type + Operation + Store ID + Storage Provider + Error that occurred - + - Internal Only Constructor for Literal Nodes + Creates new callback arguments - Graph this Node is in - String value of the Literal - Uri for the Literals Data Type - Whether to Normalize the Literal Value + Operation + Store ID + Template - + - Deserialization Only Constructor + Creates new callback arguments + Operation + Store ID + Templates - + - Deserialization Constructor + Sets the Data to the appropriate property based on the operation type - Serialization Information - Streaming Context + Data - + - Gives the String Value of the Literal + Gets whether the async operation succeeded (no error occurred) - + - Gives the Language Specifier for the Literal (if it exists) or the Empty String + Gets the Graph that was saved/loaded (if 
applicable) - + - Gives the Data Type Uri for the Literal (if it exists) or a null + Gets the error that occurred (for failed operations) - + - Implementation of the Equals method for Literal Nodes + Gets the URI of the Graph affected by the operation - Object to compare the Node with - - - The default behaviour is for Literal Nodes to be considered equal IFF -
    -
  1. Their Language Specifiers are identical (or neither has a Language Specifier)
  2. -
  3. Their Data Types are identical (or neither has a Data Type)
  4. -
  5. Their String values are identical
  6. -
- This behaviour can be overridden to use value equality by setting the LiteralEqualityMode option to be Loose if this is more suited to your application. -
- + - Implementation of the Equals method for Literal Nodes + Gets the list of Graph URIs (if applicable) - Object to compare the Node with - - - The default behaviour is for Literal Nodes to be considered equal IFF -
    -
  1. Their Language Specifiers are identical (or neither has a Language Specifier)
  2. -
  3. Their Data Types are identical (or neither has a Data Type)
  4. -
  5. Their String values are identical
  6. -
- This behaviour can be overridden to use value equality by setting the LiteralEqualityMode option to be Loose if this is more suited to your application. -
- + - Determines whether this Node is equal to a Blank Node (should always be false) + Gets the RDF Handler used (if applicable) - Blank Node - - + - Determines whether this Node is equal to a Graph Literal Node (should always be false) + Gets the Results Handler used (if applicable) - Graph Literal Node - - + - Determines whether this Node is equal to a Literal Node + Gets the Query Results (if applicable) - Literal Node - - + - Determines whether this Node is equal to a URI Node (should always be false) + Gets the SPARQL Query (if applicable) - URI Node - - + - Determines whether this Node is equal to a Variable Node (should always be false) + Gets the SPARQL Update (if applicable) - Variable Node - - + - Determines whether this Node is equal to a Literal Node + Gets the Store ID (if applicable) - Literal Node - - + - Gets a String representation of a Literal Node + Gets the list of Store IDs (if applicable) - - Gives a value without quotes (as some syntaxes use) with the Data Type/Language Specifier appended using Notation 3 syntax - + - Implementation of CompareTo for Literal Nodes + Gets the Storage Provider (if applicable) - Node to Compare To - - Literal Nodes are greater than Blank Nodes, Uri Nodes and Nulls, they are less than Graph Literal Nodes. -

- Two Literal Nodes are initially compared based upon Data Type, untyped literals are less than typed literals. Two untyped literals are compared purely on lexical value, Language Specifier has no effect on the ordering. This means Literal Nodes are only partially ordered, for example "hello"@en and "hello"@en-us are considered to be the same for ordering purposes though they are different for equality purposes. Datatyped Literals can only be properly ordered if they are one of a small subset of types (Integers, Booleans, Date Times, Strings and URIs). If the datatypes for two Literals are non-matching they are ordered on Datatype Uri, this ensures that each range of Literal Nodes is sorted to some degree. Again this also means that Literals are partially ordered since unknown datatypes will only be sorted based on lexical value and not on actual value. + + For the operation this will be the reference to the newly returned store instance +
- + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the operation that was performed - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the template that was created (if any) - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the templates that were created (if any) - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Generic callback for async storage API operations - Node to test against - + Originator of the callback + Callback Arguments + State object originally passed to the async call - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Static Helper for the Storage API - Node to test against - - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Template for posting form data as part of a HTTP multipart request - Node to test against - - + - Gets the serialization information + Gets a new unique boundary for HTTP mutlipart requests - Serialization Information - Streaming Context - + - Reads the data for XML deserialization + Handles HTTP Query Errors obtaining additional information from the HTTP response if possible - XML Reader + HTTP Error + - + - Writes the data for XML serialization + Handles HTTP Errors obtaining additional information from the HTTP response if possible - XML Writer + HTTP Error + Action being performed + - + - Class for representing Literal Nodes + Handles HTTP Errors obtaining additional information from the HTTP response if possible + HTTP Error + Action being performed + Function that generates the actual errors + + Adapted from Ron Michael's Zettlemoyer's original patch for this in Stardog to use it across all operations as far as possible + - + - Constants used to add salt to the hashes of different Literal Nodes + Tries to 
get the status line for inclusion in the HTTP error message + Web exception + Status line if available, empty string otherwise - + - Constants used to add salt to the hashes of different Literal Nodes + Handles Query Errors + Error + - + - Constants used to add salt to the hashes of different Literal Nodes + Handles Errors + Error + Action being performed + - + - Internal Only Constructor for Literal Nodes + Handles Errors - Graph this Node is in - String value of the Literal + Error Type + Error + Action being performed + Function that generates the actual errors + - + - Internal Only Constructor for Literal Nodes + Interface for Virtual Nodes - Graph this Node is in - String value of the Literal - Whether to Normalize the Literal Value + Node ID Type + Graph ID Type - + - Internal Only Constructor for Literal Nodes + Gets the Node ID - Graph this Node is in - String value of the Literal - String value for the Language Specifier for the Literal - + - Internal Only Constructor for Literal Nodes + Gets the Virtual Node provider - Graph this Node is in - String value of the Literal - String value for the Language Specifier for the Literal - Whether to Normalize the Literal Value - + - Internal Only Constructor for Literal Nodes + Gets whether the Nodes value has been materialised - Graph this Node is in - String value of the Literal - Uri for the Literals Data Type - + - Internal Only Constructor for Literal Nodes + Gets the materialised value forcing it to be materialised if necessary - Graph this Node is in - String value of the Literal - Uri for the Literals Data Type - Whether to Normalize the Literal Value - + - Deserialization Only Constructor + Interface for comparing nodes on their VirtualID property - + - Deserialization Constructor + Attempt to compare the VirtualID of this node with the VirtualID of the other node - Serialization Information - Streaming Context + The other node to try to compare against + The result of the comparison if it could be 
performed + True if a comparison could be performed, false otherwise. - + - Implementation of Compare To for Literal Nodes + Interface for nodes that know for themseves how to create a copy of themselves to a different graph - Literal Node to Compare To - - Simply invokes the more general implementation of this method - + Especially virtual nodes need to copy themselves during query algebra processing, + because the standard copy tools might destroy their virtual state by duplicating it's virtualized + values. In consequence all indices in the various triple stores fail to match such value-copied nodes + - + - Determines whether this Node is equal to a Literal Node + Copies the Node into another Graph, currently only used by virtual nodes - Literal Node + Target Graph - + - Class for representing Literal Nodes where the Literal values are not normalized + A Virtual RDF Provider is a provider that transforms materialised values into virtual ID values. These virtual values can be used to do much faster term equality checking and to minimise memory usage when accessing out of memory data. + Node ID Type + Graph ID Type + + + An implementation of this is typically in addition to a more general RDF store implementation (such as an IStorageProvider) and was originally designed and intended for use in creating ISparqlDataset instances which allow out of memory data to be queried more efficiently. + + + It is expected that most implementations will use a cache to ensure that repeated transformations are as fast as possible + +

Important Note re: Blank Nodes

+ + In order for code that uses this class to function correctly it must be ensured that IDs issued for Blank Nodes are graph scoped, as such a specific method for converting Blank Nodes into Virtual Node IDs is given + +
- + - Internal Only Constructor for Literal Nodes + Given a Node ID returns the materialised value in the given Graph - Graph this Node is in - String value of the Literal + Graph to create the Node in + Node ID + - + - Internal Only Constructor for Literal Nodes + Given a Graph ID returns the value of the Graph URI - Graph this Node is in - String value of the Literal - Lanaguage Specifier for the Literal + Graph ID + - + - Internal Only Constructor for Literal Nodes + Given a non-blank Node returns the Node ID - Graph this Node is in - String value of the Literal - Uri for the Literals Data Type + Node + + Should function as equivalent to the two argument version with the createIfNotExists parameter set to false + - + - Deserialization Only Constructor + Gets the Graph ID for a Graph + Graph + + + Should function as equivalent to the two argument version with the createIfNotExists parameter set to false + - + - Deserialization Constructor + Gets the Graph ID for a Graph creating it if necessary - Serialization Information - Streaming Context + Graph + Determines whether to create a new Graph ID if there is not already one for the given Graph + - + - Implementation of Compare To for Literal Nodes + Gets the Graph ID for a Graph URI - Literal Node to Compare To + Graph URI - Simply invokes the more general implementation of this method + Should function as equivalent to the two argument version with the createIfNotExists parameter set to false - + - Helper Class containing definitions of MIME Types for the various RDF Concrete Syntaxes and Content Negotation Methods + Gets the Graph ID for a Graph URI + Graph URI + Determines whether to create a new Graph ID if there is not already one for the given Graph URI + - + - Constant for W3C File Formats Namespace + Given a non-blank Node returns the Node ID + Node + Determines whether to create a new Node ID if there is not already one for the given value + - + - MIME Type for accept any content Type + Given a Blank Node 
returns a Graph scoped Node ID + Blank Node + Determines whether to create a new Node ID if there is not already one for the given value + - + - MIME Type for URL Encoded WWW Form Content used when POSTing over HTTP + Given a Blank Node returns a Graph scoped Node ID + Blank Node + + + Should function as equivalent to the two argument version with the createIfNotExists parameter set to false + - + - MIME Type for URL Enoded WWW Form Content used when POSTing over HTTP in UTF-8 encoding + Gets the Node ID that is used to indicate that a Node does not exist in the underlying storage - + - MIME Type for Multipart Form Data + Loads a Graph creating all the Triples with virtual node values + Graph to load into + URI of the Graph to load - + - MIME Types for Turtle + Simple implementation of a Virtual Blank Node where the virtual IDs are integers - + - MIME Types for RDF/XML + Creates a new Virtual Blank Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - MIME Types for Notation 3 + Creates a new Virtual Blank Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - MIME Types for NTriples + Determines whether this Node is equal to another virtual Blank node + Other Blank Node + - + - MIME Types for NQuads + Compares this Node to another virtual Blank node + Other Blank Node + - + - MIME Types for TriG + Copies the Node to another Graph + Target Graph + - + - MIME Types for TriX + Method to be implemented in derived classes to provide comparison of VirtualId values + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. 
- + - MIME Types for RDF/JSON + Simple implementation of a Virtual Graph Literal Node where the virtual IDs are integers - + - MIME Types for SPARQL Result Sets + Creates a new Virtual Graph Literal Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - MIME Types for SPARQL Results XML + Creates a new Virtual Graph Literal Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Values - + - MIME Types for SPARQL Results JSON + Determines whether this Node is equal to another virtual Graph Literal node + Other Graph Literal Node + - + - MIME Types for SPARQL Boolean Result + Compares this Node to another virtual Graph Literal node + Other Graph Literal Node + - + - MIME Types for CSV + Copies the Node to another Graph including the materialised value if present + Target Graph + - + - MIME Types for TSV + Method to be implemented in derived classes to provide comparison of VirtualId values + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. 
- + - MIME Types for HTML + Simple implementation of a Virtual Literal Node where the virtual IDs are integers - + - MIME Type for SPARQL Queries + Creates a new Virtual Literal Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - MIME Type for SPARQL Updates + Creates a new Virtual Literal Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Default File Extension for Turtle Files + Determines whether this Node is equal to another virtual Literal node + Other Literal Node + - + - Default File Extension for RDF/XML + Compares this Node to another virtual Literal node + Other Literal Node + - + - Default File Extension for Notation 3 + Copies the Node to another Graph including the materialised value if present + Target Graph + - + - Default File Extension for NTriples + Method to be implemented in derived classes to provide comparison of VirtualId values + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. 
- + - Default File Extension for Json formats + Simple implementation of a Virtual URI Node where the virtual IDs are integers - + - Default File Extension for RDF/JSON + Creates a new Virtual URI Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Default File Extension for SPARQL XML Results Format + Creates a new Virtual URI Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Default File Extension for SPARQL JSON Results Format + Determines whether this Node is equal to another virtual URI node + Other URI Node + - + - Default File Extension for TriG + Compares this Node to another virtual URI node + Other URI Node + - + - Default File Extension for NQuads + Copies the Node to another Graph including the materialised value if present + Target Graph + - + - Default File Extension for TriX + Method to be implemented in derived classes to provide comparison of VirtualId values + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. 
- + - Default File Extension for CSV + Simple implementation of a Virtual URI Node where the virtual IDs are integers - + - Default File Extension for TSV + Creates a new Virtual Variable Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Default File Extension for HTML + Creates a new Virtual Variable Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + + + Determines whether this Node is equal to another virtual variable node + + Other Variable Node + + + - Default File Extension for XHTML + Compares this Node to another virtual Variable node + Other Variable Node + - + - Default File Extension for SPARQL Queries + Copies the Node to another Graph including the materialised value if present + Target Graph + - + - Default File Extension for SPARQL Updates + Method to be implemented in derived classes to provide comparison of VirtualId values + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. - + - Default File Extension for GZip + Base class for update operations on virtualized graphs. Implementors have to provide a method to + convert standard Nodes to their virtual form according to the IVirtualRdfProvider which is in use. + Node ID Type + Graph ID Type - + - Extensions which are considered stackable + Converts a standard INode to a virtualized node with a pre-materialized value. + Virtual RDF Provider, the object, e.g. a storage manger, that provides virtualization of nodes + Node that has to be converted to it's virtualized form with itself as materialized value. Usually a parsed Literal or Uri. 
- + - Charset constants + Virtual RDF Provider - + - Charset constants + Creates a new Store Graph Persistence Wrapper for Virtualized Nodes + Generic IO Manager + Virtual RDF Provider + Graph with virtualized Nodes to wrap + Graph URI (the URI the Graph will be persisted as) + Whether to operate in write-only mode + + + Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped must be an empty Graph + + - + - List of MIME Type Definition + Asserts a Triple after virtualization in the Graph + Triple - + - Whether MIME Type Definitions have been initialised + Retracts a Triple after virtualization from the Graph + Triple - + - Checks whether something is a valid MIME Type + Gets whether the virtualized form of a given Triple exists in this Graph - MIME Type - + Triple to test + Triple is known to the Graph - + - Determines whether the given string is valid as a type/subtype for a MIME type + Converts subject, predicate and object of a given Triple to their respective virtualized forms - String - + Triple to virtualize + The virtualized Triple. Itself, if it was already virtual. - + - Initialises the MIME Type definitions + Virtualizes a Node + Node to be virtualized + The Node in its virtual form. Itself, if it was already virtual. - + - Resets the MIME Type Definitions (the associations between file extensions, MIME types and their respective parsers and writers) to the library defaults + Abstract Base implementation of a Virtual Node which is a Node that is represented only by some ID until such time as its value actually needs materialising + Node ID Type + Graph ID Type - May be useful if you've altered the definitions and caused something to stop working as a result + As far as possible equality checks are carried out using these IDs and limited comparisons may also be done this way. 
More specific implementations may wish to derive from this class in order to override the default comparison implementation to further reduce the number of places where value materialisation is done. + + + Note that this class does not implement any of the specialised Node interfaces and instead relies on the casting of its materialised value to an appropriately typed node to provide the true values to code that needs it - + - Gets the available MIME Type Definitions + The materialised value of the Virtual Node - + - Adds a new MIME Type Definition + Creates a new Base Virtual Node - MIME Type Definition + Graph the Node belongs to + Type of the node + Virtual ID + Virtual RDF Provider - + - Registers a parser as the default RDF Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + Creates a new Base Virtual Node - RDF Parser - MIME Types - File Extensions + Graph the Node belongs to + Type of the node + Virtual ID + Virtual RDF Provider + Materialised Value - + - Registers a parser as the default RDF Dataset Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + Materialises the Value if it is not already materialised - RDF Dataset Parser - MIME Types - File Extensions - + - Registers a parser as the default SPARQL Rsults Parser for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + Called after the value is materialised for the first time - SPARQL Results Parser - MIME Types - File Extensions - + - Registers a writer as the default RDF Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + Gets the Virtual ID of the Node - RDF Writer - MIME Types - File Extensions - + - Registers a writer as the default RDF Dataset Writer for all the given MIME types and updates relevant definitions to include the MIME types and file 
extensions + Gets the Virtual RDF Provider of the Node - RDF Dataset Writer - MIME Types - File Extensions - + - Registers a writer as the default SPARQL Results Writer for all the given MIME types and updates relevant definitions to include the MIME types and file extensions + Gets whether the Nodes value has been materialised - SPARQL Results Writer - MIME Types - File Extensions - + - Gets all MIME Type definitions which support the given MIME Type + Gets the materialised value of the Node forcing it to be materialised if it hasn't already - MIME Type - - + - Gets all MIME Type definitions which support the given MIME Types + Gets the Type of the Node - MIME Types - - + - Gets all MIME Types definitions which are associated with a given file extension + Gets the Graph the Node belongs to - File Extension - - + - Builds the String for the HTTP Accept Header that should be used when you want to ask for content in RDF formats (except Sparql Results) + Gets/Sets the Graph URI of the Node - - + - Builds the String for the HTTP Accept Header that should be used for querying Sparql Endpoints where the response will be a SPARQL Result Set format + Gets the String representation of the Node formatted with the given Node formatter + Formatter - + - Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data may be RDF or a SPARQL Result Set + Gets the String representation of the Node formatted with the given Node formatter + Formatter + Triple Segment - + - Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data will be an RDF dataset + Compares this Node to another Virtual Node + Other Virtual Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
+ - + - Builds the String for the HTTP Accept Header that should be used for making HTTP Requests where the returned data may be RDF or an RDF dataset + Compares this Node to another Virtual Node + Other Virtual Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Creates a Custom HTTP Accept Header containing the given selection of MIME Types + Compares this Node to another Node - Enumeration of MIME Types to use + Other Node - - Note: No validation is done on MIME Types so it is possible to generated a malformed header using this function - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - + - Creates a Custom HTTP Accept Header containing the given selection of MIME Types where those MIME Types also appear in the list of supported Types + Compares this Node to another Blank Node - Enumeration of MIME Types to use - Enumeration of supported MIME Types + Other Blank Node - - Note: No validation is done on MIME Types so it is possible to generated a malformed header using this function - - - Use this function when you wish to generate a Custom Accept Header where the URI to which you are making requests supports a set range of URIs (given in the parameter) where that range of types may exceed the range of types actually supported by the library or your response processing code. - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + - Creates a Custom HTTP Accept Header containing only the Accept Types supported by a specific parser + Compares this Node to another Graph LiteralNode - RDF Parser + Other Graph Literal Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Creates a Custom HTTP Accept Header containing only the Accept Types supported by a specific parser + Compares this Node to another Literal Node - RDF Parser + Other Literal Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Gets the Enumeration of supported MIME Types for RDF Graphs + Compares this Node to another URI Node + Other URI Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Gets the Enumeration of supported MIME Types for RDF Datasets + Compares this Node to another Variable Node + Other Variable Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Gets the Enumeration of supported MIME Types for SPARQL Results + Checks this Node for equality against another Object + Other Object + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
+ - + - Gets the Enumeration of supported MIME Types for RDF Graphs or SPARQL Results + Checks this Node for equality against another Virtual Node + Other Virtual Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Generates a Filename Filter that can be used with any .Net application and includes all formats that dotNetRDF is aware of + Checks this Node for equality against another Virtual Node + Other Virtual Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Generates a Filename Filter that can be used with any .Net application and includes a user dictated subset of the formats that dotNetRDF is aware of + Checks this Node for equality against another Node - Allow RDF Graph formats (e.g. Turtle) - Allow RDF Dataset formats (e.g. NQuads) - Allow SPARQL Results formats (e.g. SPARQL Results XML) - Allow SPARQL Query (i.e. .rq files) - Allow SPARQL Update (i.e. .ru files) - Allow All Files (i.e. */*) + Other Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
+ - + - Applies global options to a writer + Checks the Node Types and if they are equal invokes the INode based comparison - Writer + Node to compare with for equality + - + - Applies global options to a parser + Tries to check for equality using virtual node IDs - Parser + Node to test against + Whether the virtual nodes are equal + + Whether the virtual equality test was valid, if false then other means must be used to determine equality + - + - Selects an appropriate IRdfWriter based on the given MIME Types + Checks this Node for equality against another Blank Node - MIME Types + Other Blank Node - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- + - Selects an appropriate IRdfWriter based on the given MIME Types + Checks this Node for equality against another Graph Literal Node - MIME Types - The Content Type header that should be sent in the Response to the Request - - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - - + Other Graph Literal Node - - - - Selects an appropriate IRdfWriter based on the HTTP Accept header form a HTTP Request - - Value of the HTTP Accept Header - The Content Type header that should be sent in the Response to the Request - A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + - Selects an appropriate IRdfWriter based on the HTTP Accept header form a HTTP Request + Checks this Node for equality against another Literal Node - Value of the HTTP Accept Header - A Writer for a Content Type the client accepts + Other Literal Node + - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
- + - Selects a based on the file extension + Checks this Node for equality against another URI Node - File Extension - Thrown if no writers are associated with the given file extension + Other URI Node + - - Global options pertaining to writers will be applied to the selected writer - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Selects a based on the file extension + Checks this Node for equality against another Variable Node - File Extension - Content Type of the chosen writer - Thrown if no writers are associated with the given file extension + Other Variable Node + - - Global options pertaining to writers will be applied to the selected writer - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - - + - Selects an appropriate IRdfReader based on the given MIME Types + Copies the Virtual Node into another Graph - MIME TYpes + Target Graph - + - Selects an appropriate IRdfReader based on the HTTP Content-Type header from a HTTP Response + Gets the Hash Code of the Virtual Node - Value of the HTTP Content-Type Header - + - Selects a based on the file extension + Method to be implemented in derived classes to provide comparison of VirtualId values - File Extension - + The other virtual ID value to be compared with this node's virtual ID value. + The comparison result. 
- + - Selects a SPARQL Parser based on the MIME types + Attempt to compare this node with another node - MIME Types - Whether to allow for plain text results - + The node to compare to + The comparison result + True if the comparison could be performed, false otherwise + This node can only be compared to if + is a from the same + as this node. - + - Selects an appropriate ISparqlResultsReader based on the HTTP Content-Type header from a HTTP Response + Gets the String representation of the Node - Value of the HTTP Content-Type Header - + - Selects an appropriate ISparqlResultsReader based on the HTTP Content-Type header from a HTTP Response + Gets the data for serialization - Value of the HTTP Content-Type Header - Whether you allow Sparql Boolean results in text/plain format (Boolean results in text/boolean are handled properly but text/plain results can be conflated with CONSTRUCT/DESCRIBE results in NTriples format) - + Serialization Information + Streaming Context + Thrown because serializing a Virtual Node would be lossy - + - Selects a based on the file extension + Gets the schema for XML serialization - File Extension - + - Selects an appropriate ISparqlResultsWriter based on the given MIME Types + Reads the data for XML deserialization - MIME Types - A Writer for a Content Type the client accepts - - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - - + XML Reader + Thrown because serializing a Virtual Node would be lossy - + - Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request + Writes the data for XML deserialization - String array of accepted Content Types - The Content Type header that should be sent in the Response to the Request - A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - - - This method does not take 
account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - - + XML Writer + Thrown because serializing a Virtual Node would be lossy - + - Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request + Abstract Base implementation of a Virtual Blank Node - Value of the HTTP Accept Header - The Content Type header that should be sent in the Response to the Request - A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - - + Node ID Type + Graph ID Type - + - Selects an appropriate ISparqlResultsWriter based on the HTTP Accept header form a HTTP Request - - Value of the HTTP Accept Header - A Writer for a Content Type the client accepts - - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - Global options pertaining to writers will be applied to the selected writer - - + Creates a new Virtual Blank Node + + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Selects a based on a file extension + Creates a new Virtual Blank Node - File Extension - + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Selects a based on a file extension + Takes post materialisation actions - File Extension - Content Type of the selected writer - - + - Selects a Store parser based on the MIME types + Gets the Internal ID of the Blank Node - MIME Types - - + - Selects an appropriate IStoreReader based on the HTTP Content-Type header from a HTTP Response + Compares this Node to another Blank Node - Value of the HTTP Content-Type Header + Other Blank Node + + Unless Virtual Equality 
(equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Selects a Store parser based on the file extension + Checks this Node for equality against another Blank Node - File Extension + Other Blank Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Selects an appropriate IStoreWriter based on the given MIME Types + Checks this Node for equality against another Blank Node - MIME Types + Other Blank Node - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - For writers which support ICompressingWriter they will be instantiated with the Compression Level specified by Options.DefaultCompressionLevel - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. - + - Selects an appropriate IStoreWriter based on the given MIME Types + Compares this Node to another Blank Node - MIME Types - The Content Type header that should be sent in the Response to the Request + Other Blank Node - - This method does not take account of any quality/charset preference parameters included in the Accept Header - - - For writers which support ICompressingWriter they will be instantiated with the Compression Level specified by Options.DefaultCompressionLevel - + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
- + - Selects an appropriate IStoreWriter based on the HTTP Accept header form a HTTP Request + Throws an error as blank nodes cannot be cast to types - Value of the HTTP Accept Header - The Content Type header that should be sent in the Response to the Request - A Writer for a Content Type the client accepts and the Content Type that should be sent to the client - This method does not take account of any quality/charset preference parameters included in the Accept Header + - + - Selects an appropriate IStoreWriter based on the HTTP Accept header form a HTTP Request + Throws an error as blank nodes cannot be cast to types - Value of the HTTP Accept Header - A Writer for a Content Type the client accepts - This method does not take account of any quality/charset preference parameters included in the Accept Header + - + - Selects a by file extension + Throws an error as blank nodes cannot be cast to types - File Extension - + - Selects a by file extension + Throws an error as blank nodes cannot be cast to types - File Extension - Content Type of the selected writer - + - Selects the appropriate MIME Type for the given File Extension if the File Extension is a standard extension for an RDF format + Throws an error as blank nodes cannot be cast to types - File Extension - + - Gets all the MIME Types associated with a given File Extension + Throws an error as blank nodes cannot be cast to types - File Extension - + - Gets the true file extension for a filename + Throws an error as blank nodes cannot be cast to types - - - - This is an alternative to using which is designed to take into account known extensions which are used in conjunction with other extensions and mask the true extension, for example .gz - - - Consider the filename example.ttl.gz, obtaining the extension the standard way gives only .gz which is unhelpful since it doesn't actually tell us the underlying format of the data only that it is GZipped and if it is GZipped we almost certainly want to stream 
the data rather than read all into memory and heuristically detect the actual format. Instead we'd like to get .ttl.gz as the file extension which is much more useful and this is what this function does. - - - Important: This method does not blindly return double extensions whenever they are present (since they may simply by period characters in the filename and not double extensions at all) rather it returns double extensions only when the standard extension is an extension is known to be used with double extensions e.g. .gz that is relevan to the library - - - + - Gets the true extension for a resource + Throws an error as blank nodes cannot be cast to types - Resource - + - Selects the appropriate File Extension for the given MIME Type + Throws an error as blank nodes cannot be cast to a time span - MIME Type - + - Selects the appropriate File Extension for the given RDF Writer + Gets the URI of the datatype this valued node represents as a String - RDF Writer - - + - Selects the appropriate File Extension for the given Store Writer + Gets the numeric type of the node - Store Writer - - + - - Top Level Namespace for the dotNetRDF Library which embodies a simple but powerful API for working with RDF and SPARQL. - - - Specific Namespaces within the Hierarchy provide Parsing and Serialization functionality along with a host of related classes to support these functions. - - - Support for querying RDF is provided in the Query namespace which includes SPARQL Query, limited reasoning support in the Query.Inference namespace and a Pellet Server client in the Query.Inference.Pellet namespace. - - - Support for updating RDF based on the SPARQL 1.1 Update and Graph Store HTTP Protocol for RDF Graph Management is provided in the Update and Update.Protocol namespaces. - -

Third Party Storage

- For communicating with arbitrary Triple Stores we have a dedicated Storage namespace. As of this release we support the following Triple Stores: -
    -
  • AllegroGraph
  • -
  • Dydra
  • -
  • 4store
  • -
  • Fuseki
  • -
  • Any Sesame HTTP Protocol compliant store e.g. Sesame, OWLIM
  • -
  • Any SPARQL Graph Store HTTP Protocol for RDF Graph Management compliant stores
  • -
  • Any SPARQL store that exposes a Query and/or Update endpoint
  • -
  • Stardog
  • -
  • Virtuoso
  • -
-
-

ASP.Net Integration

- - For those building ASP.Net based websites the Web namespace is dedicated to providing classes for integrating RDF into ASP.Net applications. - -

Ontology API

- - There is also an Ontology namespace which provides a more resource and ontology centric API for working with RDF than the standard Graph and Triple centric APIs - -

Configuration API

- - We provide a Configuration API which provides for encoding configuration in RDF Graphs. This configuration system is used extensively as part of the ASP.Net support as it allows for much more expressive and flexible configurations than were previously possible. See the documentation on the main website for many detailed examples. This is primarily intended as an easy way to help deploy configurations for ASP.Net applications though you can make use of the API to describe the configuration of various types of objects in other applications, for example we use it in our Store Manager utility to store connection details. - -

Notes

- - dotNetRDF 1.0.0 is now considered a stable release, this means it should be stable for production scenarios. However it is open source software and despite our best efforts there may still be bugs. Please help us improve this library by emailing us when you find a bug, you can use the Bug Reports list to report bugs, the Support list to ask questions and the Developer list to request new features or discuss development plans (all these are SourceForge mailing lists which require subscription). - -

Alternative Builds

-
Mono Build
- - There is no separate build for Mono since dotNetRDF can run directly under Mono. Note that there may still be some features of .Net we use that Mono does not fully support, see the Mono Issues page for more details. We recommend Mono 2.10 or higher though the library should run on recent 2.6/2.8 releases. - -
Client Profile Build
- - The Client Profile build omits the reference to System.Web so lacks the ASP.Net integration and some other features that rely on this dependency but is otherwise a fairly complete build of the library. - -
Silverlight/Windows Phone 7 Build
- - The Silverlight and Windows Phone 7 builds of dotNetRDF (dotNetRDF.Silverlight.dll and dotNetRDF.WindowsPhone.dll) are experimental builds that receive limited internal testing so please be aware that these are not as stable as the standard .Net builds. These build runs on Silverlight 4/Windows Phone 7 and omits the following features since they can't be supported on these platforms: - -
    -
  • Most of the Web namespaces
  • -
  • Does not include parts of the Storage namespace that would require synchronous HTTP
  • -
  • No String normalization support
  • -
  • No UriLoader caching support
  • -
  • No multi-threaded support where ReaderWriterLockSlim is used
  • -
  • Various writers and parsers use streaming rather than DOM based XML parsing
  • -
  • No support for XSL in TriX files
  • -
  • Synchronous HTTP Request Features - For most of these there are asynchronous callback driven versions of these features available from the 0.5.0 release onwards
  • -
+ Abstract Base implementation of a Virtual Graph Literal Node
+ Node ID Type + Graph ID Type
- + - Delegate Type for the Events of the Namespace Mapper + Creates a new Virtual Graph Literal Node - Namespace Prefix - Namespace Uri + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Class for representing Mappings between Prefixes and Namespace URIs + Creates a new Virtual Graph Literal Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Constant Uri for the RDF Namespace + Takes post materialisation actions - + - Constant Uri for the RDF Scheme Namespace + Gets the subgraph this Graph Literal represents - + - Constant Uri for the XML Scheme Namespace + Compares this Node to another Graph Literal Node + Other Graph Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Constant Uri for the OWL Namespace + Checks this Node for equality against another Graph Literal Node + + Other Graph Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + + + + + Checks this Node for equality against another Graph Literal Node + Other Graph Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Mapping of Prefixes to URIs + Compares this Node to another Graph Literal Node + Other Graph Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
+ - + - Mapping of URIs to Prefixes + Throws an error as graph literal nodes cannot be cast to types + - + - Constructs a new Namespace Map + Throws an error as graph literal nodes cannot be cast to types - The Prefixes rdf, rdfs and xsd are automatically defined + - + - Constructs a new Namespace Map which is optionally empty + Throws an error as graph literal nodes cannot be cast to types - Whether the Namespace Map should be empty, if set to false the Prefixes rdf, rdfs and xsd are automatically defined + - + - Constructs a new Namespace Map which is based on an existing map + Throws an error as graph literal nodes cannot be cast to types - + - + - Returns the Prefix associated with the given Namespace URI + Throws an error as graph literal nodes cannot be cast to types - The Namespace URI to lookup the Prefix for - String prefix for the Namespace + - + - Returns the Namespace URI associated with the given Prefix + Throws an error as graph literal nodes cannot be cast to types - The Prefix to lookup the Namespace URI for - URI for the Namespace + - + - Adds a Namespace to the Namespace Map + Throws an error as graph literal nodes cannot be cast to types - Namespace Prefix - Namespace Uri + - + - Removes a Namespace from the NamespaceMapper + Throws an error as graph literal nodes cannot be cast to types - Namespace Prefix of the Namespace to remove + - + - Method which checks whether a given Namespace Prefix is defined + Throws an error as graph literals cannot be cast to a time span - Prefix to test - + - Method which checks whether a given Namespace is defined + Gets the URI of the datatype this valued node represents as a String - Namespace to test - + - Clears the Namespace Map + Gets the numeric type of the node - + - Gets a Enumerator of all the Prefixes + Abstract Base implementation of a Virtual Literal Node + Node ID Type + Graph ID Type - + - A Function which attempts to reduce a Uri to a QName + Creates a new Virtual Literal Node - The Uri to attempt 
to reduce - The value to output the QName to if possible - - This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Imports the contents of another Namespace Map into this Namespace Map + Creates a new Virtual Literal Node - Namespace Map to import - - Prefixes in the imported Map which are already defined in this Map are ignored, this may change in future releases. - + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Event which is raised when a Namespace is Added + Takes post materialisation actions - + - Event which is raised when a Namespace is Modified + Gets the lexical value of the Literal - + - Event which is raised when a Namespace is Removed + Gets the language specifier (if any) of the Literal - + - Internal Helper for the NamespaceAdded Event which raises it only when a Handler is registered + Gets the Datatype (if any) of the Literal - Namespace Prefix - Namespace Uri - + - Internal Helper for the NamespaceModified Event which raises it only when a Handler is registered + Compares this Node to another Literal Node - Namespace Prefix - Namespace Uri + Other Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
+ - + - Internal Helper for the NamespaceRemoved Event which raises it only when a Handler is registered + Checks this Node for equality against another Literal Node - Namespace Prefix - Namespace Uri + Other Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Disposes of a Namespace Map + Checks this Node for equality against another Literal Node + Other Literal Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Class for representing Mappings from URIs to QNames + Compares this Node to another Literal Node + Other Literal Node + - Used primarily in outputting RDF syntax + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. - + - Mapping of URIs to QNames + Ensures that a strong value has been determined for this node - + - Next available Temporary Namespace ID + Gets the value as a string + + + Forces a materialisation of the value + - + - Creates a new QName Output Mapper using the given Namespace Map + Gets the value as an integer - Namespace Map + + + Forces a materialisation of the value + - + - Creates a new QName Output Mapper which has an empty Namespace Map + Gets the value as a decimal + + + Forces a materialisation of the value + - + - A Function which attempts to reduce a Uri to a QName + Gets the value as a float - The Uri to attempt to reduce - The value to output the QName to if possible - This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. 
If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. + + Forces a materialisation of the value + - + - A Function which attempts to reduce a Uri to a QName and issues a Temporary Namespace if required + Gets the value as a double - The Uri to attempt to reduce - The value to output the QName to if possible - The Temporary Namespace issued (if any) - - This function will always returns a possible QName for the URI if the format of the URI permits it. It doesn't guarentee that the QName will be valid for the syntax it is being written to - it is up to implementers of writers to validate the QNames returned. - - - Where necessary a Temporary Namespace will be issued and the tempNamespace parameter will be set to the prefix of the new temporary namespace - + Forces a materialisation of the value - + - Adds a QName mapping to the cache + Gets the value as a boolean - URI - Mapping + + + Forces a materialisation of the value + - + - Gets the next available Temporary Namespace ID + Gets the value as a date time + + Forces a materialisation of the value + - + - Thread Safe version of the QNameOutputMapper + Gets the value as a date time + + + + Forces a materialisation of the value + + + + + Gets the value as a time span + + + Forces a materialisation of the value + - + - Creates a new Thread Safe QName Output Mapper + Gets the URI of the datatype this valued node represents as a String - Namespace Mapper - + - Adds a QName Mapping to the Cache in a Thread Safe way + Gets the numeric type of the node - Key - Value - + - Adds a Namespace to the QName Output Mapper + Abstract Base implementation of a Virtual URI Node - Prefix - Namespace URI + Node ID Type + Graph ID Type - + - Represents a mapping from a URI to a QName + Creates a new Virtual URI Node + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Creates a new QName Mapping + Creates a new Virtual URI Node - URI + Graph the Node belongs to + 
Virtual ID + Virtual RDF Provider + Materialised Value - + - URI this is a mapping for + Takes post materialisation actions - + - QName this URI maps to + Gets the URI - + - Gets the String representation of the URI + Compares this Node to another URI Node + Other URI Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Checks whether this is equal to another Object + Checks this Node for equality against another URI Node - Object to test against + Other URI Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Class for representing Triple Stores which are collections of RDF Graphs + Checks this Node for equality against another URI Node + Other URI Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - List of Reasoners that are applied to Graphs as they are added to the Triple Store + Compares this Node to another URI Node + Other URI Node + + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. 
+ - + - Controls whether inferred information is stored in a special Graph or in the original Graph + Gets the string value of the node - + - Graph Uri for the special Graph used to store inferred information + Throws an error as URI nodes cannot be cast to numerics + - + - Creates a new Triple Store using a new empty Graph collection + Throws an error as URI nodes cannot be cast to numerics + - + - Creates a new Triple Store using the given Graph collection which may be non-empty + Throws an error as URI nodes cannot be cast to numerics - Graph Collection + - + - Returns whether the Store contains the given Triple within the Query Triples + Throws an error as URI nodes cannot be cast to numerics - Triple to search for - + - Selects all Triples which have a Uri Node with the given Uri from all the Query Triples + Throws an error as URI nodes cannot be cast to a boolean - Uri - + - Selects all Triples which contain the given Node from all Graphs in the Triple Store + Throws an error as URI nodes cannot be cast to a date time - Node - + - Selects all Triples where the Object is a Uri Node with the given Uri from all Graphs in the Triple Store + Throws an error as URI nodes cannot be cast to a date time - Uri - + - Selects all Triples where the Object is a given Node from all Graphs in the Triple Store + Throws an error as URIs cannot be cast to a time span - Node - + - Selects all Triples where the Predicate is a given Node from all Graphs in the Triple Store + Gets the URI of the datatype this valued node represents as a String - Node - - + - Selects all Triples where the Predicate is a Uri Node with the given Uri from all Graphs in the Triple Store + Gets the numeric type of the expression - Uri - - + - Selects all Triples where the Subject is a given Node from all Graphs in the Triple Store + Abstract Base implementation of a Virtual Variable Node - Node - + Node ID Type + Graph ID Type - + - Selects all Triples where the Subject is a Uri Node with the given Uri 
from all Graphs in the Triple Store + Creates a new Virtual Variable Node - Uri - + Graph the Node belongs to + Virtual ID + Virtual RDF Provider - + - Selects all the Triples with the given Subject-Predicate pair from all the Query Triples + Creates a new Virtual Variable Node - Subject - Predicate - + Graph the Node belongs to + Virtual ID + Virtual RDF Provider + Materialised Value - + - Selects all the Triples with the given Predicate-Object pair from all the Query Triples + Takes post materialisation actions - Predicate - Object - - + - Selects all the Triples with the given Subject-Object pair from all the Query Triples + Gets the Variable Name - Subject - Object - - + - Selects all Triples which have a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Compares this Node to another Variable Node - List of the Graph URIs of Graphs you want to select over - Uri + Other Variable Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Selects all Triples which contain the given Node from a Subset of Graphs in the Triple Store + Checks this Node for equality against another Variable Node - List of the Graph URIs of Graphs you want to select over - Node + Other Variable Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. 
+ - + - Selects all Triples where the Object is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Checks this Node for equality against another Variable Node - List of the Graph URIs of Graphs you want to select over - Uri + Other Variable Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform the equality check. + - + - Selects all Triples where the Object is a given Node from a Subset of Graphs in the Triple Store + Compares this Node to another Variable Node - List of the Graph URIs of Graphs you want to select over - Node + Other Variable Node + + Unless Virtual Equality (equality based on the Virtual RDF Provider and Virtual ID) can be determined or the Nodes are of different types then the Nodes value will have to be materialised in order to perform comparison. + - + - Selects all Triples where the Predicate is a given Node from a Subset of Graphs in the Triple Store + Throws an error as variables nodes cannot be cast to types - List of the Graph URIs of Graphs you want to select over - Node - + - Selects all Triples where the Predicate is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Throws an error as variables nodes cannot be cast to types - List of the Graph URIs of Graphs you want to select over - Uri - + - Selects all Triples where the Subject is a given Node from a Subset of Graphs in the Triple Store + Throws an error as variables nodes cannot be cast to types - List of the Graph URIs of Graphs you want to select over - Node - + - Selects all Triples where the Subject is a Uri Node with the given Uri from a Subset of Graphs in the Triple Store + Throws an error as variables nodes cannot be cast to types - List of the Graph URIs of Graphs you want to select over - Uri - + - Executes a SPARQL Query on the Triple Store + Throws an error as 
variables nodes cannot be cast to types - SPARQL Query as unparsed String - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. - - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - - + - Executes a SPARQL Query on the Triple Store + Throws an error as variables nodes cannot be cast to types - SPARQL Query as a SparqlQuery instance - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. - - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. 
- - - + - Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided + Throws an error as variables nodes cannot be cast to types - RDF Handler - Results Handler - SPARQL Query as unparsed String - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. - - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - + - + - Executes a SPARQL Query on the Triple Store processing the results with an appropriate handler from those provided + Throws an error as variables nodes cannot be cast to types - RDF Handler - Results Handler - SPARQL Query as unparsed String - - - This method of making queries often leads to no results because of misconceptions about what data is being queries. dotNetRDF's SPARQL engine only queries the default unnamed graph of the triple store (the graph added with a null URI) by default unless your query uses FROM clauses to change the default graph or you use GRAPH clauses to access named graphs in the store. Therefore a common mistake is to add a single graph to the store and then query the store which typically results in no results because usually the added graph is named and so is not queried. 
- - - We recommend using a instead for making queries over in-memory data since using our standard implementation () affords you much more explicit control over which graphs are queried. - - + - + - Applies Inference to the given Graph + Throws an error as variables cannot be cast to a time span - Graph to apply inference to + - + - Adds an Inference Engine to the Triple Store + Gets the URI of the datatype this valued node represents as a String - Reasoner to add - + - Removes an Inference Engine from the Triple Store + Gets the numeric type of the node - Reasoner to remove - + - Clears all Inference Engines from the Triple Store + A Cache that maps from Virtual IDs to Materialised Values - + - Disposes of a Triple Store + Creates a new Virtual ID cache + Function that maps Node IDs to dictionary keys - + - Executes an Update against the Triple Store + Gets/Sets the materialised value for a particular Virtual ID - SPARQL Update Command(s) - - As per the SPARQL 1.1 Update specification the command string may be a sequence of commands - + Virtual ID + - + - Executes a single Update Command against the Triple Store + A Cache that maps from Virtual IDs to Materialised Values where the IDs map directly to dictionary keys - SPARQL Update Command + Node ID Type - + - Executes a set of Update Commands against the Triple Store + Creates a new Simple Virtual Node Cache - SPARQL Update Command Set - + - Event Handler for the Graph Added event of the underlying Graph Collection which calls the normal event processing of the parent class BaseTripleStore and then applies Inference to the newly added Graph + Class of exceptions that may occur when performing SPARQL Updates - Sender - Graph Event Arguments - + - A thread safe variant of , simply a instance with a decorator around it's underlying + Creates a new RDF Update Exception + Error Message - + - Creates a new Thread Safe triple store + Createa a new RDF Update Exception + Error Message + Exception that caused this 
exception to be thrown - + - Creates a new Thread safe triple store using the given Thread safe graph collection + Class for representing Timeout errors that occur while updating RDF using SPARQL - Collection - + - Creates a new Thread safe triple store using a thread safe decorator around the given graph collection + Creates a new SPARQL Update Timeout Exception - Collection + Error Message - + - Tools class which contains a number of utility methods which are declared as static methods + Class for representing Permissions errors with SPARQL Updates - + - Checks whether a Uri is valid as a Base Uri for resolving Relative URIs against + Creates a new Permission Exception - Base Uri to test - True if the Base Uri can be used to resolve Relative URIs against - A Base Uri is valid if it is an absolute Uri and not using the mailto: scheme + Error Message - + - Checks whether a URI Reference appears malformed and if so fixes it + Creates a new Permission Exception - URI Reference - + Error Message + Exception that caused this exception to be thrown - + - Returns a URI with any Fragment ID removed from it + Class for representing malformed SPARQL Updates - URI - + + This is distinct from a RdfParseException as it is possible for an update to be syntactically valid but semantically malformed + - + - Generic Helper Function which Resolves Uri References against a Base Uri + Creates a new Malformed Update Exception - Uri Reference to resolve - Base Uri to resolve against - Resolved Uri as a String - RDF Parse Exception if the Uri cannot be resolved for a know reason - Uri Format Exception if one/both of the URIs is malformed + Error Message - + - Generic Helper Function which Resolves Uri References against a Base Uri + Creates a new Malformed Update Exception - Uri Reference to resolve - Base Uri to resolve against - Resolved Uri as a String - Uri Format Exception if one/both of the URIs is malformed + Error Message + Exception that caused this exception to be thrown - + - 
Resolves a QName into a Uri using the Namespace Mapper and Base Uri provided + Class of exceptions that may occur when using the SPARQL Graph Store HTTP Protocol for Graph Management - QName to resolve - Namespace Map to resolve against - Base Uri to resolve against - - + - Resolves a QName into a Uri using the Namespace Mapper and Base Uri provided + Creates a new SPARQL Graph Store HTTP Protocol Exception - QName to resolve - Namespace Map to resolve against - Base Uri to resolve against - Whether when the default prefix is used but not defined it can fallback to Base URI - + Error Message - + - Resolves a QName/Uri into a Uri using the Namespace Mapper and Base Uri provided + Creates a new SPARQL Graph Store HTTP Protocol Exception - QName/Uri to resolve - Namespace Map to resolve against - Base Uri to resolve against - + Error Message + Exception that caused this Exception - + - Copies a Node so it can be used in another Graph since by default Triples cannot contain Nodes from more than one Graph + Exception that occurs when a Protocol Processor cannot resolve the URI for the Graph to be acted upon - Node to Copy - Graph to Copy into - Indicates whether the Copy should preserve the Graph Uri of the Node being copied - - + - Copies a Node so it can be used in another Graph since by default Triples cannot contain Nodes from more than one Graph + Creates a new Protocol URI Resolution Exception - Node to Copy - Graph to Copy into - - - - Warning: Copying Blank Nodes may lead to unforseen circumstances since no remapping of IDs between Graphs is done - - - + - Copies a Node using another Node Factory + Creates a new Protocol URI Resolution Exception - Node to copy - Factory to copy into - - - - Warning: Copying Blank Nodes may lead to unforseen circumstances since no remapping of IDs between Factories is done - - + Error Message - + - Copies a Triple from one Graph to another + Exception that occurs when a Protocol Processor is provided with a invalid URI for the 
Graph to be acted upon - Triple to copy - Graph to copy to - - + - Copies a Triple from one Graph to another + Creates a new Protocol Invalid URI Exception - Triple to copy - Graph to copy to - Indicates whether the Copy should preserve the Graph Uri of the Nodes being copied - - + - Does a quick and simple combination of the Hash Codes of two Objects - - First Object - Second Object - + + Namespaces containing classes which implement the SPARQL Graph Store HTTP Protocol for RDF Graph Management + + - + - Prints Debugging Output to the Console Standard Out for a HTTP Web Request + Abstract Base class for SPARQL Graph Store HTTP Protocol for Graph Management implementations - HTTP Web Request - Only available in Debug builds - + - Prints Debugging Output to the Console Standard Out for a HTTP Web Response + This is the Pattern that is used to check whether ?default is present in the querystring. This is needed since IIS does not recognise ?default as being a valid querystring key unless it ends in a = which the specification does not mandate so cannot be assumed - HTTP Web Response - Only available in Debug builds - + - Abstract Base Class for URI Nodes + Processes a GET operation + HTTP Context - + - Internal Only Constructor for URI Nodes + Processes a POST operation - Graph this Node is in - URI + HTTP Context - + - Internal Only Constructor for URI Nodes + Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph - Graph this Node is in - QName for the Node + HTTP Context - This Constructor tries to resolve the QName using the NamespaceMapper and Base Uri of the Graph it is in. Exceptions may occur if we cannot resolve the QName correctly. + + This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. 
+ - + - Internal Only Constructor for URI Nodes + Processes a PUT operation - URI + HTTP Context - + - Deserialization Only Constructor + Processes a DELETE operation + HTTP Context - + - Deserialization Constructor + Processes a HEAD operation - Serialization Information - Streaming Context + HTTP Context - + - Gets the Uri for this Node + Processes a PATCH operation + HTTP Context - + - Implementation of Equality for Uri Nodes + Gets the Graph URI that the request should affect - Object to compare with + HTTP Context + + + + + Gets the Graph URI that the request should affect + + HTTP Context + Graph parsed from the request body - URI Nodes are considered equal if the string form of their URIs match using Ordinal string comparison + The Graph parameter may be null in which case the other overload of this method will be invoked - + - Implementation of Equality for Uri Nodes + Generates a new Graph URI that should be used to create a new Graph in the Store in conjunction with the ProcessPostCreate() operation - Object to compare with + HTTP Context + Graph parsed from the request body - URI Nodes are considered equal if the string form of their URIs match using Ordinal string comparison + Default behaviour is to mint a URI based on a hash of the Request IP and Date Time. 
Implementations can override this method to control URI creation as they desire - + - Determines whether this Node is equal to a Blank Node (should always be false) + Gets the Graph which can be parsed from the request body - Blank Node + HTTP Context + + In the event that there is no request body a null will be returned + - + - Determines whether this Node is equal to a Graph Literal Node (should always be false) + Sends the given Graph to the Client via the HTTP Response - Graph Literal Node - + HTTP Context + Graph to send - + - Determines whether this Node is equal to a Literal Node (should always be false) + Retrieves the Graph with the given URI - Literal Node + Graph URI + + + Helper method intended for use by the ProcessGet() and ProcessHead() methods + + - + - Determines whether this Node is equal to a URI Node + Determines whether a Graph with the given URI exists - URI Node + Graph URI - + - Determines whether this Node is equal to a Variable Node (should always be false) + A processor for the SPARQL Graph Store HTTP Protocol which operates by performing the desired operations on some arbitrary underlying Store for which an IStorageProvider is available - Variable Node - - + - Determines whether this Node is equal to a URI Node + Creates a new Generic Protocol Processor - URI Node - + Generic IO Manager - + - Gets a String representation of a Uri as a plain text Uri + Processes a GET operation - + HTTP Context + + Implemented by making a call to LoadGraph() on the underlying IStorageProvider + - + - Implementation of Compare To for Uri Nodes + Processes a POST operation - Node to Compare To - + HTTP Context - Uri Nodes are greater than Blank Nodes and Nulls, they are less than Literal Nodes and Graph Literal Nodes. -

- Uri Nodes are ordered based upon lexical ordering of the string value of their URIs + + Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. + + + Otherwise this is implemented using UpdateGraph() if updates are supported, if not then the Graph has to be loaded, the POSTed data merged into it and then the Graph is saved again. +
- + - Returns an Integer indicating the Ordering of this Node compared to another Node + Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph - Node to test against - + HTTP Context + + + This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. + + - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Processes a PUT operation - Node to test against - + HTTP Context + + + Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. + + + Implemented by calling SaveGraph() on the underlying manager + + - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Processes a DELETE operation - Node to test against - + HTTP Context + + + Warning: If the underlying IStorageProvider is read-only then this operation returns a 403 Forbidden. + + + The delete operation does not explicitly remove the Graph but simply replaces it with an empty Graph + + - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Processes a HEAD operation - Node to test against - + HTTP Context - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Processes a PATCH operation - Node to test against - + HTTP Context - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Retrieves the Graph with the given URI - Node to test against + Graph URI - + - Gets the data for serialization + Determines whether a Graph with the given URI exists - Serialization Information - Streaming Context + Graph URI + - + - Reads the data for XML deserialization + Interface for SPARQL Graph Store HTTP Protocol for Graph Management processors - XML Reader - + - Writes the data for XML serialization + Processes a GET operation which should retrieve a Graph from the Store and return it - XML Writer + 
HTTP Context - + - Gets the value of the node as a string + Processes a POST operation which should add triples to a Graph in the Store - + HTTP Context - + - Throws an error as URIs cannot be cast to numerics + Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph - + HTTP Context + + + This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. + + - + - Throws an error as URIs cannot be cast to numerics + Processes a PUT operation which should save a Graph to the Store completely replacing any existing Graph with the same URI - + HTTP Context - + - Throws an error as URIs cannot be cast to numerics + Processes a DELETE operation which delete a Graph from the Store - + HTTP Context - + - Throws an error as URIs cannot be cast to numerics + Processes a HEAD operation which gets information about a Graph in the Store - + HTTP Context - + - Throws an error as URIs cannot be cast to a boolean + Processes a PATCH operation which may choose - + - + - Throws an error as URIs cannot be cast to a date time + A processor for the SPARQL Graph Store HTTP Protocol which operates by using the libraries in-memory Leviathan SPARQL engine and converting protocol actions to SPARQL Query/Update commands as appropriate - - + - Throws an error as URIs cannot be cast to a date time + Creates a new Leviathan Protocol Processor - + Triple Store - + - Throws an error as URIs cannot be cast to a time span + Creates a new Leviathan Protocol Processor - + SPARQL Dataset - + - Gets the URI of the datatype this valued node represents as a String + A processor for the SPARQL Graph Store HTTP Protocol which operates by translating the requests into SPARQL Query/Update commands as specified by the SPARQL Graph Store HTTP Protocol specification and passing the generated commands to a ISparqlUpdateProcessor which will handle the actual application of the updates + + The conversion 
from HTTP operation to SPARQL Query/Update is as defined in the SPARQL 1.1 Graph Store HTTP Protocol specification + - + - Gets the numeric type of the expression + Creates a new Protocol to Update Processor + Query Processor + Update Processor - + - Class for representing URI Nodes + Processes a GET operation + HTTP Context - + - Internal Only Constructor for URI Nodes + Processes a POST operation - Graph this Node is in - URI for the Node + HTTP Context - + - Internal Only Constructor for URI Nodes + Processes a POST operation which adds triples to a new Graph in the Store and returns the URI of the newly created Graph - Graph this Node is in - QName for the Node + HTTP Context - This Constructor tries to resolve the QName using the NamespaceMapper and Base Uri of the Graph it is in. Exceptions may occur if we cannot resolve the QName correctly. + + This operation allows clients to POST data to an endpoint and have it create a Graph and assign a URI for them. + - + - Deserilization Only Constructor + Processes a PUT operation + HTTP Context - + - Deserialization Constructor + Processes a DELETE operation - Serialization Information - Streaming Context + HTTP Context - + - Implementation of Compare To for URI Nodes + Processes a HEAD operation - URI Node to Compare To - - - Simply invokes the more general implementation of this method - + HTTP Context - + - Determines whether this Node is equal to a URI Node + Processes a PATCH operation - URI Node + HTTP Context + + + + Retrieves the Graph with the given URI + + Graph URI - + - Class for representing RDF Triples in memory + Determines whether a Graph with the given URI exists + Graph URI + - + - Constructs a Triple from Nodes that belong to the same Graph/Node Factory + + Namespace for performing updates on Triple Stores using SPARQL Update + + + This is a new part of the API introduced in the 0.3.0 release and adds support for using SPARQL to update Triple Stores. 
SPARQL Update is part of the new SPARQL 1.1 standard and provides syntax for inserting, modifying and deleting data as well as managing graphs in a store. + - Subject of the Triple - Predicate of the Triple - Object of the Triple - Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory - Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Constructs a Triple from Nodes that belong to the same Graph/Node Factory and associates this Triple with the given Graph (doesn't assert the Triple) + + Namespace containing classes which model SPARQL Update Commands. These can be used both to represent SPARQL Updates and to execute them over in-memory stores. + - Subject - Predicate - Object - Graph - Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory - Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Constructs a Triple from Nodes that belong to the same Graph/Node Factory with some Context + Represents the SPARQL Update ADD Command - Subject of the Triple - Predicate of the Triple - Object of the Triple - Context Information for the Triple - Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory - Thrown if the Nodes aren't all from the same Graph/Node Factory - + - Creates a Triple and associates it with the given Graph URI permanently (though not with a specific Graph as such) + Creates a Command which merges the data from the Source Graph into the Destination Graph - Subject of the Triple - Predicate of the Triple - Object of the Triple - Graph URI - Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory - Thrown if the Nodes aren't all from the same Graph/Node Factory + Source Graph URI + Destination Graph URI + Whether errors should be suppressed - + - Constructs a Triple from Nodes that belong to the same Graph/Node Factory with some Context + Creates a Command which merges the data from the Source Graph into the 
Destination Graph - Subject of the Triple - Predicate of the Triple - Object of the Triple - Context Information for the Triple - Graph URI - Will throw an RdfException if the Nodes don't belong to the same Graph/Node Factory - Thrown if the Nodes aren't all from the same Graph/Node Factory + Source Graph URI + Destination Graph URI - + - Gets the Subject of the Triple + Evaluates the Command in the given Context + Evaluation Context - + - Gets the Predicate of the Triple + Processes the Command using the given Update Processor + SPARQL Update Processor - + - Gets the Object of the Triple + Abstract Base class for classes that represent SPARQL Update INSERT, DELETE and INSERT/DELETE commands - + - Gets the Graph this Triple was created for + URI from the WITH statement - This is not necessarily the actual Graph this Triple is asserted in since this property is set from the Subject of the Triple when it is created and it is possible to create a Triple without asserting it into an actual Graph or to then assert it into a different Graph. - + - Gets the Uri of the Graph this Triple was created for + URIs for the USING clauses - This is not necessarily the actual Graph Uri of the Graph this Triple is asserted in since this property is set from the Subject of the Triple when it is created and it is possible to create a Triple without asserting it into an actual Graph or to then assert it into a different Graph. - + - Gets the Context Information for this Triple + URIS for the USING NAMED clauses - - Context may be null where no Context for the Triple has been defined - - + - Gets an enumeration of the Nodes in the Triple + Creates a new Base Modification Command - - Returned as subject, predicate, object - + Update Command Type - + - Gets whether the Triple is a Ground Triple + Gets the URIs specified in USING clauses - - - A Ground Triple is any Triple considered to state a single fixed fact. In practise this means that the Triple does not contain any Blank Nodes. 
- - - + - Checks whether the Triple involves a given Node + Gets the URIs specified in USING NAMED clauses - The Node to test upon - True if the Triple contains the given Node - + - Checks whether the Triple involves a given Uri + Gets the URI of the Graph specified in the WITH clause - The Uri to test upon - True if the Triple has a UriNode with the given Uri - + - Indicates whether the Triple has the given Node as the Subject + Adds a new USING URI - Node to test upon - + URI - + - Indicates whether the Triple has the given Node as the Predicate + Adds a new USING NAMED URI - Node to test upon - + URI - + - Indicates whether the Triple has the given Node as the Object + Determines whether a Graph Pattern is valid for use in an DELETE pattern - Node to test upon + Graph Pattern + Is this the top level pattern? - + - Implementation of Equality for Triples + Abstract Base Class for SPARQL Update Commands which move data between Graphs - Object to compare with - - - Triples are considered equal on the basis of two things: -
    -
  1. The Hash Codes of the Triples are identical
  2. -
  3. The logical conjunction (AND) of the equality of the Subject, Predicate and Object is true. Each pair of Nodes must either be Equal using Node Equality or are both Blank Nodes and have identical Node IDs (i.e. are indistinguishable for equality purposes on a single Triple level)
  4. -
-
- + - Implementation of Hash Codes for Triples + Source Graph URI - - - - Returns the Hash Code of the Triple which is calculated as the Hash Code of the String formed by concatenating the Hash Codes of its constituent Nodes. This Hash Code is precomputed in the Constructor of a Triple since it will be used a lot (in Triple Equality calculation, Triple Collections etc) - - - Since Hash Codes are based on a String representation there is no guarantee of uniqueness though the same Triple will always give the same Hash Code (on a given Platform - see the MSDN Documentation for string.GetHashCode() for further details) - - - + - Gets a String representation of a Triple in the form 'Subject , Predicate , Object' + Destination Graph URI - - + - Gets a String representation of a Triple in the form 'Subject , Predicate , Object' with optional compression of URIs to QNames + Whether errors should be suppressed - Controls whether URIs will be compressed to QNames in the String representation - - + - Gets the String representation of a Triple using the given Triple Formatter + Creates a new Transfer Command - Formatter - + Command Type + Source Graph URI + Destination Graph URI + Whether errors should be suppressed - + - Implementation of CompareTo for Triples which allows Triples to be sorted + Creates a new Transfer Command - Triple to compare to - - Triples are Ordered by Subjects, Predicates and then Objects. 
Triples are only partially orderable since the CompareTo methods on Nodes only define a partial ordering over Nodes + Command Type + Source Graph URI + Destination Graph URI - + - Gets the data for serialization + URI of the Source Graph - Serilization Information - Streaming Context - + - Gets the schema for XML serialization + URI of the Destination Graph - - + - Reads the data for XML deserialization + Whether errors during evaluation should be suppressed - XML Reader - + - Writes the data for XML serialization + Gets whether the Command affects a Single Graph - XML Writer - + - Basic Triple Collection which is not indexed + Gets whether the Command affects a given Graph + Graph URI + - + - Underlying Storage of the Triple Collection + Gets the String representation of the Command + - + - Creates a new Triple Collection + Mode by which to clear Graphs - + - Determines whether a given Triple is in the Triple Collection + Clears a specific Graph of Triples - The Triple to test - True if the Triple already exists in the Triple Collection - + - Adds a Triple to the Collection + Clears all Named Graphs of Triples - Triple to add - + - Deletes a Triple from the Colleciton + Clears the Default Graph of Triples - Triple to remove - Deleting something that doesn't exist has no effect and gives no error - + - Gets the Number of Triples in the Triple Collection + Clears all Graphs of Triples - + - Gets the given Triple + Represents the SPARQL Update CLEAR command - Triple to retrieve - - Thrown if the given Triple does not exist in the Triple Collection - + - Gets all the Nodes which are Subjects of Triples in the Triple Collection + Creates a Command which clears the given Graph or Graphs depending on the Clear Mode specified + Graph URI + Clear Mode + Whether errors should be suppressed - + - Gets all the Nodes which are Predicates of Triples in the Triple Collection + Creates a Command which clears the given Graph + URI of the Graph to clear - + - Gets all the Nodes 
which are Objects of Triples in the Triple Collectio + Creates a Command which clears the Default Graph (if any) - + - Gets the Enumerator for the Collection + Creates a Command which performs the specified type of clear - + Clear Mode + Whether errors should be suppressed - + - Gets the Enumerator for the Collection + Creates a Command which performs the specified type of clear - + Clear Mode - + - Disposes of a Triple Collection + Gets whether this Command affects a Single Graph - + - Thread Safe decorator for triple collections + Gets whether this Command affects the given Graph - - Depending on the platform this either uses to provide MRSW concurrency or it uses to provide exclusive access concurrency, either way usage is thread safe - - This decorator provides thread safe access to any underlying triple collection + Graph URI + - + - Creates a new thread safe triple collection which wraps a new instance of the default unindexed + Gets the URI of the Graph to be cleared (or null if the default graph should be cleared) - + - Creates a new thread safe triple collection which wraps the provided triple collection + Gets whether errors should be suppressed - Triple Collection - + - Enters the write lock + Gets the Mode by which Graphs are to be cleared - + - Exists the write lock + Evaluates the Command in the given Context + Evaluation Context - + - Enters the read lock + Processes the Command using the given Update Processor + SPARQL Update Processor - + - Exists the read lock + Gets the String representation of the Command + - + - Adds a Triple to the Collection + Represents the SPARQL Update COPY Command - Triple to add - + - Determines whether a given Triple is in the Triple Collection + Creates a Command which Copies the contents of one Graph to another overwriting the destination Graph - The Triple to test - True if the Triple already exists in the Triple Collection + Source Graph URI + Destination Graph URI + Whether errors should be suppressed - + - Gets 
the Number of Triples in the Triple Collection + Creates a Command which Copies the contents of one Graph to another overwriting the destination Graph + Source Graph URI + Destination Graph URI - + - Gets the original instance of a specific Triple from the Triple Collection + Evaluates the Command in the given Context - Triple - + Evaluation Context - + - Deletes a Triple from the Collection + Processes the Command using the given Update Processor - Triple to remove - Deleting something that doesn't exist has no effect and gives no error + SPARQL Update Processor - + - Gets the Enumerator for the Collection + Represents the SPARQL Update CREATE command - - + - Gets all the Nodes which are Objects of Triples in the Triple Collectio + Creates a new CREATE command + URI of the Graph to create + Whether the create should be done silenty - + - Gets all the Nodes which are Predicates of Triples in the Triple Collection + Creates a new CREATE command + URI of the Graph to create - + - Gets all the Nodes which are Subjects of Triples in the Triple Collection + Gets whether the Command affects a Single Graph - + - Gets all triples with the given Object + Gets whether the Command affects a given Graph - Object + Graph URI - + - Gets all triples with the given predicate + Gets the URI of the Graph to be created - Predicate - - + - Gets all triples with the given predicate object + Gets whether the Create should be done silently - Predicate - Object - - + - Gets all the triples with the given subject + Evaluates the Command in the given Context - Subject - + Update Evaluation Context - + - Gets all the triples with the given subject and object + Processes the Command using the given Update Processor - Subject - Object - + SPARQL Update Processor - + - Gets all triples with the given subject and predicate + Gets the String representation of the Command - Subject - Predicate - + - Disposes of a Triple Collection + Represents the SPARQL Update DELETE command - + - Callback for 
methods that return a SparqlResultSet asynchronously + Creates a new DELETE command - SPARQL Results - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + Pattern to construct Triples to delete + Pattern to select data which is then used in evaluating the deletions pattern + URI of the affected Graph - + - Callback for methods that return a IGraph asynchronously + Creates a new DELETE command which operates on the Default Graph - Graph - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + Pattern to construct Triples to delete + Pattern to select data which is then used in evaluating the deletions pattern - + - Callback for methods that return a ITripleStore asynchronously + Creates a new DELETE command - Triple Store - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + Pattern to construct Triples to delete + URI of the affected Graph - + - Callbacks for methods that process the resulting triples with an RDF Handler asynchronously + Createa a new DELETE command which operates on the Default Graph - RDF Handler - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + Pattern to construct Triples to delete - + - Callbacks for methods that process the results with an SPARQL Results Handler asynchronously + Gets whether the Command affects a single Graph - SPARQL Results Handler - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the 
method that took this callback - - + - Callbacks for methods that may process the results with either an RDF or a SPARQL Results Handler + Gets whether the Command affects a given Graph - RDF Handler - SPARQL Results Handler - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - + Graph URI + - + - Callbacks for methods that perform SPARQL Updates + Gets the URI of the Graph the deletions are made from - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - - + - Callback for methods that return a Namespace Map + Gets the pattern used for Deletions - Namespace Map - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - - + - Callbacks for methods that return a list of nodes + Gets the pattern used for the WHERE clause - Node List - State - - In the event of an error you will be passed an instance of which will contain the error and the original state information you passed into the method that took this callback - - + - Marker that will be passed to your callback in the event that an async error occurs, provides access to the error and any state that you passed in originally + Optimises the Commands WHERE pattern - + - Creates new async error + Evaluates the Command in the given Context - Exception - State + Evaluation Context - + - Gets the error that occurred + Processes the Command using the given Update Processor + SPARQL Update Processor - + - Gets the original state that was passed in to the async call + Gets the String representation of the Command + - + - Abstract Base class for HTTP endpoints + Represents a SPARQL Update DELETE DATA command - + - Creates a new Base 
Endpoint + Creates a new DELETE DATA command + Pattern composed of concrete Triples to delete - + - Creates a new Base Endpoint + Determines whether a Graph Pattern is valid for use in an DELETE DATA command - Endpoint URI + Graph Pattern + Is this the top level pattern? + - + - Gets the Endpoints URI + Gets the Data Pattern containing Triples to delete - + - Gets/Sets the HTTP Mode used for requests + Gets whether the Command affects a single Graph - - - Only GET and POST are permitted - implementations may override this property if they wish to support more methods - - - + - Gets/Sets the HTTP Timeouts used specified in milliseconds + Gets whether the Command affects a given Graph - - - Defaults to 30 Seconds (i.e. the default value is 30,000) - - - It is important to understand that this timeout only applies to the HTTP request portions of any operation performed and that the timeout may apply more than once if a POST operation is used since the timeout applies separately to obtaining the request stream to POST the request and obtaining the response stream. Also the timeout does not in any way apply to subsequent work that may be carried out before the operation can return so if you need a hard timeout you should manage that yourself. - - - When set to a zero/negative value then the standard .Net timeout of 100 seconds will apply, use if you want the maximum possible timeout i.e. if you expect to launch extremely long running operations. 
- - - Not supported under Silverlight, Windows Phone and Portable Class Library builds - - + Graph URI + - + - Controls whether the Credentials set with the SetCredentials() method or the Credentialsare also used for a Proxy (if used) + Evaluates the Command in the given Context + Evaluation Context - + - Sets the HTTP Digest authentication credentials to be used + Processes the Command using the given Update Processor - Username - Password + SPARQL Update Processor - + - Sets the HTTP Digest authentication credentials to be used + Gets the String representation of the Command - Username - Password - Domain + - + - Gets/Sets the HTTP authentication credentials to be used + Represents a SPARQL Update DROP command - + - Clears any in-use credentials so subsequent requests will not use HTTP authentication + Creates a new DROP command + URI ofthe Graph to DROP + DROP Mode to use + Whether the DROP should be done silently - + - Sets a Proxy Server to be used + Creates a new DROP command - Proxy Address + URI of the Graph to DROP + DROP Mode to use - + - Sets a Proxy Server to be used + Creates a new DROP command - Proxy Address + URI of the Graph to DROP - + - Gets/Sets a Proxy Server to be used + Creates a new DROP command which drops the Default Graph - + - Clears any in-use credentials so subsequent requests will not use a proxy server + Creates a new DROP command which performs a specific clear mode drop operation + Clear Mode - + - Sets Credentials to be used for Proxy Server + Creates a new DROP command which performs a specific clear mode drop operation - Username - Password + Clear Mode + Whether errors should be suppressed - + - Sets Credentials to be used for Proxy Server + Gets whether the Command affects a single Graph - Username - Password - Domain - + - Gets/Sets Credentials to be used for Proxy Server + Gets whether the Command affects a given Graph + Graph URI + - + - Clears the in-use proxy credentials so subsequent requests still use the proxy server 
but without credentials + Gets the URI of the Graph to be dropped - + - Serializes the endpoints Credential and Proxy information + Gets whether the Drop should be done silently - Configuration Serialization Context - + - Applies generic request options (timeout, authorization and proxy server) to a request + Gets the type of DROP operation to perform - HTTP Request - + - Method which may be overridden in derived classes to add any additional custom request options/headers to the request + Evaluates the Command in the given Context - HTTP Request - - This is called at the end of so can also be used to override that methods default behaviour - + Evaluation Context - + - Abstract Class for Nodes, implements the two basic properties of the INode Interface + Processes the Command using the given Update Processor + SPARQL Update Processor - + - Reference to the Graph that the Node belongs to + Gets the String representation of the command + - + - Uri of the Graph that the Node belongs to + Represents a SPARQL Update INSERT command - + - Node Type for the Node + Creates a new INSERT command + Pattern to construct Triples to insert + Pattern to select data which is then used in evaluating the insertions + URI of the affected Graph - + - Stores the computed Hash Code for this Node + Creates a new INSERT command which operates on the Default Graph + Pattern to construct Triples to insert + Pattern to select data which is then used in evaluating the insertions - + - Base Constructor which instantiates the Graph reference, Graph Uri and Node Type of the Node + Gets whether the Command affects a single Graph - Graph this Node is in - Node Type - + - Nodes have a Type + Gets whether the Command affects a given Graph + Graph URI + - + - Nodes belong to a Graph + Gets the URI of the Graph the insertions are made to - + - Gets/Sets the Graph Uri of the Node + Gets the pattern used for insertions - + - Nodes must implement an Equals method + Gets the pattern used for the WHERE 
clause - Object to compare against - - + - Nodes must implement a ToString method + Optimises the Commands WHERE pattern - - - - Essential for the implementation of GetHashCode to work correctly, Nodes should generate a String representation that is 'unique' as far as that is possible. - - - Any two Nodes which match via the Equals method (based on strict RDF Specification Equality) should produce the same String representation since Hash Codes are generated by calling GetHashCode on this String - - - + - Gets the String representation of the Node formatted with the given Node formatter + Evaluates the Command in the given Context - Formatter - + Evaluation Context - + - Gets the String representation of the Node formatted with the given Node formatter + Processes the Command using the given Update Processor - Formatter - Triple Segment - + SPARQL Update Processor - + - Gets a Hash Code for a Node + Gets the String representation of the Command - - - Implemented by getting the Hash Code of the result of ToString for a Node prefixed with its Node Type, this is pre-computed for efficiency when a Node is created since Nodes are immutable. See remarks on ToString for more detail. 
- - - Since Hash Codes are based on a String representation there is no guarantee of uniqueness though the same Node will always give the same Hash Code (on a given Platform - see the MSDN Documentation for string.GetHashCode() for further details) - - - + - The Equality operator is defined for Nodes + Represents the SPARQL Update INSERT DATA command - First Node - Second Node - Whether the two Nodes are equal - Uses the Equals method to evaluate the result - + - The Non-Equality operator is defined for Nodes + Creates a new INSERT DATA command - First Node - Second Node - Whether the two Nodes are non-equal - Uses the Equals method to evaluate the result + Pattern containing concrete Triples to insert - + - Nodes must implement a CompareTo method to allow them to be Sorted + Determines whether a Graph Pattern is valid for use in an INSERT DATA command - Node to compare self to + Graph Pattern + Is this the top level pattern? - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper - - + - Nodes must implement a CompareTo method to allow them to be Sorted + Gets the Data Pattern containing Triples to insert - Node to compare self to - - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper - - + - Nodes must implement a CompareTo method to allow them to be Sorted + Gets whether the Command affects a single Graph - Node to compare self to - - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). 
Standard implementations of Node type specific comparisons can be found in ComparisonHelper - - + - Nodes must implement a CompareTo method to allow them to be Sorted + Gets whether the Command affects a given Graph - Node to compare self to + Graph URI - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper - - + - Nodes must implement a CompareTo method to allow them to be Sorted + Evaluates the Command in the given Context - Node to compare self to - - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper - + Evaluation Context - + - Nodes must implement a CompareTo method to allow them to be Sorted + Processes the Command using the given Update Processor - Node to compare self to - - - Implementations should use the SPARQL Term Sort Order for ordering nodes (as opposed to value sort order). Standard implementations of Node type specific comparisons can be found in ComparisonHelper - + SPARQL Update Processor - + - Nodes must implement an Equals method so we can do type specific equality + Gets the String representation of the Command - Node to check for equality - - Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper - - + - Nodes must implement an Equals method so we can do type specific equality + Represents the SPARQL Update LOAD command - Node to check for equality - - - Nodes implementations are also required to implement an override of the non-generic Equals method. 
Standard implementations of some equality comparisons can be found in EqualityHelper - - + - Nodes must implement an Equals method so we can do type specific equality + Creates a new LOAD command - Node to check for equality - - - Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper - + Source URI to load data from + Target URI for the Graph to store data in + Whether errors loading should be suppressed - + - Nodes must implement an Equals method so we can do type specific equality + Creates a new LOAD command - Node to check for equality - - - Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper - + Source URI to load data from + Whether errors loading should be suppressed - + - Nodes must implement an Equals method so we can do type specific equality + Creates a new LOAD command - Node to check for equality - - - Nodes implementations are also required to implement an override of the non-generic Equals method. Standard implementations of some equality comparisons can be found in EqualityHelper - + Source URI to load data from + Target URI for the Graph to store data in - + - Nodes must implement an Equals method so we can do type specific equality + Creates a new LOAD command which operates on the Default Graph - Node to check for equality - - - Nodes implementations are also required to implement an override of the non-generic Equals method. 
Standard implementations of some equality comparisons can be found in EqualityHelper - + Source URI to load data from - + - Gets the information for serialization + Gets whether the Command affects a specific Graph - Serialization Information - Streaming Context - + - Gets the schema for XML serialization + Gets whether the Command affects a given Graph + Graph URI - + - Reads the data for XML deserialization + Gets the URI that data is loaded from - XML Reader - + - Writes the data for XML serialization + Gets the URI of the Graph to load data into - XML Writer - + - Mapper class which creates Blank Node IDs and ensures that auto-assigned and user specified IDs don't collide + Gets whether errors loading the data are suppressed - + - Creates a new Blank Node Mapper + Evaluates the Command in the given Context + Evaluation Context - + - Creates a new Blank Node Mapper that uses a custom Prefix + Processes the Command using the given Update Processor - Prefix + SPARQL Update Processor - + - Gets the next available auto-assigned Blank Node ID + Gets the String representation of the Command - + - Checks that an ID can be used as a Blank Node ID remapping it to another ID if necessary + Represents the SPARQL Update INSERT/DELETE command - ID to be checked - - If the ID is not known it is added to the ID maps. If the ID is known but is user-assigned then this can be used fine. If the ID is known and was auto-assigned then it has to be remapped to a different ID. 
- - + - Mapper class which remaps Blank Node IDs which aren't valid as-is in a given serialization to a new ID + Creates a new INSERT/DELETE command - - This also has to take care of the fact that it's possible that these remapped IDs then collide with existing valid IDs in which case these also have to be remapped - + Pattern to construct Triples to delete + Pattern to construct Triples to insert + Pattern to select data which is then used in evaluating the insertions and deletions + URI of the affected Graph - + - Creates a new Blank Node ID mapper + Creates a new INSERT/DELETE command which operates on the Default Graph - Function which determines whether IDs are valid or not + Pattern to construct Triples to delete + Pattern to construct Triples to insert + Pattern to select data which is then used in evaluating the insertions and deletions - + - Takes a ID, validates it and returns either the ID or an appropriate remapped ID + Gets whether the Command affects a Single Graph - ID to map - - + - Internal Helper function which generates the new IDs + Gets whether the Command affects a given Graph + Graph URI - + - Records Blank Node assigments + Gets the URI of the Graph the insertions are made to - + - Creates a new Blank Node ID Assigment Record + Gets the pattern used for deletions - ID to assign - Was the ID auto-assigned - + - Assigned ID + Gets the pattern used for insertions - + - Whether the ID is auto-assigned + Gets the pattern used for the WHERE clause - + - Returns whether a given Object is equal to this Blank Node ID assignment + Optimises the Commands WHERE pattern - Object to test - - + - Class for representing Triple Stores which are collections of RDF Graphs + Evaluates the Command in the given Context - - The 'Disk Demand' Triple Store is a Triple Store which automatically retrieves Graphs from the Disk based on the URIs of Graphs that you ask it for when those URIs are file:/// URIs - + Evaluation Context - + - Creates a new Disk Demand Triple 
Store + Processes the Command using the given Update Processor + SPARQL Update Processor - + - Static Helper class containing standard implementations of Equality between various Node types + Gets the String representation of the Command + - + - Determines whether two URIs are equal + Represents a SPARQL Update MOVE Command - First URI - Second URI - - - Unlike the Equals method provided by the Uri class by default this takes into account Fragment IDs which are essential for checking URI equality in RDF - - + - Determines whether two URIs are equal + Creates a Command which Moves data from one Graph to another overwriting the destination Graph and deleting the source Graph - First URI Node - Second URI Node - + Source Graph URI + Destination Graph URI + Whether errors should be suppressed - + - Determines whether two Literals are equal + Creates a Command which Moves data from one Graph to another overwriting the destination Graph and deleting the source Graph - First Literal - Second Literal - + Source Graph URI + Destination Graph URI - + - Determines whether two Blank Nodes are equal + Evaluates the Command in the given Context - First Blank Node - Second Blank Node - + Evaluation Context - + - Determines whether two Graph Literals are equal + Processes the Command using the given Update Processor - First Blank Node - Second Blank Node - + SPARQL Update Processor - + - Determines whether two Variable Nodes are equal + An Update Processor that extends the Leviathan Engine to include explanations of the query portions of the Updates - First Variable Node - Second Variable Node - - + - Static Helper class containing standard implementations of Comparison between various Node types + Creates a new Explain Update Processor + Dataset - + - Compares two URIs + Creates a new Explain Update Processor - First URI - Second URI - + Dataset + Explanation Level - + - Compares two URI Nodes + Creates a new Explain Update Processor - First URI Node - Second URI Node - + Triple 
Store + Explanation Level - + - Compares two Literal Nodes using global default comparison options where applicable + Creates a new Explain Update Processor - First Literal Node - Second Literal Node - + Triple Store - + - Compares two Literal Nodes + Gets the Query Processor to be used - First Literal Node - Second Literal Node - Culture to use for lexical string comparisons where more natural comparisons are not possible/applicable - String Comparison options used for lexical string comparisons where more natural comparisons are not possible/applicable - + - Compares two Blank Nodes + SPARQL Update Processor which processes commands against a generic underlying store represented by an IStorageProvider implementation - First Blank Node - Second Blank Node - + + + If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used for the non-type specific calls i.e. ProcessCommand() and ProcessCommandSet(). At all other times the SPARQL Update commands will be processed by approximating their behaviour through calls to SaveGraph(), LoadGraph() and UpdateGraph() in addition to local in-memory manipulation of the data. Some commands such as INSERT and DELETE can only be processed when the manager is also a IQueryableStorage since they rely on making a query and performing actions based on the results of that query. + + + The performance of this processor is somewhat dependent on the underlying IStorageProvider. 
If the underlying manager supports triple level updates as indicated by the UpdateSupported property then operations can be performed quite efficiently, if this is not the case then any operation which modifies a Graph will need to load the existing Graph from the store, make the modifications locally in-memory and then save the resulting Graph back to the Store + + - + - Compares two Graph Literals + Creates a new Generic Update Processor - First Graph Literal - Second Graph Literal - + Generic IO Manager - + - Compares two Variable Nodes + Discards any outstanding changes - First Variable Node - Second Variable Node - - + - Equality comparer that compares URIs + Flushes any outstanding changes to the underlying store - + - Compares two URIs + Processes an ADD command - URI - URI - + Add Command - + - Determines whether two URIs are equal + Processes a CLEAR command - URI - URI - + Clear Command + + Implemented by replacing the Graph with an empty Graph + - + - Gets the Hash Code for a URI + Processes a COPY command - URI - + Copy Command - + - Implementation of a Graph Difference algorithm for RDF Graphs + Processes a CREATE command + Create Command - This algorithm is broadly based upon the methodology fror computing differences in RDF Graphs described in the RDFSync paper by Tummarello et al. This is an implementation purely of a difference algorithm and not the synchronisation aspects described in their paper. Main difference between their algorithm and mine is that mine does not make the input Graphs lean as it is concerned with showing the raw differences between the Graphs and does not concern itself with whether the differences may be semantically irrelevant. - - - To understand this consider the following Graphs: + Implemented by adding an empty Graph to the Store -

Graph A

- - _:autos1 rdfs:label "Rob" . - -

Graph B

- - _:autos1 rdfs:label "Rob" . - _:autos2 rdfs:label "Rob" . - - Given these Graphs computing the Graph Difference between A and B would report an Added MSG (Minimal Spanning Graph) when in fact the 2nd Graph is non-lean and could be reduced to the same as the 1st Graph + Warning: As the IStorageProvider interface does not allow checking whether a Graph exists processing CREATE commands can result in overwriting existing Graphs
- + - Calculates the Difference between the two Graphs i.e. the changes required to get from the 1st Graph to the 2nd Graph + Processes a command - First Graph - Second Graph - + Command + + + If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used. + + - + - Computes MSGs for a Graph + Processes a command set - Graph - Triples that need assigning to MSGs - MSGs list to populate + Command Set + + + If the provided manager also implements the IUpdateableStorage interface then the managers native SPARQL Update implementation will be used. + + - + - Represents the Differences between 2 Graphs + Processes a DELETE command + Delete Command - The Diff represents the Difference between the 2 Graphs at the time the Difference was calculated - if the Graphs subsequently change then the Diff must be recalculated + Note: The underlying manager must implement the IQueryableStorage interface in order for DELETE commands to be processed - + - Gets whether the Graphs were equal at the time the Diff was calculated + Processes a DELETE DATA command + DELETE Data Command - + - Gets whether the Graphs are different sizes, different sized graphs are by definition non-equal + Processes a DROP command + Drop Command - + - Provides the mapping from Blank Nodes in 1 Graph to Blank Nodes in another + Processes an INSERT command + Insert Command - In the case of Equal Graphs this will be a complete mapping, if the Graphs are different then it will be an empty/partial mapping depending on whether Blank Nodes can be mapped from one Graph to another or not + Note: The underlying manager must implement the IQueryableStorage interface in order for INSERT commands to be processed - + - Gets the Ground Triples (i.e. no Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph + Processes an INSERT DATA command + Insert Data Command - + - Gets the Ground Triples (i.e. 
no Blank Nodes) that must be removed from the 1st Graph to get the 2nd Graph + Processes a LOAD command + Load Command - + - Gets the MSGs (Minimal Spanning Graphs i.e. sets of Triples sharing common Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph + Processes an INSERT/DELETE command + Insert/Delete Command - + - Gets the MSGs (Minimal Spanning Graphs i.e. sets of Triples sharing common Blank Nodes) that must be added to the 1st Graph to get the 2nd Graph + Processes a MOVE command + Move Command - + - A Comparer for Graphs which compares based on number of Triples + Determines whether a Graph Pattern is valid for use in an INSERT/DELETE DATA command + + Graph Pattern + Is this the top level pattern? + + + + + Interface for SPARQL Update Processors - Used internally in computing Graph Differences but made a public Graph as it may occasionally come in useful + + A SPARQL Update Processor is a class that knows how apply SPARQL Update Commands to some data source to which the processor has access + + + The point of this interface is to allow for end users to implement custom update processors or to extend and modify the behaviour of the default Leviathan engine as required. 
+ - + - Compares Graphs based on their number of Triples + Processes an ADD command - Graph - Graph - + Add Command - + - Interface for Namespace Maps which provide mappings between Namespace Prefixes and Namespace URIs + Processes a CLEAR command + Clear Command - + - Adds a Namespace to the Namespace Map + Processes a COPY command - Namespace Prefix - Namespace Uri + Copy Command - + - Clears the Namespace Map + Processes a CREATE command + Create Command - + - Returns the Namespace URI associated with the given Prefix + Processes a command - The Prefix to lookup the Namespace URI for - URI for the Namespace + Command - + - Returns the Prefix associated with the given Namespace URI + Processes a command set - The Namespace URI to lookup the Prefix for - String prefix for the Namespace + Command Set - + - Method which checks whether a given Namespace Prefix is defined + Processes a DELETE command - Prefix to test - + Delete Command - + - Imports the contents of another Namespace Map into this Namespace Map + Processes a DELETE DATA command - Namespace Map to import - - Prefixes in the imported Map which are already defined in this Map are ignored, this may change in future releases. - + DELETE Data Command - + - Event which is raised when a Namespace is Added + Processes a DROP command + Drop Command - + - Event which is raised when a Namespace is Modified + Processes an INSERT command + Insert Command - + - Event which is raised when a Namespace is Removed + Processes an INSERT DATA command + Insert Data Command - + - Gets a Enumeratorion of all the Prefixes + Processes a LOAD command + Load Command - + - A Function which attempts to reduce a Uri to a QName + Processes an INSERT/DELETE command - The Uri to attempt to reduce - The value to output the QName to if possible - - - This function will return a Boolean indicated whether it succeeded in reducing the Uri to a QName. 
If it did then the out parameter qname will contain the reduction, otherwise it will be the empty string. - + Insert/Delete Command - + - Removes a Namespace from the Namespace Map + Processes a MOVE command - Namespace Prefix of the Namespace to remove + Move Command - + - Interface for classes which can create Nodes + Causes any outstanding changes to be discarded - + - Creates a Blank Node with a new automatically generated ID + Causes any outstanding changes to be flushed to the underlying storage - - + - Creates a Blank Node with the given Node ID + Default SPARQL Update Processor provided by the library's Leviathan SPARQL Engine - Node ID - + + + The Leviathan Update Processor simply invokes the Evaluate method of the SPARQL Commands it is asked to process. Derived implementations may override the relevant virtual protected methods to substitute their own evaluation of an update for our default standards compliant implementations. + + - + - Creates a Graph Literal Node which represents the empty Subgraph + Dataset over which updates are applied - - + - Creates a Graph Literal Node which represents the given Subgraph + Creates a new Leviathan Update Processor - Subgraph - + Triple Store - + - Creates a Literal Node with the given Value and Data Type + Creates a new Leviathan Update Processor - Value of the Literal - Data Type URI of the Literal - + SPARQL Dataset - + - Creates a Literal Node with the given Value + Gets/Sets whether Updates are automatically committed - Value of the Literal - - + - Creates a Literal Node with the given Value and Language + Flushes any outstanding changes to the underlying dataset - Value of the Literal - Language Specifier for the Literal - - + - Creates a URI Node for the given URI + Discards and outstanding changes from the underlying dataset - URI - - + - Creates a Variable Node for the given Variable Name + Creates a new Evaluation Context - + Update Commands - + - Creates a new unused Blank Node ID and returns it + Creates 
a new Evaluation Context - + - Interface for Handlers which handle the RDF produced by parsers + Gets the Query Processor to be used + + + By default null is returned which indicates that the default query processing behaviour is used, to use a specific processor extend this class and override this method. If you do so you will have access to the dataset in use so generally you will want to use a query processor that accepts a ISparqlDataset instance + - + - Start the Handling of RDF + Processes an ADD command - May be thrown if the Handler is already in use and the implementation is not thread-safe + Add Command - + - End the Handling of RDF + Processes an ADD command - Whether parsing finished without error + Add Command + SPARQL Update Evaluation Context - + - Handles a Namespace Definition + Processes a CLEAR command - Namespace Prefix - Namespace URI - Should return true if parsing should continue or false if it should be aborted + Clear Command - + - Handles a Base URI Definition + Processes a CLEAR command - Base URI - Should return true if parsing should continue or false if it should be aborted + Clear Command + SPARQL Update Evaluation Context - + - Handles a Triple + Processes a COPY command - Triple - Should return true if parsing should continue or false if it should be aborted + Copy Command - + - Gets whether the Handler will always handle all data (i.e. 
won't terminate parsing early) + Processes a COPY command + Copy Command + SPARQL Update Evaluation Context - + - Interface for Handlers which wrap other Handlers + Processes a CREATE command + Create Command - + - Gets the Inner Handlers used by this Handler + Processes a CREATE command + Create Command + SPARQL Update Evaluation Context - + - Interface for Handlers which handle the SPARQL Results produced by parsers + Processes a command + Command - + - Starts the Handling of Results + Processes a command + Command + SPARQL Update Evaluation Context + + Invokes the type specific method for the command type + - + - Ends the Handling of Results + Processes a command set - Indicates whether parsing completed without error + Command Set + + Invokes ProcessCommand() on each command in turn + - + - Handles a Boolean Result + Processes a DELETE command - Result + Delete Command - + - Handles a Variable Declaration + Processes a DELETE command - Variable Name - + Delete Command + SPARQL Update Evaluation Context - + - Handles a SPARQL Result + Processes a DELETE DATA command - Result - + DELETE Data Command - + - Represents the definition of a MIME Type including mappings to relevant readers and writers + Processes a DELETE DATA command + Delete Data Command + SPARQL Update Evaluation Context - + - Creates a new MIME Type Definition + Processes a DROP command - Syntax Name for the Syntax which has this MIME Type definition - MIME Types - File Extensions + Drop Command - + - Creates a new MIME Type Definition + Processes a DROP command - Syntax Name for the Syntax which has this MIME Type definition - Format URI as defined by the W3C - MIME Types - File Extensions + Drop Command + SPARQL Update Evaluation Context - + - Creates a new MIME Type Definition + Processes an INSERT command - Syntax Name for the Syntax which has this MIME Type definition - MIME Types - File Extensions - Type to use to parse RDF (or null if not applicable) - Type to use to parse RDF Datasets (or 
null if not applicable) - Type to use to parse SPARQL Results (or null if not applicable) - Type to use to writer RDF (or null if not applicable) - Type to use to write RDF Datasets (or null if not applicable) - Type to use to write SPARQL Results (or null if not applicable) + Insert Command - + - Creates a new MIME Type Definition + Processes an INSERT command - Syntax Name for the Syntax which has this MIME Type definition - Format URI as defined by the W3C - MIME Types - File Extensions - Type to use to parse RDF (or null if not applicable) - Type to use to parse RDF Datasets (or null if not applicable) - Type to use to parse SPARQL Results (or null if not applicable) - Type to use to writer RDF (or null if not applicable) - Type to use to write RDF Datasets (or null if not applicable) - Type to use to write SPARQL Results (or null if not applicable) + Insert Command + SPARQL Update Evaluation Context - + - Gets the name of the Syntax to which this MIME Type Definition relates + Processes an INSERT DATA command + Insert Data Command - + - Gets the Format URI as defined by the W3C (where applicable) + Processes an INSERT DATA command + Insert Data Command + SPARQL Update Evaluation Context - + - Gets the Encoding that should be used for reading and writing this Syntax + Processes a LOAD command + Load Command - + - Gets the MIME Types defined + Processes a LOAD command + Load Command + SPARQL Update Evaluation Context - + - Checks that MIME Types are valid + Processes an INSERT/DELETE command - Type + Insert/Delete Command - + - Adds a MIME Type to this definition + Processes an INSERT/DELETE command - MIME Type + Insert/Delete Command + SPARQL Update Evaluation Context - + - Gets the Canonical MIME Type that should be used + Processes a MOVE command + Move Command - + - Determines whether the Definition supports a particular MIME type + Processes a MOVE command - MIME Type - + Move Command + SPARQL Update Evaluation Context - + - Determines whether the 
definition supports the MIME type specified by the selector + SPARQL Update Processor which processes updates by sending them to a remote SPARQL Update endpoint represented by a SparqlRemoteUpdateEndpoint instance - MIME Type selector - - + - Gets the File Extensions associated with this Syntax + Creates a new Remote Update Processor + Endpoint URI - + - Adds a File Extension for this Syntax + Creates a new Remote Update Processor - File Extension + Endpoint URI - + - Gets whether any file extensions are associated with this syntax + Creates a new Remote Update Processor + SPARQL Remote Update Endpoint - + - Gets/Sets the Canonical File Extension for this Syntax + Discards any outstanding changes - + - Determines whether the Definition supports a particular File Extension + Flushes any outstanding changes to the underlying store - File Extension - - + - Ensures that a given Type implements a required Interface + Processes an ADD command - Property to which we are assigning - Type - Required Interface Type + Add Command - + - Gets/Sets the Type to use to parse RDF (or null if not applicable) + Processes a CLEAR command + Clear Command - + - Gets/Sets the Type to use to parse RDF Datasets (or null if not applicable) + Processes a COPY command + Copy Command - + - Gets/Sets the Type to use to parse SPARQL Results (or null if not applicable) + Processes a CREATE command + Create Command - + - Gets/Sets the Type to use to writer RDF (or null if not applicable) + Processes a command + Command - + - Gets/Sets the Type to use to writer RDF Dataets (or null if not applicable) + Processes a command set + Command Set - + - Gets/Sets the Type to use to write SPARQL Results (or null if not applicable) + Processes a DELETE command + Delete Command - + - Gets whether this definition can instantiate a Parser that can parse RDF + Processes a DELETE DATA command + DELETE Data Command - + - Gets whether this definition can instantiate a Parser that can parse RDF Datasets + Processes 
a DROP command + Drop Command - + - Gets whether this definition can instantiate a Parser that can parse SPARQL Results + Processes an INSERT command + Insert Command - + - Gets whether the definition provides a RDF Writer + Processes an INSERT DATA command + Insert Data Command - + - Gets whether the Definition provides a RDF Dataset Writer + Processes a LOAD command + Load Command - + - Gets whether the Definition provides a SPARQL Results Writer + Processes an INSERT/DELETE command + Insert/Delete Command - + - Gets an instance of a RDF parser + Processes a MOVE command - + Move Command - + - Gets an instance of a RDF writer + SPARQL Update Processor which processes updates by handing them off to the ExecuteUpdate() method of an IUpdateableTripleStore - - + - Gets an instance of a RDF Dataset parser + Creates a new Simple Update Processor - + Updateable Triple Store - + - Gets an instance of a RDF Dataset writer + Discards any outstanding changes - - + - Gets an instance of a SPARQL Results parser + Flushes any outstanding changes to the underlying store - - + - Gets an instance of a SPARQL Results writer + Processes an ADD command - + Add Command - + - Gets whether a particular Type of Object can be parsed + Processes a CLEAR command - Object Type - + Clear Command - + - Gets an Object Parser for the given Type + Processes a COPY command - Object Type - + Copy Command - + - Sets an Object Parser for the given Type + Processes a CREATE command - Object Type - Parser Type + Create Command - + - Gets an Object Parser for the given Type + Processes a command - Object Type - + Command - + - Gets the registered Object Parser Types + Processes a command set + Command Set - + - Selector used in selecting which MIME type to use + Processes a DELETE command + Delete Command - + - Creates a MIME Type selector + Processes a DELETE DATA command - MIME Type - Order the selector appears in the input - + DELETE Data Command - + - Creates an enumeration of MIME type selectors + 
Processes a DROP command - MIME Types - + Drop Command - + - Creates a new MIME Type Selector + Processes an INSERT command - MIME Type to match - Charset - Quality (in range 0.0-1.0) - Order of appearance (used as precendence tiebreaker where necessary) + Insert Command - + - Gets the selected type + Processes an INSERT DATA command - A type string of the form type/subtype assuming the type if valid + Insert Data Command - + - Gets the range type if this is a range selector + Processes a LOAD command - A type string of the form type/ if this is a range selector, otherwise null + Load Command - + - Gets the Charset for the selector (may be null if none specified) + Processes an INSERT/DELETE command + Insert/Delete Command - + - Gets the quality for the selector (range of 0.0-1.0) + Processes a MOVE command + Move Command - + - Gets the order of apperance for the selector (used as precedence tiebreaker where necessary) + A Class for connecting to a remote SPARQL Update endpoint and executing Updates against it - + - Gets whether the selector if for a */* pattern i.e. accept any + Creates a new SPARQL Update Endpoint for the given URI + Endpoint URI - + - Gets whether the selector is for a type/* pattern i.e. accept any sub-type of the given type + Creates a new SPARQL Update Endpoint for the given URI + Endpoint URI - + - Gets whether the selector is invalid + Gets/Sets the HTTP Method used for requests + + The SPARQL 1.1 Protocol specification mandates that Update requests may only be POSTed, attempting to alter the HTTP Mode to anything other than POST will result in a SparqlUpdateException + - + - Gets whether the selector is for a specific MIME type e.g. 
type/sub-type + Makes an update request to the remote endpoint + SPARQL Update - + - Sorts the selector in precedence order according to the content negotiation rules from the relevant RFCs + Makes an update request asynchronously to the remote endpoint - Selector to compare against - + SPARQL Update + Callback to invoke when the update completes + State to pass to the callback - + - Gets the string representation of the selector as it would appear in an Accept header + Serializes configuration for the endpoint - - - Unless this is an invalid selector this will always be a valid selector that could be appended to a MIME type header - + Serialization Context - + - The Graph Persistence Wrapper is a wrapper around another Graph that can be used to batch persistence actions with the ability to Flush/Discard changes as desired. + Possible SPARQL Update Command Types - - - When disposed any outstanding changes are always flushed so if you make changes which you don't want to persist be sure to call the Discard() method before disposing of the Graph - - - Implementors who wish to make persistent graphs should extend this class and override the SupportsTriplePersistence property and the PersistInsertedTriples(), PersistDeletedTriples() and PersistGraph() methods. If you return true for the property then the PersistInsertedTriples() and PersistDeletedTriples() methods will be invoked to do persistence on batches of Triples. If your persistence mechanism requires persisting the entire graph at once return false for the property and override the PersistGraph() method appropriately. - -

Warning

- - Note that the wrapper does not automatically dispose of the wrapped graph when the wrapper is Dispose, this is by design since disposing of the wrapped Graph can have unintended consequences - -
- + - Underlying Graph this is a wrapper around + Command inserts data - + - Creates a new Graph Persistence Wrapper around a new Graph + Command deletes data - + - Creates a new Graph Persistence Wrapper around a new Graph with the given always queue setting + Command inserts data and may be based upon a template - Whether to always queue actions - - The alwaysQueueActions setting when enabled will cause the wrapper to queue Asserts and Retracts for persistence regardless of whether the relevant Triples already exist (i.e. normally if a Triple exists is cannot be asserted again and if it doesn't exist it cannot be retracted). This is useful for creating derived wrappers which operate in write-only mode i.e. append mode for an existing graph that may be too large to reasonably load into memory - - + - Creates a new Graph Persistence Wrapper around the given Graph + Command deletes data and may be based upon a template - Graph - + - Creates a new Graph Persistence Wrapper around the given Graph with the given always queue setting + Command modifies data - Graph - Whether to always queue actions - - The alwaysQueueActions setting when enabled will cause the wrapper to queue Asserts and Retracts for persistence regardless of whether the relevant Triples already exist (i.e. normally if a Triple exists is cannot be asserted again and if it doesn't exist it cannot be retracted). This is useful for creating derived wrappers which operate in write-only mode i.e. 
append mode for an existing graph that may be too large to reasonably load into memory - - + - Deserialization Constructor + Command loads a graph into the Store - Serialization Information - Streaming Context - + - Destructor for the wrapper to ensure that Dispose() is called and thus that persistence happens + Command clears a graph in the Store - + - Gets/Sets the Base URI of the Graph + Command creates a Graph in the Store - + - Gets whether the Graph is empty + Command removes a Graph from the Store - + - Gets the Namespace Map for the Graph + Command which merges the data from one Graph into another - + - Gets the Nodes of the Graph + Command which copies the data from one Graph into another overwriting the destination Graph - + - Gets the Triple Collection for the Graph + Command which moves data from one Graph to another overwriting the destination Graph and deleting the Source Graph - + - Asserts a Triple in the Graph + Unknown - Triple - + - Asserts Triples in the Graph + Base Class of SPARQL Update Commands - Triples - + - Retracts a Triple from the Graph + Creates a new SPARQL Update Command - Triple + Command Type - + - Retracts Triples from the Graph + Gets the Type of this Command - Triples - + - Clears the Graph + Gets whether the Command will only affect a single Graph - + - Creates a new Blank Node with the given Node ID + Gets whether the Command will potentially affect the given Graph - Node ID + Graph URI + + A return value of true does not guarantee that the Graph will be affected. Some Commands (e.g. 
DROP ALL) affect all Graphs in the Dataset but the command itself doesn't know whether a Graph with the given URI is actually present in the dataset to which it is applied + - + - Creates a new Blank Node + Optimises the Command - - + - Gets the next available Blank Node ID + Evaluates the Command in the given Context - + Evaluation Context - + - Creates a new Graph Literal Node with the given sub-graph + Processes the Command Set using the given Update Processor - Sub-graph - + Update Processor - + - Creates a new Graph Literal Node + Gets the String representation of the Command - + - Creates a new Literal Node + Represents a sequence of SPARQL Update Commands to be executed on a Dataset - Value - - + - Creates a new Literal Node with the given Datatype + Creates a new empty Command Set - Value - Datatype URI - - + - Creates a new Literal Node with the given Language + Creates a new Command Set containing the given Command - Value - Language - + Command - + - Creates a new URI Node that references the Graphs Base URI + Creates a new Command Set with the given Commands - + Commands - + - Creates a new URI Node from a QName + Adds a new Command to the end of the sequence of Commands - QName - + Command to add - + - Creates a new URI Node + Gets the Command at the given index - URI + Index - + - Creates a new Variable Node + Gets the number of Commands in the set - Variable Name - - + - Attempts to get the Blank Node with the given ID + Gets the enumeration of Commands in the set - Node ID - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value and Language + Gets/Sets the Base URI for the Command Set - Value - Language - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value + Gets the Namespace Map for the Command Set - Value - The Node if it exists or null - + - Attempts to get the Literal Node with the given Value and Datatype + Gets/Sets the Timeout in milliseconds for the execution of the 
Updates - Value - Datatype URI - The Node if it exists or null otherwise + Default is no timeout - + - Gets all the Triples involving the given URI + Gets/Sets the Time the updates took to execute - The URI to find Triples involving - Zero/More Triples + Thrown if you try to inspect the execution time before/during the execution of updates - + - Gets all the Triples involving the given Node + Gets/Sets the Algebra Optimisers to be applied to portions of updates that require queries to be made - The Node to find Triples involving - Zero/More Triples - + - Gets all the Triples with the given URI as the Object + Optimises the Commands in the Command Set - The URI to find Triples with it as the Object - Zero/More Triples + Optimiser to use - + - Gets all the Triples with the given Node as the Object + Optimises the Commands in the Command Set - The Node to find Triples with it as the Object - + Uses the globally registered query optimiser from SparqlOptimiser.QueryOptimiser - + - Gets all the Triples with the given Node as the Predicate + Processes the Command Set using the given Update Processor - The Node to find Triples with it as the Predicate - + Update Processor - + - Gets all the Triples with the given Uri as the Predicate + Gets the String representation of the Command Set - The Uri to find Triples with it as the Predicate - Zero/More Triples + - + - Gets all the Triples with the given Node as the Subject + Evaluation Context for SPARQL Updates evaluated by the libraries Leviathan SPARQL Engine - The Node to find Triples with it as the Subject - Zero/More Triples - + - Gets all the Triples with the given Uri as the Subject + Creates a new SPARQL Update Evaluation Context - The Uri to find Triples with it as the Subject - Zero/More Triples + Command Set + SPARQL Dataset + Query Processor for WHERE clauses - + - Selects all Triples with the given Subject and Predicate + Creates a new SPARQL Update Evaluation Context - Subject - Predicate - + Command Set + SPARQL 
Dataset - + - Selects all Triples with the given Subject and Object + Creates a new SPARQL Update Evaluation Context - Subject - Object - + SPARQL Dataset + Query Processor for WHERE clauses - + - Selects all Triples with the given Predicate and Object + Creates a new SPARQL Update Evaluation Context - Predicate - Object - + SPARQL Dataset - + - Returns the UriNode with the given QName if it exists + Gets the Command Set (if any) that this context pertains to - The QName of the Node to select - - + - Returns the UriNode with the given Uri if it exists + Dataset upon which the Updates are applied - The Uri of the Node to select - Either the UriNode Or null if no Node with the given Uri exists - + - Gets whether a given Triple exists in this Graph + Gets the Query Processor used to process the WHERE clauses of DELETE or INSERT commands - Triple to test - - + - Merges another Graph into the current Graph + Retrieves the Time in milliseconds the update took to evaluate - Graph to Merge into this Graph - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - + - Merges another Graph into the current Graph + Retrieves the Time in ticks the updates took to evaluate - Graph to Merge into this Graph - Indicates that the Merge should preserve the Graph URIs of Nodes so they refer to the Graph they originated in - - - The Graph on which you invoke this method will preserve its Blank Node IDs while the Blank Nodes from the Graph being merged in will be given new IDs as required in the scope of this Graph. - - - The Graph will raise the MergeRequested event before the Merge operation which gives any event handlers the oppurtunity to cancel this event. When the Merge operation is completed the Merged event is raised - - - + - Determines whether a Graph is equal to another Object + Gets the Remaining Timeout i.e. 
the Timeout taking into account time already elapsed - Object to test - - - A Graph can only be equal to another Object which is an IGraph - - - Graph Equality is determined by a somewhat complex algorithm which is explained in the remarks of the other overload for Equals - + If there is no timeout then this is always zero, if there is a timeout this is always >= 1 since any operation that wants to respect the timeout must have a non-zero timeout to actually timeout properly. - + - Determines whether this Graph is equal to the given Graph + Gets the Update Timeout used for the Command Set - Graph to test for equality - Mapping of Blank Nodes iff the Graphs are equal and contain some Blank Nodes - - The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs - - - Graph Equality is determined according to the following algorithm: + This is taken either from the Timeout property of the SparqlUpdateCommandSet to which this evaluation context pertains or from the global option Options.UpdateExecutionTimeout. To set the Timeout to be used set whichever of those is appropriate prior to evaluating the updates. If there is a Command Set present then it's timeout takes precedence unless it is set to zero (no timeout) in which case the global timeout setting is applied. You cannot set the Update Timeout to be higher than the global timeout unless the global timeout is set to zero (i.e. no global timeout) -
    -
  1. If the given Graph is null Graphs are not equal
  2. -
  3. If the given Graph is this Graph (as determined by Reference Equality) then Graphs are equal
  4. -
  5. If the Graphs have a different number of Triples they are not equal
  6. -
  7. Declare a list of Triples which are the Triples of the given Graph called OtherTriples
  8. -
  9. Declare two dictionaries of Nodes to Integers which are called LocalClassification and OtherClassification
  10. -
  11. For Each Triple in this Graph -
      -
    1. If it is a Ground Triple and cannot be found and removed from OtherTriples then Graphs are not equal since the Triple does not exist in both Graphs
    2. -
    3. If it contains Blank Nodes track the number of usages of this Blank Node in LocalClassification
    4. -
    -
  12. -
  13. If there are any Triples remaining in OtherTriples which are Ground Triples then Graphs are not equal since this Graph does not contain them
  14. -
  15. If all the Triples from both Graphs were Ground Triples and there were no Blank Nodes then the Graphs are equal
  16. -
  17. Iterate over the remaining Triples in OtherTriples and populate the OtherClassification
  18. -
  19. If the count of the two classifications is different the Graphs are not equal since there are differing numbers of Blank Nodes in the Graph
  20. -
  21. Now build two additional dictionaries of Integers to Integers which are called LocalDegreeClassification and OtherDegreeClassification. Iterate over LocalClassification and OtherClassification such that the corresponding degree classifications contain a mapping of the number of Blank Nodes with a given degree
  22. -
  23. If the count of the two degree classifications is different the Graphs are not equal since there are not the same range of Blank Node degrees in both Graphs
  24. -
  25. For All classifications in LocalDegreeClassification there must be a matching classification in OtherDegreeClassification else the Graphs are not equal
  26. -
  27. Then build a possible mapping using the following rules: -
      -
    1. Any Blank Node used only once should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    2. -
    3. Any Blank Node with a unique degree should be mapped to an equivalent Blank Node in the other Graph. If this is not possible then the Graphs are not equal
    4. -
    5. Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    6. -
    7. Build up lists of dependent pairs of Blank Nodes for both Graphs
    8. -
    9. Use these lists to determine if there are any independent nodes not yet mapped. These should be mapped to equivalent Blank Nodes in the other Graph, if this is not possible the Graphs are not equal
    10. -
    11. Use the Dependencies and existing mappings to generate a possible mapping
    12. -
    13. If a Complete Possible Mapping (there is a Mapping for each Blank Node from this Graph to the Other Graph) then test this mapping. If it succeeds then the Graphs are equal
    14. -
    15. Otherwise we now fallback to the Base Mapping and use it as a basis for Brute Forcing the possible solution space and testing every possibility until either a mapping works or we find the Graphs to be non-equal
    16. -
    -
  28. -
- + - Checks whether this Graph is a sub-graph of the given Graph + Checks whether Execution should Time out - Graph - + Thrown if the Update has exceeded the Execution Timeout - + - Checks whether this Graph is a sub-graph of the given Graph + Starts the Execution Timer - Graph - Mapping of Blank Nodes - - + - Checks whether this Graph has the given Graph as a sub-graph + Ends the Execution Timer - Graph - - + - Checks whether this Graph has the given Graph as a sub-graph + Static Helper class for providing constants, helper functions etc regarding the SPARQL Update specification - Graph - Mapping of Blank Nodes - - + - Computes the Difference between this Graph the given Graph + Possible Triple Index types - Graph - - Produces a report which shows the changes that must be made to this Graph to produce the given Graph + Index types are given Integer values with the lowest being the least useful index and the highest being most useful index. Non-Index based Patterns are given arbitrary high values since these will typically never be used as these items are usually inserted into a Graph Pattern after the ordering step + + + When used to sort Patterns as part of query optimisation the patterns are partially ordered on the usefullness of their index since more useful indexes are considered more likely to return fewer results which will help restrict the query space earlier in the execution process. 
- + - Helper function for Resolving QNames to URIs + No Index should be used as the Pattern does not use Variables - QName to resolve to a Uri - - + - Converts the wrapped graph into a DataTable + No Index should be used as the Pattern is three Variables - - + - Event which is raised when a Triple is asserted in the Graph + Subject Index should be used - + - Event which is raised when a Triple is retracted from the Graph + Predicate Index should be used - + - Event which is raised when the Graph contents change + Object Index should be used - + - Event which is raised just before the Graph is cleared of its contents + Subject-Predicate Index should be used - + - Event which is raised after the Graph is cleared of its contents + Predicate-Object Index should be used - + - Event which is raised when a Merge operation is requested on the Graph + Subject-Object Index should be used - + - Event which is raised when a Merge operation is completed on the Graph + A Comparer which sorts based on Triple Index Type - + - Event Handler which handles the Triple Added event from the underlying Triple Collection by raising the Graph's TripleAsserted event + Compares two Triple Index types to see which is greater - Sender - Triple Event Arguments + First Index type + Second Index type + + + Implemented by converting to Integers and then using the Integer comparison function + - + - Helper method for raising the Triple Asserted event manually + Helper Class for indexing related operations - Triple Event Arguments - + - Helper method for raising the Triple Asserted event manually + Searches an Index using the given Comparer - Triple + Indexed Object Type + Index + Comparer to use for binary search + Item to search for + - + - Event Handler which handles the Triple Removed event from the underlying Triple Collection by raising the Graph's Triple Retracted event + Enumeration of the supported container mappings - Sender - Triple Event Arguments - + - Helper method for raising the 
Triple Retracted event manually + No container mapping - - + - Helper method for raising the Triple Retracted event manually + @list container mapping - Triple - + - Helper method for raising the Changed event + @set container mapping - Triple Event Arguments - + - Helper method for raising the Changed event + @index container mapping - + - Helper method for raising the Clear Requested event and returning whether any of the Event Handlers cancelled the operation + @id container mapping - True if the operation can continue, false if it should be aborted - + - Helper method for raising the Cleared event + @type container mapping - + - Helper method for raising the Merge Requested event and returning whether any of the Event Handlers cancelled the operation + @language container mapping - True if the operation can continue, false if it should be aborted - + - Helper method for raising the Merged event + Represents a JSON-LD context - + - Helper method for attaching the necessary event Handlers to a Triple Collection + The collection of active term definitions indexed by the term key - Triple Collection - - May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's - - + - Helper method for detaching the necessary event Handlers from a Triple Collection + Create a new empty context - Triple Collection - - May be useful if you replace the Triple Collection after instantiation e.g. as done in SparqlView's - - + - Flushes all changes which have yet to be persisted to the underlying storage + Get or set the base IRI specified by this context + The value may be a relative or an absolute IRI or null - + - Discards all changes which have yet to be persisted so that they are not persisted to the underlying storage + Returns true if the Base property of this context has been explicitly set. 
- + - Used to indicate whether the persistence mechansim can persist batches of Triples + Get the default language code specified by this context - - - If true then the PersistInsertedTriples() and PersistDeletedTriples() methods are used to persist changes when the Flush() method is called. If false then the PersistGraph() method will be invoked instead. - - + May be null - + - Persists inserted Triples to the underlying Storage + Get the default vocabulary IRI - Triples - + - Persists deleted Triples to the underlying Storage + Get or set the version of the JSON-LD syntax specified by this context - - + - Persists the entire Graph to the underlying Storage + An enumeration of the terms defined by this context - + - Disposes of the persistence wrapper and in doing so persists any changes to the underlying storage + Add a term definition to this context + + - + - Disposes of the persistence wrapper and in doing so persists any changes to the underlying storage + Create a deep clone of this context - Whether the method was called from Dispose() or the destructor + A new JsonLdContext that is a clone of this context - + - Gets the Serialization Information + Add or update an existing term definition - Serialization Information - Streaming Context + The term key + The new value for the term definition - + - Gets the Schema for XML serialization + Remote an existing term definition - + The key for the term to be removed - + - Reads the data for XML deserialization + Get an existing term defintiion - XML Reader + The key for the term to be retrieved + The term definition found for the specified key or null if there is no term definition defined for that key - + - Writes the data for XML serialization + Attempt to get an existing term defintion - XML Writer + The key for the term to be retrieved + Receives the term definition found + True if an entry was found for , false otherwise - + - The Store Graph Persistence Wrapper is a wrapper around another Graph that will be 
persisted to an underlying store via a provided IStorageProvider implementation + Retrieve all mapped aliases for the given keyword + + An enumeration of the key of each term definition whose IriMapping matches the specified keyword. - + - Creates a new Store Graph Persistence Wrapper + Enumeration of the allowed values for the object embed option for JSON-LD framing - Generic IO Manager - Graph to wrap - Graph URI (the URI the Graph will be persisted as) - Whether to operate in write-only mode - - - Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped must be an empty Graph - - - + - Creates a new Store Graph Persistence Wrapper + @always - Generic IO Manager - Graph to wrap - Whether to operate in write-only mode - - - Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property and the Graph to be wrapped must be an empty Graph - - - + - Creates a new Store Graph Persistence Wrapper + @last - Generic IO Manager - Graph to wrap - + - Creates a new Store Graph Persistence Wrapper around a new empty Graph + @link + + + + + @never + + + + + Enumeration of the error code defined in the JSON-LD specification - Generic IO Manager - Graph URI (the URI the Graph will be persisted as) - Whether to operate in write-only mode - - Note: In order to operate in write-only mode the IStorageProvider must support triple level updates indicated by it returning true to its UpdateSupported property - - - When not operating in write-only mode the existing Graph will be loaded from the underlying store - + The error codes are converted to C# canel-case as follows: + (1) Replace IRI by Iri + (2) Remove @ character + (3) Replace "language-tagged" by "Language Tagged" + (4) Split on space character and convert each token to Sentence case (first letter 
uppercase, remainder lower-case) + (5) Join tokens with no token separator - + - Creates a new Store Graph Persistence Wrapper around a new empty Graph + Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time. - Generic IO Manager - Graph URI (the URI the Graph will be persisted as) - + - Gets whether the in-use IStorageProvider supports triple level updates + The compacted document contains a list of lists as multiple lists have been compacted to the same term. - + - Persists the deleted Triples to the in-use IStorageProvider + Multiple conflicting indexes have been found for the same node. - Triples - + - Persists the inserted Triples to the in-use IStorageProvider + A cycle in IRI mappings has been detected. - Triples - + - Persists the entire Graph to the in-use IStorageProvider + An @id member was encountered whose value was not a string. - + - The File Graph Persistence Wrapper is a wrapper around antoher Graph that will be persisted to a file + An @index member was encountered whose value was not a string. - + - Creates a new File Graph Persistence Wrapper around the given Graph + An invalid value for @nest has been found. - Graph - File to persist to - + - Creates a new File Graph Persistence Wrapper around a new emtpy Graph + An invalid value for @prefix has been found. - File to persist to - - If the given file already exists then the Graph will be loaded from that file - - + - Returns that Triple persistence is not supported + An invalid value for an @reverse member has been detected, i.e., the value was not a dictionary. - + - Persists the entire Graph to a File + The @version key was used in a context with an out of range value. - + - A Namespace Mapper which has an explicit notion of Nesting + An invalid base IRI has been detected, i.e., it is neither an absolute IRI nor null. 
- + - Constructs a new Namespace Map + An @container member was encountered whose value was not one of the following strings: @list, @set, or @index. - The Prefixes rdf, rdfs and xsd are automatically defined - + - Constructs a new Namespace Map which is optionally empty + The value of the default language is not a string or null and thus invalid. - Whether the Namespace Map should be empty, if set to false the Prefixes rdf, rdfs and xsd are automatically defined - + - Adds a Namespace at the Current Nesting Level + A local context contains a term that has an invalid or missing IRI mapping. - Prefix - Namespace URI - + - Clears the Namespace Map + An invalid keyword alias definition has been encountered. - + - Gets the Namespace URI for the given Prefix at the current Nesting Level + An invalid value in a language map has been detected. It has to be a string or an array of strings. - Prefix - - + - Gets the Namespace Prefix for the given URI at the current Nesting Level + An @language member in a term definition was encountered whose value was neither a string nor null and thus invalid. - Namespace URI - - + - Gets the Nesting Level at which the given Namespace is definition is defined + A language-tagged string with an invalid language value was detected. - Prefix - - + - Gets whether the given Namespace exists + A number, true, or false with an associated language tag was detected. - Prefix - - + - Imports another Namespace Map into this one + An invalid local context was detected. - Namespace Map - + - Increments the Nesting Level + No valid context document has been found for a referenced, remote context. - + - Decrements the Nesting Level + An invalid reverse property definition has been detected. - - When the Nesting Level is decremented any Namespaces defined at a greater Nesting Level are now out of scope and so are removed from the Mapper - - + - Gets the current Nesting Level + An invalid reverse property map has been detected. 
No keywords apart from @context are allowed in reverse property maps. - + - Event which occurs when a Namespace is added + An invalid value for a reverse property has been detected. The value of an inverse property must be a node object. - + - Event which occurs when a Namespace is modified + The local context defined within a term definition is invalid. - + - Event which occurs when a Namespace is removed + A set object or list object with disallowed members has been detected. - + - Internal Helper for the NamespaceAdded Event which raises it only when a Handler is registered + An invalid term definition has been detected. - Namespace Prefix - Namespace Uri - + - Internal Helper for the NamespaceModified Event which raises it only when a Handler is registered + An @type member in a term definition was encountered whose value could not be expanded to an absolute IRI. - Namespace Prefix - Namespace Uri - + - Internal Helper for the NamespaceRemoved Event which raises it only when a Handler is registered + An invalid value for an @type member has been detected, i.e., the value was neither a string nor an array of strings. - Namespace Prefix - Namespace Uri - + - Gets the Namespace Prefixes + A typed value with an invalid type was detected. - + - Tries to reduce a URI to a QName using this Namespace Map + A value object with disallowed members has been detected. - URI - Resulting QName - - + - Removes a Namespace provided that Namespace is defined on the current Nesting Level + An invalid value for the @value member of a value object has been detected, i.e., it is neither a scalar nor null. - Prefix - + - Disposes of the Namespace Map + An invalid vocabulary mapping has been detected, i.e., it is neither an absolute IRI nor null. - + - Class used to hold Nested Namespace definition information + A keyword redefinition has been detected. - + - Creates a new Nested Mapping + A list of lists was detected. 
List of lists are not supported in this version of JSON-LD due to the algorithmic complexity. - Prefix - Namespace URI - Nesting Level - + - Creates a new Nested Mapping + The document could not be loaded or parsed as JSON. - Prefix - Namespace URI - + - Gets the Nesting Level + There was a problem encountered loading a remote context. - + - Gets the Namespace Prefix + Multiple HTTP Link Headers [RFC5988] using the http://www.w3.org/ns/json-ld#context link relation have been detected. - + - Gets the Namespace URI + An attempt was made to change the processing mode which is incompatible with the previous specified version. - + - A default implementation of a Node Factory which generates Nodes unrelated to Graphs (wherever possible we suggest using a Graph based implementation instead) + A cycle in remote context inclusions has been detected. - + - Creates a new Node Factory + Enumeration of the error codes specified in the JSON-LD framing specification - + - Creates a Blank Node with a new automatically generated ID + invalid frame - - + - Creates a Blank Node with the given Node ID + invalid @embed value - Node ID - - + - Creates a Graph Literal Node which represents the empty Subgraph + Exception raised when the JSON-LD framing algorithm encounters an error during processing - - + - Creates a Graph Literal Node which represents the given Subgraph + The JSON-LD error code describing the error encountered - Subgraph - - + - Creates a Literal Node with the given Value and Data Type + Create a new exception instance - Value of the Literal - Data Type URI of the Literal - + The JSON-LD error code describing the error encountered + A string containing contextual information to help the user identify the root cause of the error - + - Creates a Literal Node with the given Value + Enumeration of the processing modes supported by the - Value of the Literal - - + - Creates a Literal Node with the given Value and Language + Process only JSON-LD 1.0 features - Value of the 
Literal - Language Specifier for the Literal - - + - Creates a URI Node for the given URI + Process JSON-LD 1.1 features with frame expansion features disabled. - URI - - + - Creates a Variable Node for the given Variable Name + Process JSON-LD 1.1 features with frame expansion features enabled. - - - + - Creates a new unused Blank Node ID and returns it + Implements the core JSON-LD processing - - + - A Graph Factory provides access to consistent Graph References so that Nodes and Triples can be instantiated in the correct Graphs + Create a new processor instance - - - Primarily designed for internal use in some of our code but may prove useful to other users hence is a public class. Internally this is just a wrapper around a TripleStore instance. - - - The main usage for this class is scenarios where consistent graph references matter such as returning node references from out of memory datasets (like SQL backed ones) particularly with regards to blank nodes since blank node equality is predicated upon Graph reference. - + JSON-LD processing options + + + + Get or set the base IRI for processing + + This value should be set to the IRI of the document being processed if available. - + - Gets a Graph Reference for the given Graph URI + Get or set the current processing mode - Graph URI - - + - Gets a Graph Reference for the given Graph URI + Process a context in the scope of a current active context resulting in a new context - Graph URI + The currently active context + The context to be processed. May be a JSON object, string or array. + The remote context's already processed. Used to detect circular references in the current context processing step. - - Synonym for the index access method i.e. factory[graphUri] - - + - Gets a Graph Reference for the given Graph URI and indicates whether this was a new Graph reference + Run the Compaction algorithm - Graph URI - Indicates whether the returned reference was newly created + The JSON-LD data to be compacted. 
Expected to be a JObject or JArray of JObject or a JString whose value is the IRI reference to a JSON-LD document to be retrieved + The context to use for the compaction process. May be a JObject, JArray of JObject, JString or JArray of JString. String values are treated as IRI references to context documents to be retrieved + Additional processor options - + - Resets the Factory so any Graphs with contents are emptied + Flattens the given input and compacts it using the passed context according to the steps in the JSON-LD Flattening algorithm + + + + - + - A private implementation of a Node Factory which returns mock constants regardless of the inputs + Applies the Node Map Generation algorithm to the specified input. - - - Intended for usage in scenarios where the user of the factory does not care about the values returned, for example it is used internally in the CountHandler to speed up processing - - + The element to be processed + JSON-LD processor options + The generated node map dictionary as a JObject instance - + - Represents a Triple that is queued for persistence (either insertion/deletion) + Applies the JSON-LD Framing algorithm to the specified input JSON object + The RDF data to be framed as a JSON-LD document + The JSON-LD frame to be applied + Processor options + A JSON object representing the framed RDF data - + - Creates a new Triple Persistence Action (an insertion/deletion) + Determine if a JSON token is a JSON-LD value object - Triple to persist - Whether the Triple is to be deleted + + True of is a with a @value property, false otherwise. - + - Creates a new Triple Persistence Action (an insertion) + Determine if a JSON token is a JSON-LD list object - Triple to persist + + True of is a with a @list property, false otherwise. 
- + - Gets the Triple to persist + Determine if the specified string is a relative IRI + + - + - Gets whether the action is a Delete Action + Determine if the specified string is a JSON-LD keyword + + True if is a JSON-LD keyword, false otherwise - + - Possible Types of Graph Persistence Actions + Determine if the specified string is a JSON-LD framing keyword + + - + - Graph was Added + Determine if the specified string is a blank node identifier + + - + - Graph was Deleted + Apply the JSON-LD context expansion algorithm to the context found at the specified URL + The URL to load the source context from + Options to apply during the expansion processing + The expanded JSON-LD contex - + - Graph was Modified + Apply the JSON-LD expansion algorithm to a context JSON object + The context JSON object to be expanded + Options to apply during the expansion processing + The expanded JSON-LD contex - + - Represents a Graph that is queued for persistence (added/modified/removed) + Exception raised by the JSON-LD processor when a processing error is encountered - + - Creates a new Graph Persistence action + The JSON-LD error code that describes the processing error encountered - Graph - Action Type - + - Creates a new Graph Persistence action + Create a new exception - Graph - Action Type + The JSON-LD error code that describes the processing error encountered + A message providing the user with further contextual information about the error - + - Gets the Graph to be persisted + Create a new exception + The JSON-LD error code that describes the processing error encountered + A message proiding the user with further contextual information about the error + The inner exception that led to this exception being raised - + - Gets the Action Type + A collection of options for setting up the JSON-LD processor - + - Represents an action on a Triple Store that is queued for persistence + Overrides the base IRI of the document being processed - + - Creates a new persistence action 
that pertains to a Graph + Get or set the function to use to resolve an IRI reference to a document + into parsed JSON. - Graph Action + If the function returns null or throws an exception, it will be assumed that dereferencing the IRI has failed - + - Creates a new persistence action that pertains to a Triple + Get or set the syntax version that the processor will use. - Triple Action + Defaults to - + - Gets whether this action pertains to a Graph + A context that is used to initialize the active context when expanding a document. - + - Gets whether this action peratins to a Triple + Flag indicating if arrays of one element should be replaced by the single value during compaction. + Defaults to true - + - Gets the Graph Action (if any) + If set to true, the JSON-LD processor may emit blank nodes for triple predicates, otherwise they will be omitted. + Defaults to true - + - Gets the Triple Action (if any) + Sets the value object embed flag used in the Framing Algorithm - + - Represents an in-memory view of a triple store provided by an IStorageProvider instance where changes to the in-memory view get reflected in the persisted view. + Sets the value explicit inclusion flag used in the Framing Algorithm. - -

Persistence Behaviour

- - Note: This is a transactional implementation - this means that changes made are not persisted until you either call Flush() or you dispose of the instance. Alternatively you may invoke the Discard() method to throw away changes made to the in-memory state. - - - The actual level of persistence provided will vary according to the IStorageProvider instance you use. For example if the DeleteGraph() method is not supported then Graph removals won't persist in the underlying store. Similarily an instance which is read-only will allow you to pull out existing graphs from the store but won't persist any changes. - - - The Contains() method of the underlying BaseGraphCollection has been overridden so that invoking Contains causes the Graph from the underlying store to be loaded if it exists, this means that operations like HasGraph() may be slower than expected or cause applications to stop while they wait to load data from the store. - -

SPARQL Query Behaviour

- - The exact SPARQL Query behaviour will depend on the capabilities of the underlying IStorageProvider instance. If it also implements the IQueryableStorage interface then its own SPARQL implementation will be used, note that if you try and make a SPARQL query but the in-memory view has not been synced (via a Flush() or Discard() call) prior to the query then an RdfQueryException will be thrown. If you want to make the query regardless you can do so by invoking the query method on the underlying store directly by accessing it via the UnderlyingStore property. - - - If the underlying store does not support SPARQL itself then SPARQL queries cannot be applied and a NotSupportedException will be thrown. - -

SPARQL Update Behaviour

- - Similarly to SPARQL Query support the SPARQL Update behaviour depends on whether the underlying IStorageProvider instance also implements the IUpdateableStorage interface. If it does then its own SPARQL implementation is used, otherwise a GenericUpdateProcessor will be used to approximate the SPARQL Update. - - - Please be aware that as with SPARQL Query if the in-memory view is not synced with the underlying store a SparqlUpdateException will be thrown. - -

Other Notes

- - It is possible for the in-memory view of the triple store to get out of sync with the underlying store if that store is being modified by other processes or other code not utilising the PersistentTripleStore instance that you have created. Currently there is no means to resync the in-memory view with the underlying view so you should be careful of using this class in scenarios where your underlying store may be modified. - -
- + - Creates a new in-memory view of some underlying store represented by the IStorageProvider instance + Sets the value omit default flag used in the Framing Algorithm - IO Manager - - Please see the remarks for this class for notes on exact behaviour of this class - - + - Finalizer which ensures that the instance is properly disposed of thereby persisting any outstanding changes to the underlying store + Sets the value require all flag used in the Framing Algorithm - - If you do not wish to persist your changes you must call Discard() prior to disposing of this instance or allowing it to go out of scope such that the finalizer gets called - - + - Gets the underlying store + Instead of framing a merged graph, frame only the default graph. - + - Disposes of the Triple Store flushing any outstanding changes to the underlying store + Removes @id from node objects where the value is a blank node identifier used only once within the document. - - If you do not want to persist changes you have please ensure you call Discard() prior to disposing of the instance - - + - Flushes any outstanding changes to the underlying store + Enumeration of the supported JSON-LD syntax versions - + - Discards any outstanding changes returning the in-memory view of the store to the state it was in after the last Flush/Discard operation + The original 1.0 version of the JSON-LD syntax - + - Executes a SPARQL Query on the Triple Store + The currently in-development 1.1 version of the JSON-LD syntax - Sparql Query as unparsed String - + PLEASE NOTE: the features supported by this version of the parser are subject to change as the specificiation is still in development. 
- + - Executes a SPARQL Query on the Triple Store processing the results using an appropriate handler from those provided + Represents a term definition in a context - RDF Handler - Results Handler - SPARQL Query as unparsed String - + - Executes an Update against the Triple Store + Get or set the IRI mapping for the term - SPARQL Update Command(s) - - As per the SPARQL 1.1 Update specification the command string may be a sequence of commands - - + - Executes a single Update Command against the Triple Store + Indicates if this term represents a reverse property - SPARQL Update Command - + - Executes a set of Update Commands against the Triple Store + Get or set the type mapping for this term definition - SPARQL Update Command Set + May be null. MUST be null if LanguageMapping is not null - + - Internal implementation of a Graph Collection for use by the PersistentTripleStore + Get or set the language mapping for this term definition + May be null. MUST be null if TypeMapping is not null - + - Implements a Sub-Graph Isomorphism Algorithm + Boolean flag indicating if this term definition specifies a language mapping - + - Checks to see whether a given Graph is a sub-graph of the other Graph + Get or set the context specified for this term definition - Sub-Graph - Graph - - + - Uses a series of Rules to attempt to generate a mapping without the need for brute force guessing + Get or set the container mapping for this term definition - 1st Graph - 2nd Graph - 1st Graph Node classification - 2nd Graph Node classification - 1st Graph Degree classification - 2nd Graph Degree classification - - + - Generates and Tests all possibilities in a brute force manner + Get or set the nest property for this term definition - 1st Graph - 2nd Graph - 1st Graph Node classification - 2nd Graph Node classification - Dependencies in the 1st Graph - Dependencies in the 2nd Graph - - + - Helper method for brute forcing the possible mappings + Create a clone of this term defintion - 
Possible Mappings - Dependencies in the 1st Graph - Dependencies in the 2nd Graph - Target Graph (2nd Graph) - + - Gets the Blank Node mapping if one could be found + Class used to return information about a remote document or context - + - Implements a Graph Isomorphism Algorithm + If available, the value of the HTTP Link Header [RFC5988] using the http://www.w3.org/ns/json-ld#context link relation in the response. - - - The algorithm used to determine Graph equality is based in part on a Iterative Vertex Classification Algorithm described in a Technical Report from HP by Jeremy J Carroll - Matching RDF Graphs but has been expanded upon significantly to use a variety of techniques. - - - Graph Equality is determined according to the following algorithm, we refer to the first graph as the Source Graph and the second graph as the Target Graph: - -
    -
  1. If both graphs are null they are considered equal
  2. -
  3. If only one of the given graph is null then they are not equal
  4. -
  5. If the given graphs are reference equal then they are equal
  6. -
  7. If the given graphs have a different number of Triples they are not equal
  8. -
  9. Declare a list of triples which are the triples of the second graph called TargetTriples
  10. -
  11. Declare two dictionaries of Nodes to Integers which are called SourceClassification and TargetClassification
  12. -
  13. For Each Triple in the Source Graph -
      -
    1. If it is a ground triple and cannot be found and removed from TargetTriples then graphs are not equal since the triple does not exist in both graphs
    2. -
    3. If it contains blank nodes track the number of usages of this blank node in SourceClassification
    4. -
    -
  14. -
  15. If there are any triples remaining in TargetTriples which are ground triples then graphs are not equal since the Source Graph does not contain them
  16. -
  17. If all the triples from both graphs were ground triples (i.e. there were no blank nodes) then the graphs are equal
  18. -
  19. Iterate over the remaining triples in TargetTriples and populate the TargetClassification
  20. -
  21. If the count of the two classifications is different the graphs are not equal since there are differing numbers of blank nodes in the Graph
  22. -
  23. Now build two additional dictionaries of Integers to Integers which are called SourceDegreeClassification and TargetDegreeClassification. Iterate over SourceClassification and TargetClassification such that the corresponding degree classifications contain a mapping of the number of blank nodes with a given degree
  24. -
  25. If the count of the two degree classifications is different the graphs are not equal since there are not the same range of blank node degrees in both graphs
  26. -
  27. For All classifications in SourceDegreeClassification there must be a matching classification in TargetDegreeClassification else the graphs are not equal
  28. -
  29. Then build a possible mapping using the following rules: -
      -
    1. Any blank bode used only once (single-use) in the Source Graph should be mapped to an equivalent blank bode in the Target Graph. If this is not possible then the graphs are not equal
    2. -
    3. Any blank node with a unique degree in the Source Graph should be mapped to an equivalent blank node in the Target Graph. If this is not possible then the graphs are not equal
    4. -
    5. Any blank node used with unique constants (two other ground terms in a triple) in the Source Graph should be mapped to an equivalent blank node in the Target Graph. If this is not possible then the graphs are not equal.
    6. -
    7. Build up lists of dependent pairs of blank Nodes for both graphs
    8. -
    9. Use these lists to determine if there are any independent nodes not yet mapped in the Source Graph. These should be mapped to equivalent blank nodes in the Target Graph, if this is not possible the graphs are not equal
    10. -
    11. Important: Keep a copy of the mapping up to this point as a Base Mapping for use as a fallback in later steps
    12. -
    13. Use the dependency information and existing mappings to generate a possible mapping
    14. -
    15. If a complete possible mapping (there is a mapping for each blank node from the Source Graph to the Target Graph) then test this mapping. If it succeeds then the graphs are equal
    16. -
    -
  30. -
  31. If we don't yet have a mapping take a divide and conquer approach: -
      -
    1. Take the not yet mapped blank nodes for each graph and sub-divide them into their isolated sub-graphs
    2. -
    3. If there are at least 2 isolated sub-graphs proceed to divide and conquer
    4. -
    5. For Each Isolated Sub-Graph from the Source Graph -
        -
      1. Consider each possible isolated sub-graph of the same size from the target graph, if there are none then graphs are not equal. If there is a single possible equal isolated sub-graph add the mappings for all involved blank nodes.
      2. -
      -
    6. -
    7. If we now have a complete possible mapping (there is a mapping for each blank node from the Source Graph to the Target Graph) then test the mapping. Return success/failure depending on whether the mapping is valid.
    8. -
    9. Important: Keep a copy of the mapping up to this point as a Base Mapping for use as a base for the brute force step
    10. -
    -
  32. -
  33. If we still don't have a complete mapping we now fallback to the Base Mapping and use it as a basis for brute forcing the possible solution space and testing every possibility until either a mapping works or we find the graphs to be non-equal
  34. -
-
+ If the response's content type is application/ld+json, the HTTP Link Header is ignored. + If multiple HTTP Link Headers using the http://www.w3.org/ns/json-ld#context link relation are found, + the Promise of the LoadDocumentCallback is rejected with a JsonLdError whose code is set to multiple context link headers.
- + - Compares two Graphs for equality + The final URL of the loaded document. This is important to handle HTTP redirects properly. - Graph - Graph - - + - Uses a series of Rules to attempt to generate a mapping without the need for brute force guessing + The retrieved document. This can either be the raw payload or the already parsed document. - 1st Graph - 2nd Graph - 1st Graph Node classification - 2nd Graph Node classification - 1st Graph Degree classification - 2nd Graph Degree classification - + This property may be a JToken or a string. If it is a string, the string is parsed to a JToken - + - Uses a divide and conquer based approach to generate a mapping without the need for brute force guessing + + Top Level Namespace for the dotNetRDF Library which embodies a simple but powerful API for working with RDF and SPARQL. + + + Specific Namespaces within the Hierarchy provide Parsing and Serialization functionality along with a host of related classes to support these functions. + + + Support for querying RDF is provided in the Query namespace which includes SPARQL Query, limited reasoning support in the Query.Inference namespace and a Pellet Server client in the Query.Inference.Pellet namespace. + + + Support for updating RDF based on the SPARQL 1.1 Update and Graph Store HTTP Protocol for RDF Graph Management is provided in the Update and Update.Protocol namespaces. + +

Third Party Storage

+ For communicating with arbitrary Triple Stores we have a dedicated Storage namespace. As of this release we support the following Triple Stores: +
    +
  • AllegroGraph
  • +
  • Dydra
  • +
  • 4store
  • +
  • Fuseki
  • +
  • Any Sesame HTTP Protocol compliant store e.g. Sesame, OWLIM
  • +
  • Any SPARQL Graph Store HTTP Protocol for RDF Graph Management compliant stores
  • +
  • Any SPARQL store that exposes a Query and/or Update endpoint
  • +
  • Stardog
  • +
  • Virtuoso
  • +
+
+

ASP.Net Integration

+ + For those building ASP.Net based websites the Web namespace is dedicated to providing classes for integrating RDF into ASP.Net applications. + +

Ontology API

+ + There is also an Ontology namespace which provides a more resource and ontology centric API for working with RDF than the standard Graph and Triple centric APIs + +

Configuration API

+ + We provide a Configuration API which provides for encoding configuration in RDF Graphs. This configuration system is used extensively as part of the ASP.Net support as it allows for much more expressive and flexible configurations than were previously possible. See the documentation on the main website for many detailed examples. This is primarily intended as an easy way to help deploy configurations for ASP.Net applications though you can make use of the API to describe the configuration of various types of objects in other applications, for example we use it in our Store Manager utility to store connection details. + +

Notes

+ + dotNetRDF 1.0.0 is now considered a stable release, this means it should be stable for production scenarios. However it is open source software and despite our best efforts there may still be bugs. Please help us improve this library by emailing us when you find a bug, you can use the Bug Reports list to report bugs, the Support list to ask questions and the Developer list to request new features or discuss development plans (all these are SourceForge mailing lists which require subscription). + +

Alternative Builds

+
Mono Build
+ + There is no separate build for Mono since dotNetRDF can run directly under Mono. Note that there may still be some features of .Net we use that Mono does not fully support, see the Mono Issues page for more details. We recommend Mono 2.10 or higher though the library should run on recent 2.6/2.8 releases. + +
Client Profile Build
+ + The Client Profile build omits the reference to System.Web so lacks the ASP.Net integration and some other features that rely on this dependency but is otherwise a fairly complete build of the library. + +
Silverlight/Windows Phone 7 Build
+ + The Silverlight and Windows Phone 7 builds of dotNetRDF (dotNetRDF.Silverlight.dll and dotNetRDF.WindowsPhone.dll) are experimental builds that receive limited internal testing so please be aware that these are not as stable as the standard .Net builds. These builds run on Silverlight 4/Windows Phone 7 and omit the following features since they can't be supported on these platforms: + 
    +
  • Most of the Web namespaces
  • +
  • Does not include parts of the Storage namespace that would require synchronous HTTP
  • +
  • No String normalization support
  • +
  • No UriLoader caching support
  • +
  • No multi-threaded support where System.Threading.ReaderWriterLockSlim is used
  • +
  • Various writers and parsers use streaming rather than DOM based XML parsing
  • +
  • No support for XSL in TriX files
  • +
  • Synchronous HTTP Request Features - For most of these there are asynchronous callback driven versions of these features available from the 0.5.0 release onwards
  • +
- 1st Graph - 2nd Graph - 1st Graph Node classification - 2nd Graph Node classification - Dependencies in the 1st Graph - Dependencies in the 2nd Graph -
- + - Generates and Tests all possibilities in a brute force manner + Namespace for specialised node implementations and the interface, this is an extension of the interface that provides strongly typed access to the value of a node. + + These implementations are primarily used internally in the SPARQL engine, however as these all derive from the standard implementations they can be used interchangeably with those if desired. + - 1st Graph - 2nd Graph - 1st Graph Node classification - 2nd Graph Node classification - Dependencies in the 1st Graph - Dependencies in the 2nd Graph - - + - Helper method for brute forcing the possible mappings + Valued Node representing boolean values - Base Mapping - Possible Mappings - - - The base mapping at the time of the initial call shoudl contain known good mappings - - + - Helper method for brute forcing the possible mappings + Creates a new boolean valued node - Base Mapping - Possible Mappings - Node to consider for mapping - - - The base mapping contains known good mappings - + Graph the node belong to + Boolean Value + Lexical Value - + - Gets the Blank Node Mapping found between the Graphs (if one was found) + Creates a new boolean valued node + Graph the node belongs to + Boolean Value - + - Represents a Pair of Nodes that occur in the same Triple + Gets the string value of the boolean + - + - Class for representing Graphs which can be directly queried using SPARQL + Throws an error as booleans cannot be cast to integers + - + - Creates a new Queryable Graph + Throws an error as booleans cannot be cast to decimals + - + - Executes a SPARQL Query on the Graph + Throws an error as booleans cannot be cast to floats - SPARQL Query - + - Executes a SPARQL Query on the Graph handling the results with the given handlers + Throws an error as booleans cannot be cast to doubles - RDF Handler - SPARQL Results Handler - SPARQL Query + - + - Executes a SPARQL Query on the Graph + Gets the boolean value - SPARQL Query - + - Executes a 
SPARQL Query on the Graph handling the results with the given handlers + Throws an error as booleans cannot be cast to date times - RDF Handler - SPARQL Results Handler - SPARQL Query + - + - A Node Comparer which does faster comparisons since it only does lexical comparisons for literals rather than value comparisons, - and it compares virtual nodes on their VirtualID where possible. + Throws an error as booleans cannot be cast to date times + - + - Compares two Nodes + Throws an error as booleans cannot be cast to a time span - Node - Node - + - A Node Comparer which does faster comparisons since it only does lexical comparisons for literals rather than value comparisons + Gets the URI of the datatype this valued node represents as a String - + - Compares two Nodes + Gets the numeric type of the node - Node - Node - - + - Returns whether two Triples are equal + Valued node representing a byte (8-bit unsigned integer) - Triple - Triple - - + - Returns a predictable HashCode for the triple based on its components' + Creates a new byte valued node - Triple - Triple - + Graph the node belongs to + Byte value + Lexical value - + - Abstract base class for Triple Comparers which provide for comparisons using different node comparers + Creates a new byte valued node + Graph the node belongs to + Byte value - + - Node Comparer + Gets the integer value of the byte + - + - Creates a new Triple Comparer + Gets the decimal value of the byte + - + - Creates a new Triple Comparer + Gets the float value of the byte - Node Comparer to use + - + - Compares two Triples + Gets the float value of the double - Triple - Triple - + - Triple comparer which compares on subjects, then predicates and finally objects + Value node representing a signed byte (8-bit signed integer) - + - Creates a new Full Triple comparer + Creates a new signed byte node + Graph the node belongs to + Signed Byte value + Lexical value - + - Creates a new Full Triple comparer that uses a specific Node comparer + 
Creates a new signed byte node - Node comparer + Graph the node belongs to + Signed Byte value - + - Compares two Triples + Gets the integer value of the signed byte - Triple - Triple - + - Triple comparer which compares only on subjects + Gets the decimal value of the signed byte + - + - Creates a new Subject comparer + Gets the float value of the signed byte + - + - Creates a new Subject comparer using the provided Node comparer + Gets the double value of the signed byte - Node comparer + - + - Compares two Triples + Valued Node representing a Date Time value - Triple - Triple - - + - Triple comparer which compares only on predicates + Creates a new Date Time valued node + Graph the node belongs to + Date Time value + Lexical Value + Datatype URI - + - Creates a new Predicate comparer + Creates a new Date Time valued node + Graph the node belongs to + Date Time value + Lexical Value + Datatype URI - + - Creates a new Predicate comparer using the provided Node comparer + Creates a new Date Time valued node - Node Comparer + Graph the node belongs to + Date Time value + Datatype URI - + - Compares two Triples + Creates a new Date Time valued node - Triple - Triple - + Graph the node belongs to + Date Time value + Datatype URI - + - Triple comparer which compares only on objects + Creates a new Date Time valued node + Graph the node belongs to + Date Time value - + - Creates a new Object comparer + Creates a new Date Time valued node + Graph the node belongs to + Date Time value - + - Creates a new Object comparer using the provided Node comparer + Creates a new Date Time valued node - Node comparer + Graph the node belongs to + Date Time value + Lexical Value - + - Compares two Triples + Creates a new Date Time valued node - Triple - Triple - + Graph the node belongs to + Date Time value + Lexical Value - + - Triple comparer which compares on subjects and then predicates + Creates a new Date Time valued node + Graph the node belongs to + Date Time value + Date Time 
offset value + Lexical Value - + - Creates a new Subject Predicate comparer + Creates a new Date Time valued node + Graph the node belongs to + Date Time value + Date Time offset value + Lexical Value + Data Type URI - + - Creates a new Subject Predicate comparer using the provided Node comparer + Gets the String form of the Date Time - Node Comparer + Value + Datatype URI + - + - Compares two Triples + Gets the String form of the Date Time - Triple - Triple + Value + Datatype URI - + - Triple comparer which compares on subjects and then objects + Gets the date time value as a string + - + - Creates a new Subject Object comparer + Throws an error as date times cannot be converted to integers + - + - Creates a new Subject Object comparer using the provided Node comparer + Throws an error as date times cannot be converted to decimals - Node comparer + - + - Compares two Triples + Throws an error as date times cannot be converted to floats - Triple - Triple - + - Triple comparer which compares on predicates and then objects + Throws an error as date times cannot be converted to doubles + - + - Creates a new Predicate Object comparer + Throws an error as date times cannot be converted to booleans + - + - Creates a new Predicate Object comparer using the provided Node comparer + Gets the date time value of the node - Node comparer + - + - Compares two Triples + Gets the date time value of the node - Triple - Triple - + - Triple comparer which compares on objects and then subjects + Throws an error as date times cannot be cast to a time span + - + - Creates a new Object Subject comparer + Gets the URI of the datatype this valued node represents as a String - + - Creates a new Object Subject comparer using the provided Node comparer + Gets the numeric type of the node - + - Compares two Triples + Valued Node representing a Date value - Triple - Triple - - + - Represents a union of multiple Triple Collections + Creates a new Date valued node - - - The union consists of a 
Base collection which is the collection that Triples can actually be added to and deleted from and any number of additional collections which are read-only as far as the union is concerned (this does not mean they cannot be altered elsewhere by other code) - - + Graph the node belongs to + Date Time value - + - Creates a new Union Triple Collection which is a union of two collections + Creates a new Date valued node - Base Triple Collection - Additional Triple Collection + Graph the node belongs to + Date Time value + Lexical Value - + - Creates a new Union Triple Collection which is a union of any number of collections + Creates a new Date valued node - Base Triple Collection - Additional Triple Collection(s) + Graph the node belongs to + Date Time value - + - Adds a Triple to the base collection + Creates a new Date valued node - Triple to add + Graph the node belongs to + Date Time value + Lexical Value - + - Checks whether the union contains this Triple in any of the collections it comprises + A Valued Node representing decimal nodes - Triple to test - - + - Gets the count of Triples in this union + Creates a new decimal valued node - - The Count is the total number of Triples, this may be different from the number of distinct triples - + Graph the node belongs to + Decimal value + Lexical value - + - Deletes a Triple from the base collection + Creates a new decimal valued node - Triple to delete + Graph the node belongs to + Decimal value - + - Retrieves a Triple from the union + Gets the integer value of the decimal - Triple to retrieve - Thrown if the Triple is not contained in any of the collections this union comprises - + - Gets the enumeration of distinct objects of Triples + Gets the decimal value + - + - Gets the enumeration of distinct predicates of Triples + Gets the float value of the decimal + - + - Gets the enumeration of distinct subjects of Triples + Gets the double value of the decimal + - + - Disposes of the collection + A Valued Node 
representing double values - - This does nothing since we don't know where and how the collections we are the union of are being used and therefore to dispose of them could have unwanted/unexpected results - - + - Gets the enumeration of Triples in the union + Creates a new double valued node - + Graph the node belongs to + Double value + Lexical value - + - A Graph which represents the Union of several Graphs + Creates a new double valued node - - - The Union is entirely virtual, the Graphs and their Triples are not actually physically merged together - - - All Assert and Retract operations are directed only to the Default Graph while a Clear() operation will clear all Graphs in the Union - - + Graph the node belongs to + Double value - + - Creates a new Union Graph which is the Union of all the given Graphs with a specific Default Graph + Gets the integer value of the double - Default Graph of the Union - Other Graphs in the Union + - + - Gets the Nodes of the Graph + Gets the decimal value of the double + - + - Asserts some Triples in the Graph + Gets the float value of the double - Triples - - Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph - + - + - Asserts s Triple in the Graph + Gets the double value - Triple - - Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. 
persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph - + - + - Retracts some Triples from the Graph + A Valued Node representing float values - Triples - - Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph - - + - Retracts a Triple from the Graph + Creates a new Float valued node - Triple - - Assert and Retract operations are directed to the Default Graph of the Union. We have to override the method to do this as although the UnionTripleCollection will direct asserts/retracts to Triple Collection of the default Graph we cannot guarantee that the Graph will be able to carry out any assertion/retraction logic (e.g. 
persistence) it might have implemented if the Assert/Retract bypasses the Assert/Retract method of the Default Graph - + Graph the node belongs to + Float value + Lexical value - + - Clears all the Graphs in the Union + Creates a new Float valued node + Graph the node belongs to + Float value - + - A static helper class for interning URIs to reduce memory usage + Gets the integer value of the float + - + - Creates a URI interning it if interning is enabled via the Options.InternUris + Gets the decimal value of the float - String URI - - When URI interning is disabled this is equivalent to just invoking the constructor of the Uri class - - + - Clears all interned URIs + Gets the float value + - + - Abstract Base Class for Variable Nodes + Gets the double value of the float + - + - Creates a new Variable Node + Interface for Valued Nodes - Graph - Variable Name + + + This interface extends the basic INode interface with methods related to turning the lexical value into a strongly typed .Net value. It is intended primarily for use within SPARQL expression evaluation where we need to do a lot of value conversions and currently waste a lot of effort (and thus performance) doing that. 
+ + - + - Deserialization Only Constructor + Gets the String value of the Node + + + This is distinct from ToString() because that method will typically include additional information like language specifier/datatype as appropriate whereas this method is used to produce a string as would be produced by applying the STR() function from SPARQL + - + - Deserialization Constructor + Gets the Long value of the Node - Serialization Information - Streaming Context + + Thrown if the Node cannot be converted to a Long - + - Gets the Variable Name + Gets the Decimal value of the Node + + Thrown if the Node cannot be converted to a Decimal - + - Gets whether this Node is equal to some other Node + Gets the Float value of the Node - Node to test + Thrown if the Node cannot be converted to a Float - + - Determines whether this Node is equal to a Blank Node (should always be false) + Gets the Double value of the Node - Blank Node + Thrown if the Node cannot be converted to a Double - + - Determines whether this Node is equal to a Graph Literal Node (should always be false) + Gets the Boolean value of the Node - Graph Literal Node + Thrown if the Node cannot be converted to a Boolean - + - Determines whether this Node is equal to a Literal Node (should always be false) + Gets the Date Time value of the Node - Literal Node + Thrown if the Node cannot be converted to a Date Time - + - Determines whether this Node is equal to a URI Node (should always be false) + Gets the Date Time Offset value of the Node - URI Node + Thrown if the Node cannot be converted to a Date Time Offset - + - Determines whether this Node is equal to a Variable Node + Gets the Time Span value of the Node - Variable Node - + - Determines whether this Node is equal to a Variable Node + Gets the URI of the datatype this valued node represents as a String - Variable Node - + + Either String.Empty if no type or the string form of the type URI + - + - Gets whether this Node is equal to some Object + Gets the 
Numeric Type of the Node - Object to test - - + - Gets the String representation of this Node + A Valued Node with a Long value - - + - Compares this Node to another Node + Creates a new long valued node - Node to compare with - + Graph the node belongs to + Long value + Lexical Value - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Creates a new long valued node - Node to test against - + Graph the node belongs to + Long value + Lexical Value + Datatype URI - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Creates a new long valued node - Node to test against - + Graph the node belongs to + Long value - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the long value - Node to test against - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the decimal value of the long - Node to test against - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the float value of the long - Node to test against - + - Returns an Integer indicating the Ordering of this Node compared to another Node + Gets the double value of the long - Node to test against - + - Gets the data for serialization + A Valued Node with a unsigned long value - Serialization Information - Streaming Context - + - Reads the data for XML deserialization + Creates a new unsigned long valued node - XML Reader + Graph the node belongs to + Unsigned Long value + Lexical Value - + - Writes the data for XML serialization + Creates a new unsigned long valued node - XML Writer + Graph the node belongs to + Unsigned Long value + Lexical Value + Datatype URI - + - Throws an error as variables cannot be converted to types + Creates a new usigned long valued node - + Graph the node belongs to + Unsigned Long value - + - Throws an error as variables cannot be converted to types + Gets the long value of the ulong - + - 
Throws an error as variables cannot be converted to types + Gets the decimal value of the ulong - + - Throws an error as variables cannot be converted to types + Gets the float value of the ulong - + - Throws an error as variables cannot be converted to types + Gets the double value of the ulong - + - Throws an error as variables cannot be converted to types + A Valued Node with a numeric value - - + - Throws an error as variables cannot be converted to types + Creates a new numeric valued node - + Graph the node belongs to + Lexical Value + Datatype URI + SPARQL Numeric Type - + - Throws an error as variables cannot be converted to types + Gets the string value of the node - + - Throws an error as variables cannot be cast to a time span + Gets the integer value - + - Gets the URI of the datatype this valued node represents as a String + Gets the decimal value + - + - Gets the numeric type of the expression + Gets the float value + - + - Class representing Variable Nodes (only used for N3) + Gets the double value + - + - Creates a new Variable Node + Gets the boolean value - Graph - Variable Name + - + - Deserialization Only Constructor + Throws an error as numerics cannot be converted to date times + - + - Deserialization Constructor + Throws an error as numerics cannot be converted to date times - Serialization Information - Streaming Context + - + - Compares this Node to another Variable Node + Throws an error as numerics cannot be cast to a time span - Variable Node - + - Determines whether this Node is equal to a Variable Node + Gets the URI of the datatype this valued node represents as a String - Variable Node - - + - Possible Variable Context Types + Gets the numeric type of the node - + - There is currently no variable context + Valued node whose value is a string or can only be converted to a string - + - Existential Variable Context + Creates a new String Node + Graph the Node belongs to + String value + Datatype URI - + - Universal Variable Context + 
Creates a new String Node + Graph the Node belongs to + String value + Language Specifier - + - Represents the Variable Context for Triples + Creates a new String Node + Graph the Node belongs to + String value - + - Creates a new Variable Context + Gets the string value - Context Type + - + - Gets the Context Type + Throws an error as the string cannot be cast to an integer + - + - Gets the Variables in this Context + Throws an error as the string cannot be cast to a decimal + - + - Adds a Variable to this Context + Throws an error as the string cannot be cast to a float - Variable + - + - Gets whether a given Variable exists in this Context + Throws an error as the string cannot be cast to a double - Variable Node - + - Gets/Sets the Inner Context + Gets the boolean value of the string - - When you set the Inner Context this sets the Inner Context of the most nested inner context, you can remove all nested contexts by setting this to null - + - + - Class for representing Triple Stores which are collections of RDF Graphs + Throws an error as the string cannot be cast to a date time - - The 'Web Demand' Triple Store is a Triple Store which automatically retrieves Graphs from the Web based on the URIs of Graphs that you ask it for - + - + - Creates an Web Demand Triple Store + Throws an error as the string cannot be cast to a date time - A Uri for the Default Graph which should be loaded from the Web as the initial Graph + - + - Creates an Web Demand Triple Store + Throws an error as the string cannot be cast to a time span - A Filename for the Default Graph which should be loaded from a local File as the initial Graph + - + - Creates a new Web Demand Triple Store + Gets the URI of the datatype this valued node represents as a String - + - Delegate Type for Warning Messages raised by RDF Readers + Gets the numeric type of the expression - Warning Message - + - Delegate Type for Warning Messages raised by RDF Writers + Valued Node representing a Time Span value - 
Warning Message - + - Delegate Type for Warning Events raised by RDF Dataset Writers + Creates a new Time span node - Warning Message + Graph + Time Span - + - Delegate Type for Warning Events raised by RDF Dataset Readers + Creates a new Time span node - Warning Message + Graph + Time Span + Lexical value - + - Delegate Type for Warning Events raised by SPARQL Readers and Writers for Queries, Updates and Results + Creates a new Time span node - Warning Message + Graph + Time Span + Lexical value + Data type URI - + - Delegate Type for Triple events raised by Graphs + Gets the date time value as a string - Originator of the Event - Triple Event Arguments + - + - Delegate Type for Graph events raised by Graphs + Throws an error as date times cannot be converted to integers - Originator of the Event - Graph Event Arguments + - + - Delegate Type for Graph events raised by Graphs where event handlers may set a Cancel flag to cancel the subsequent operation + Throws an error as date times cannot be converted to decimals - Originator of the Event - Graph Event Arguments + - + - Delegate Type for Triple Store events raised by Triple Stores + Throws an error as date times cannot be converted to floats - Originator of the event - Triple Store Event Arguments + - + - Event Arguments for Events regarding the assertion and retraction of Triples + Throws an error as date times cannot be converted to doubles + - + - Creates a new set of Triple Event Arguments for the given Triple + Throws an error as date times cannot be converted to booleans - Triple - Graph the Triple Event occurred in + - + - Creates a new set of Triple Event Arguments for the given Triple + Gets the date time value of the node - Triple - Graph the Triple Event occurred in - Was the Triple Asserted (if not then it was Retracted) + - + - Gets the Triple + Gets the date time value of the node + - + - Gets the Graph the Triple belongs to (may be null) + Gets the time span value of the node + - + - Gets the URI 
of the Graph the Triple belongs to (may be null) + Gets the URI of the datatype this valued node represents as a String - + - Gets whether the Triple was asserted + Gets the numeric type of the node - + - Gets whether the Triple was retracted + Extension Methods related to valued nodes - + - Event Arguments for Events regarding Graphs + Takes a INode and converts it to a IValuedNode if it is not already an instance that implements the interface + Node + Valued Node - + - Creates a new set of Graph Event Arguments + Tries to get the result of calling AsBoolean() on a node throwing an error if the node is null - Graph + Node + Thrown if the input is null of the specific valued node cannot be cast to a boolean + - + - Creates a new set of Graph Event Arguments + + Namespace for Classes designed to aid the deployment of Linked Data, SPARQL Endpoints and other Semantic Web technologies as part of ASP.Net web applications. + + + The ASP.Net support leverages the Configuration API heavily and so only requires only 1 <appSetting> like so: + + <add key="dotNetRDFConfig" value="~/App_Data/config.ttl" /> + + This setting provides a pointer to an RDF configuration graph that uses the Configuration Vocabulary to express the configuration of HTTP Handlers for your ASP.Net application. We also now provide a command line tool rdfWebDeploy which can be used to automate the testing and deployment of this configuration. See documentation on the Configuration API for more detail. Individual handler documentation gives basic examples of Handler configurations. + - Graph - Triple Event Arguments - + - Gets the Graph + + Namespace for Configuration classes which are used to load and store the configuration settings for HTTP Handlers provided as part of the Web namespace. 
+ - + - Gets the Triple Event Arguments (if any) + + Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Graph Store HTTP Protocol + - + - Event Arguments for Events regarding Graphs which may be cancelled + + Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Query handlers + - + - Creates a new set of Cancellable Graph Event Arguments + + Namespace for Configuration classes which are used to load and store the configuration settings for handlers which serve resources such as Graphs + - Graph - + - Creates a new set of Cancellable Graph Event Arguments + + Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Servers + - Graph - Triple Event Arguments - + - Gets/Sets whether the Event should be cancelled + + Namespace for Configuration classes which are used to load and store the configuration settings for SPARQL Update handlers + - + - Event Arguments for Events regarding Graphs + Utility extension methods for the class - + - Creates a new set of Triple Store Event Arguments + Retrieves the Accept Types to be used to determine the content format to be used in responding to requests - Triple Store + HTTP Context + - + - Creates a new set of Triple Store Event Arguments + Interface which represents the context of some request to a HTTP server - Triple Store - Graph Event Arguments + + Abstraction which allows us to reuse code for request and response processing across different HTTP server environments + - + - Creates a new set of Triple Store Event Arguments + Gets the HTTP Request - Triple Store - Graph - + - Gets the Triple Store + Gets the HTTP Response - + - Gets the Graph Event Arguments (if any) + Gets the User - + - Possible Triple Index types + Interface which represents a HTTP request - - Index types are given Integer values with the lowest being the least useful index and the highest being 
most useful index. Non-Index based Patterns are given arbitrary high values since these will typically never be used as these items are usually inserted into a Graph Pattern after the ordering step - - - When used to sort Patterns as part of query optimisation the patterns are partially ordered on the usefullness of their index since more useful indexes are considered more likely to return fewer results which will help restrict the query space earlier in the execution process. - + Abstraction which allows us to reuse code for request processing across different HTTP server environments - + - No Index should be used as the Pattern does not use Variables + Gets the MIME Types specified in the Accept header - + - No Index should be used as the Pattern is three Variables + Gets the Content Length - + - Subject Index should be used + Gets the Content Type - + - Predicate Index should be used + Gets the Headers - + - Object Index should be used + Gets the HTTP Method - + - Subject-Predicate Index should be used + Gets the Input Stream - + - Predicate-Object Index should be used + Gets the Querystring parameters - + - Subject-Object Index should be used + Gets the URL - + - A Comparer which sorts based on Triple Index Type + Gets the Users Host Address - + - Compares two Triple Index types to see which is greater + Interface which represents a HTTP response - First Index type - Second Index type - - Implemented by converting to Integers and then using the Integer comparison function + Abstraction which allows us to reuse code for response processing across different HTTP server environments - + - Helper Class for indexing related operations + Adds a Header to the resposne + Name + Value - + - Searches an Index using the given Comparer + Clears the Response - Indexed Object Type - Index - Comparer to use for binary search - Item to search for - - + - Interface for parsers that generate objects of some sort + Writes a String to the response body - Generated Object Type - - 
- Primarily used as a marker interface in relation to MimeTypesHelper to provide a mechanism whereby parsers for arbitrary objects can be registered and associated with MIME Types and File Extensions - - + Data to write - + - Parses an Object from an Input Stream + Gets/Sets the Content Encoding for the response - Input Stream - - + - Parses an Object from a Text Stream + Gets/Sets the Content Type for the response - Text Stream - - + - Parses an Object from a File + Gets the Headers for the response - Filename - - + - Parses an Object from a String + Gets the output stream - String - - + - Parses an Object from a Parameterized String + Gets/Sets the HTTP Status Code for the response - Parameterized String -
diff --git a/Trinity.Tests/App.config b/Trinity.Tests/App.config index e471e7f..dbc9275 100644 --- a/Trinity.Tests/App.config +++ b/Trinity.Tests/App.config @@ -42,6 +42,7 @@ + diff --git a/Trinity.Tests/Linq/LinqInferencingTest.cs b/Trinity.Tests/Linq/LinqInferencingTest.cs new file mode 100644 index 0000000..39a5b0c --- /dev/null +++ b/Trinity.Tests/Linq/LinqInferencingTest.cs @@ -0,0 +1,85 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using NUnit.Framework; +using System; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text.RegularExpressions; +using System.Collections.Generic; + +namespace Semiodesk.Trinity.Test.Linq +{ + [TestFixture] + public class LinqInferencingTest + { + protected IStore Store; + + protected IModel Model; + + [SetUp] + public void SetUp() + { + // DotNetRdf memory store. + //string connectionString = "provider=dotnetrdf"; + + // OpenLink Virtoso store. + string connectionString = string.Format("{0};rule=urn:semiodesk/test/ruleset", SetupClass.ConnectionString); + + Store = StoreFactory.CreateStore(connectionString); + Store.InitializeFromConfiguration(); + + Model = Store.CreateModel(new Uri("http://test.com/test")); + Model.Clear(); + + Assert.IsTrue(Model.IsEmpty); + + Document doc = Model.CreateResource(); + doc.Title = "Hello World!"; + doc.Commit(); + + Person p1 = Model.CreateResource(); + p1.FirstName = "Peter"; + p1.LastName = "Steel"; + p1.Made.Add(doc); + p1.Commit(); + + + Assert.IsFalse(Model.IsEmpty); + } + + [Test] + public void TestInverse() + { + var maker = from document in Model.AsQueryable(true) where document.Maker.FirstName == "Peter" select document.Maker; + Assert.AreEqual(1, maker.ToList().Count); + } + + + } +} \ No newline at end of file diff --git a/Trinity.Tests/Linq/LinqModelGroupTest.cs b/Trinity.Tests/Linq/LinqModelGroupTest.cs new file mode 100644 index 0000000..3dfa0fe --- /dev/null +++ b/Trinity.Tests/Linq/LinqModelGroupTest.cs @@ -0,0 +1,119 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the 
Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using NUnit.Framework; +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + [TestFixture] + public class LinqModelGroupTest : LinqTestBase + { + [SetUp] + public override void SetUp() + { + // DotNetRdf memory store. + //string connectionString = "provider=dotnetrdf"; + + // OpenLink Virtoso store. + string connectionString = string.Format("{0};rule=urn:semiodesk/test/ruleset", SetupClass.ConnectionString); + + Store = StoreFactory.CreateStore(connectionString); + + IModel model1 = Store.CreateModel(new Uri("http://test.com/test1")); + model1.Clear(); + + IModel model2 = Store.CreateModel(new Uri("http://test.com/test2")); + model2.Clear(); + + Assert.IsTrue(model1.IsEmpty); + Assert.IsTrue(model2.IsEmpty); + + // Add an agent so we can check if types are correctly queried. 
+ Agent a1 = model1.CreateResource(ex.John); + a1.FirstName = "John"; + a1.LastName = "Doe"; + a1.Commit(); + + Group g1 = model1.CreateResource(ex.TheSpiders); + g1.Name = "The Spiders"; + g1.Commit(); + + Group g2 = model2.CreateResource(ex.AlicaKeys); + g2.Name = "Alicia Keys"; + g2.Commit(); + + Person p1 = model1.CreateResource(ex.Alice); + p1.FirstName = "Alice"; + p1.LastName = "Cooper"; + p1.Age = 69; + p1.Birthday = new DateTime(1948, 2, 4); + p1.Group = g1; + p1.Status = true; + p1.AccountBalance = 10000000.1f; + p1.Commit(); + + Person p2 = model1.CreateResource(ex.Bob); + p2.FirstName = "Bob"; + p2.LastName = "Dylan"; + p2.Age = 76; + //p2.Status = false; + p2.Birthday = new DateTime(1941, 5, 24); + p2.AccountBalance = 1000000.1f; + p2.Commit(); + + Person p3 = model2.CreateResource(ex.Eve); + p3.FirstName = "Eve"; + p3.LastName = "Jeffers-Cooper"; + p3.Birthday = new DateTime(1978, 11, 10); + p3.Age = 38; + p3.Group = g2; + p3.AccountBalance = 100000.0f; + p3.Commit(); + + p1.KnownPeople.Add(p2); + p1.Commit(); + + p2.KnownPeople.Add(p1); + p2.KnownPeople.Add(p2); + p2.Commit(); + + p3.Interests.Add(g2); + p3.Interests.Add(p3); + p3.Commit(); + + Image i1 = model1.CreateResource(); + i1.DepictedAgent = p1; + i1.Commit(); + + Assert.IsFalse(model1.IsEmpty); + Assert.IsFalse(model2.IsEmpty); + + Model = Store.CreateModelGroup(model1, model2); + } + } +} \ No newline at end of file diff --git a/Trinity.Tests/Linq/LinqModelTest.cs b/Trinity.Tests/Linq/LinqModelTest.cs new file mode 100644 index 0000000..e4f831d --- /dev/null +++ b/Trinity.Tests/Linq/LinqModelTest.cs @@ -0,0 +1,117 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to 
permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using NUnit.Framework; +using System; +using System.IO; + +namespace Semiodesk.Trinity.Test.Linq +{ + [TestFixture] + public class LinqModelTest : LinqTestBase + { + public LinqModelTest() {} + + [SetUp] + public override void SetUp() + { + // DotNetRdf memory store. + //string connectionString = "provider=dotnetrdf"; + + // OpenLink Virtoso store. + //string connectionString = string.Format("{0};rule=urn:semiodesk/test/ruleset", SetupClass.ConnectionString); + + string connectionString = "provider=stardog;host=http://localhost:5820;uid=admin;pw=admin;sid=test"; + + Store = StoreFactory.CreateStore(connectionString); + + Model = Store.CreateModel(ex.Namespace); + Model.Clear(); + + Assert.IsTrue(Model.IsEmpty); + + // Add an agent so we can check if types are correctly queried. 
+ Agent a1 = Model.CreateResource(ex.John); + a1.FirstName = "John"; + a1.LastName = "Doe"; + a1.Commit(); + + Group g1 = Model.CreateResource(ex.TheSpiders); + g1.Name = "The Spiders"; + g1.Commit(); + + Group g2 = Model.CreateResource(ex.AlicaKeys); + g2.Name = "Alicia Keys"; + g2.Commit(); + + Person p1 = Model.CreateResource(ex.Alice); + p1.FirstName = "Alice"; + p1.LastName = "Cooper"; + p1.Age = 69; + p1.Birthday = new DateTime(1948, 2, 4); + p1.Group = g1; + p1.Status = true; + p1.AccountBalance = 10000000.1f; + p1.Commit(); + + Person p2 = Model.CreateResource(ex.Bob); + p2.FirstName = "Bob"; + p2.LastName = "Dylan"; + p2.Age = 76; + //p2.Status = false; + p2.Birthday = new DateTime(1941, 5, 24); + p2.AccountBalance = 1000000.1f; + p2.Commit(); + + Person p3 = Model.CreateResource(ex.Eve); + p3.FirstName = "Eve"; + p3.LastName = "Jeffers-Cooper"; + p3.Birthday = new DateTime(1978, 11, 10); + p3.Age = 38; + p3.Group = g2; + p3.AccountBalance = 100000.0f; + p3.Commit(); + + p1.KnownPeople.Add(p2); + p1.Commit(); + + p2.KnownPeople.Add(p1); + p2.KnownPeople.Add(p2); + p2.Commit(); + + p3.Interests.Add(g2); + p3.Interests.Add(p3); + p3.Commit(); + + Image i1 = Model.CreateResource(); + i1.DepictedAgent = p1; + i1.Commit(); + + Assert.IsFalse(Model.IsEmpty); + } + } +} \ No newline at end of file diff --git a/Trinity.Tests/Linq/LinqTestBase.cs b/Trinity.Tests/Linq/LinqTestBase.cs new file mode 100644 index 0000000..c5c7207 --- /dev/null +++ b/Trinity.Tests/Linq/LinqTestBase.cs @@ -0,0 +1,1240 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: 
+// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using NUnit.Framework; +using System; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text.RegularExpressions; + +namespace Semiodesk.Trinity.Test.Linq +{ + [TestFixture] + public abstract class LinqTestBase + { + protected IStore Store; + + protected IModel Model; + + public LinqTestBase() { } + + [SetUp] + public abstract void SetUp(); + + [Test] + public void CanAskResourceWithBinaryExpressionOnBoolean() + { + var persons = from person in Model.AsQueryable() where person.Status.Equals(true) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Status select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Status == true select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Status.Equals(false) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where !person.Status select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Status == false select person; + + Assert.IsTrue(persons.Any()); + } + + [Test] + public void 
CanAskResourceWithBinaryExpressionOnDateTime() + { + var persons = from person in Model.AsQueryable() where person.Birthday.Equals(new DateTime(1948, 2, 4)) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday == new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday == new DateTime(1950, 1, 1) select person; + + Assert.IsFalse(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday != new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday < new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday <= new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday >= new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Birthday > new DateTime(1948, 2, 4) select person; + + Assert.IsTrue(persons.Any()); + } + + [Test] + public void CanAskResourceWithBinaryExpressionOnFloat() + { + var persons = from person in Model.AsQueryable() where person.AccountBalance.Equals(100000) select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance == 100000 select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance != 100000 select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance < 100000 select person; + + Assert.IsFalse(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance <= 100000 select person; + + 
Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance >= 100000 select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.AccountBalance > 100000 select person; + + Assert.IsTrue(persons.Any()); + } + + [Test] + public void CanAskResourceWithBinaryExpressionOnString() + { + var persons = from person in Model.AsQueryable() where person.FirstName == "Alice" select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.LastName == "Alice" select person; + + Assert.IsFalse(persons.Any()); + + persons = from person in Model.AsQueryable() where person.FirstName != "Alice" select person; + + Assert.IsTrue(persons.Any()); + } + + [Test] + public void CanAskResourceWithBinaryExpressionOnResource() + { + var persons = from person in Model.AsQueryable() where person.Group.Name.Equals("The Spiders") select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Group.Name == "The Spiders" select person; + + Assert.IsTrue(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Group.Name == "The Bugs" select person; + + Assert.IsFalse(persons.Any()); + + persons = from person in Model.AsQueryable() where person.Group.Name != "The Spiders" select person; + + Assert.IsTrue(persons.Any()); + } + + [Test] + public void CanSelectBooleanWithBinaryExpression() + { + var states = from person in Model.AsQueryable() where person.Status.Equals(true) select person.Status; + + Assert.AreEqual(1, states.ToList().Count); + + states = from person in Model.AsQueryable() where person.Status select person.Status; + + Assert.AreEqual(1, states.ToList().Count); + + states = from person in Model.AsQueryable() where person.Status == true select person.Status; + + Assert.AreEqual(1, states.ToList().Count); + + states = from person in Model.AsQueryable() where 
person.Status.Equals(false) select person.Status; + + Assert.AreEqual(2, states.ToList().Count); + + states = from person in Model.AsQueryable() where !person.Status select person.Status; + + Assert.AreEqual(2, states.ToList().Count); + + states = from person in Model.AsQueryable() where person.Status == false select person.Status; + + Assert.AreEqual(2, states.ToList().Count); + + states = from person in Model.AsQueryable() select person.Status; + + Assert.AreEqual(3, states.ToList().Count); + } + + [Test] + public void CanSelectIntegerWithBinaryExpression() + { + var ages = from person in Model.AsQueryable() where person.Status.Equals(true) select person.Age; + + Assert.AreEqual(1, ages.ToList().Count); + + ages = from person in Model.AsQueryable() where person.Status select person.Age; + + Assert.AreEqual(1, ages.ToList().Count); + + ages = from person in Model.AsQueryable() where person.Status == true select person.Age; + + Assert.AreEqual(1, ages.ToList().Count); + + ages = from person in Model.AsQueryable() where person.Status.Equals(false) select person.Age; + + Assert.AreEqual(2, ages.ToList().Count); + + ages = from person in Model.AsQueryable() where !person.Status select person.Age; + + Assert.AreEqual(2, ages.ToList().Count); + + ages = from person in Model.AsQueryable() where person.Status == false select person.Age; + + Assert.AreEqual(2, ages.ToList().Count); + } + + [Test] + public void CanSelectIntegerWithOrderBy() + { + var n = from person in Model.AsQueryable() orderby person.KnownPeople.Count select person.KnownPeople.Count; + + Assert.AreEqual(new[] { 0, 1, 2 }, n.ToArray()); + + n = from person in Model.AsQueryable() orderby person.KnownPeople.Count descending select person.KnownPeople.Count; + + Assert.AreEqual(new[] { 2, 1, 0 }, n.ToArray()); + + n = (from person in Model.AsQueryable() select person.KnownPeople.Count).OrderBy(i => i); + + Assert.AreEqual(new[] { 0, 1, 2 }, n.ToArray()); + + n = (from person in Model.AsQueryable() select 
person.KnownPeople.Count).OrderByDescending(i => i); + + Assert.AreEqual(new[] { 2, 1, 0 }, n.ToArray()); + } + + [Test] + public void CanSelectIntegerWithResultOperatorSkip() + { + var ages = (from person in Model.AsQueryable() select person.Age).Skip(0); + + Assert.AreEqual(3, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Skip(1); + + Assert.AreEqual(2, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Skip(2); + + Assert.AreEqual(1, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Skip(3); + + Assert.AreEqual(0, ages.ToArray().Length); + } + + [Test] + public void CanSelectIntegerWithResultOperatorTake() + { + var ages = (from person in Model.AsQueryable() select person.Age).Take(0); + + Assert.AreEqual(0, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Take(1); + + Assert.AreEqual(1, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Take(2); + + Assert.AreEqual(2, ages.ToArray().Length); + + ages = (from person in Model.AsQueryable() select person.Age).Take(3); + + Assert.AreEqual(3, ages.ToArray().Length); + } + + [Test] + public void CanSelectDateTimeWithBinaryExpression() + { + var birthdays = from person in Model.AsQueryable() where person.Status.Equals(true) select person.Birthday; + + Assert.AreEqual(1, birthdays.ToList().Count); + + birthdays = from person in Model.AsQueryable() where person.Status select person.Birthday; + + Assert.AreEqual(1, birthdays.ToList().Count); + + birthdays = from person in Model.AsQueryable() where person.Status == true select person.Birthday; + + Assert.AreEqual(1, birthdays.ToList().Count); + + birthdays = from person in Model.AsQueryable() where person.Status.Equals(false) select person.Birthday; + + Assert.AreEqual(2, birthdays.ToList().Count); + + birthdays = from person in Model.AsQueryable() where !person.Status 
select person.Birthday; + + Assert.AreEqual(2, birthdays.ToList().Count); + + birthdays = from person in Model.AsQueryable() where person.Status == false select person.Birthday; + + Assert.AreEqual(2, birthdays.ToList().Count); + } + + [Test] + public void CanSelectStringWithBinaryExpression() + { + var names = from person in Model.AsQueryable() where person.Status.Equals(true) select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.Status select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.Status == true select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.Status.Equals(false) select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where !person.Status select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.Status == false select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithBinaryExpressionOnStringLength() + { + var names = from person in Model.AsQueryable() where person.FirstName.Length == 5 select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.Length != 5 select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.Length < 5 select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.Length <= 5 select person.FirstName; + + Assert.AreEqual(3, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.Length > 5 select person.FirstName; + + 
Assert.AreEqual(0, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.Length >= 5 select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithAsQueryable() + { + var names = Model.AsQueryable().Select(p => p.FirstName); + + Assert.AreEqual(3, names.ToList().Count); + + names = from p in Model.AsQueryable() select p.FirstName; + + Assert.AreEqual(3, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithMethodContains() + { + var names = from person in Model.AsQueryable() where person.FirstName.Contains("e") select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithMethodCount() + { + var count = Model.AsQueryable().Count(p => p.FirstName == "Bob"); + + Assert.AreEqual(1, count); + + count = Model.AsQueryable().Count(p => p.FirstName != "Bob"); + + Assert.AreEqual(2, count); + + count = Model.AsQueryable().Count(p => p.FirstName != "Bob"); + + Assert.AreEqual(1, count); + + count = Model.AsQueryable().Count(p => p.KnownPeople.Any(q => q.FirstName.Equals("Alice") && q.LastName.StartsWith("C"))); + + Assert.AreEqual(1, count); + + count = Model.AsQueryable().Count(p => p.KnownPeople.Any(q => q.FirstName.Equals("Alice") && q.LastName.StartsWith("X"))); + + Assert.AreEqual(0, count); + + count = Model.AsQueryable().Count(p => p.KnownPeople.Any(q => q.FirstName.Equals("Alice") || q.LastName.StartsWith("d", StringComparison.InvariantCultureIgnoreCase))); + + Assert.AreEqual(2, count); + + Assert.Throws(() => + { + count = Model.AsQueryable().Count(p => !p.Interests.Any()); + + //Assert.AreEqual(2, count); + }); + } + + [Test] + public void CanSelectStringWithMethodStartsWith() + { + var names = from person in Model.AsQueryable() where person.FirstName.StartsWith("A") select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where 
person.FirstName.StartsWith("a", true, CultureInfo.CurrentCulture) select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.StartsWith("a", StringComparison.CurrentCultureIgnoreCase) select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.StartsWith("a", StringComparison.InvariantCultureIgnoreCase) select person.FirstName; + + Assert.AreEqual(1, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithMethodEndsWith() + { + var names = from person in Model.AsQueryable() where person.FirstName.EndsWith("e") select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.EndsWith("E", true, CultureInfo.CurrentCulture) select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.EndsWith("E", StringComparison.CurrentCultureIgnoreCase) select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where person.FirstName.EndsWith("E", StringComparison.InvariantCultureIgnoreCase) select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + } + + [Test] + public void CanSelectStringWithRegexIsMatch() + { + var names = from person in Model.AsQueryable() where Regex.IsMatch(person.FirstName, "e") select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + + names = from person in Model.AsQueryable() where Regex.IsMatch(person.FirstName, "E", RegexOptions.IgnoreCase) select person.FirstName; + + Assert.AreEqual(2, names.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithAsQueryable() + { + var persons = Model.AsQueryable(); + + Assert.AreEqual(3, persons.ToList().Count); + } + + [Test] + public void CanSelectResourceWithBinaryExpression() + { + 
var groups = from person in Model.AsQueryable() where person.Status.Equals(true) select person.Group; + + Assert.AreEqual(1, groups.ToList().Count); + + groups = from person in Model.AsQueryable() where person.Status select person.Group; + + Assert.AreEqual(1, groups.ToList().Count); + + groups = from person in Model.AsQueryable() where person.Status == true select person.Group; + + Assert.AreEqual(1, groups.ToList().Count); + + groups = from person in Model.AsQueryable() where person.Status.Equals(false) select person.Group; + + Assert.AreEqual(2, groups.ToList().Count); + + groups = from person in Model.AsQueryable() where !person.Status select person.Group; + + Assert.AreEqual(2, groups.ToList().Count); + + groups = from person in Model.AsQueryable() where person.Status == false select person.Group; + + Assert.AreEqual(2, groups.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnBoolean() + { + var persons = from person in Model.AsQueryable() where person.Status.Equals(true) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status == true select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status != true select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status.Equals(false) select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status == false select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Status != false select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where 
!person.Status select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnDateTime() + { + var persons = from person in Model.AsQueryable() where person.Birthday.Equals(new DateTime(1948, 2, 4)) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday == new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday != new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday < new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday <= new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday >= new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Birthday > new DateTime(1948, 2, 4) select person; + + Assert.AreEqual(1, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnInteger() + { + var persons = from person in Model.AsQueryable() where person.Age.Equals(69) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age == 69 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age != 69 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age < 50 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age <= 
69 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age >= 69 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age > 50 select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnFloat() + { + var persons = from person in Model.AsQueryable() where person.AccountBalance.Equals(100000) select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance == 100000 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance != 100000 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance < 100000 select person; + + Assert.AreEqual(0, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance <= 100000 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance >= 100000 select person; + + Assert.AreEqual(3, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.AccountBalance > 100000 select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnString() + { + var persons = from person in Model.AsQueryable() where person.FirstName == "Alice" select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName != "Alice" select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnResource() + { + Person p = new Person(ex.Alice); + + var persons = from person in 
Model.AsQueryable() where person == p select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person != p select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnResourceMember() + { + var persons = from person in Model.AsQueryable() where person.Group.Name.Equals("The Spiders") select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Group.Name == "The Spiders" select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Group.Name != "The Spiders" select person; + + Assert.AreEqual(1, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnUri() + { + Person p = new Person(ex.Alice); + + var persons = from person in Model.AsQueryable() where person.Uri == p.Uri select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Uri != p.Uri select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Group.Uri == ex.TheSpiders select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Group.Uri != ex.TheSpiders select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithBinaryExpressionOnNull() + { + var persons = from person in Model.AsQueryable() where person.Group == null select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Group != null select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName == "Bob" && person.Group == null select person; + + Assert.AreEqual(1, 
persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName != "Bob" && person.Group == null select person; + + Assert.AreEqual(0, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithNodeTypeAndAlso() + { + var persons = from person in Model.AsQueryable() where person.FirstName == "Alice" && person.LastName == "Cooper" select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName == "Alice" && person.LastName == "Dylan" select person; + + Assert.AreEqual(0, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName == "Alice" && person.LastName == "Cooper" && person.KnownPeople.Count == 1 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Any(q => q.FirstName.Equals("Alice") && q.LastName.StartsWith("C")) select person; + + Assert.AreEqual(1, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithNodeTypeOrElse() + { + var persons = from person in Model.AsQueryable() where person.FirstName == "Alice" || person.FirstName == "Bob" select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName == "Alice" || person.FirstName == "Bob" || person.FirstName == "Eve" select person; + + Assert.AreEqual(3, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.FirstName == "Alice" || person.FirstName == "Bob" || person.KnownPeople.Count == 0 select person; + + Assert.AreEqual(3, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithEqualsOnString() + { + var persons = from person in Model.AsQueryable() where person.FirstName.Equals("Alice") select person; + + Assert.AreEqual(1, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithResultOperatorCount() + { 
+ var persons = from person in Model.AsQueryable() where person.KnownPeople.Count != 1 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Count > 0 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Count >= 1 select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Count < 1 select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Count <= 1 select person; + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithResultOperatorFirst() + { + Assert.Throws(() => + { + Model.AsQueryable().OrderBy(p => p.FirstName).First(p => p.Age > 40 && p.Age < 40); + }); + } + + [Test] + public void CanSelectResourcesWithResultOperatorFirstOrDefault() + { + var persons = Model.AsQueryable().OrderBy(p => p.FirstName).ToList(); + var person = Model.AsQueryable().OrderBy(p => p.FirstName).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.Age).ToList(); + person = Model.AsQueryable().OrderBy(p => p.Age).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.Birthday).ToList(); + person = Model.AsQueryable().OrderBy(p => p.Birthday).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).FirstOrDefault(p => p.Age > 40); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.Age > 40); + 
+ person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.Age > 40); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40 && p.Age < 40).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).FirstOrDefault(p => p.Age > 40 && p.Age < 40); + + Assert.IsEmpty(persons); + Assert.IsNull(person); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40 && p.Age < 40).FirstOrDefault(); + + Assert.IsEmpty(persons); + Assert.IsNull(person); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age < 10 || p.Age > 40).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).FirstOrDefault(p => p.Age < 10 || p.Age > 40); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.Age < 10 || person.Age > 40); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age < 10 || p.Age > 40).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.Age < 10 || person.Age > 40); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).FirstOrDefault(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")).FirstOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.FirstOrDefault()); + Assert.IsTrue(person.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + // Using FirstOrDefault in subqueries is 
not yet supported. + persons = Model.AsQueryable().Where(p => p.KnownPeople.OrderBy(q => q.FirstName).FirstOrDefault(q => q.KnownPeople.Count == 1) != null).ToList(); + + Assert.IsNotEmpty(persons); + + foreach (Person p in persons) + { + Assert.AreEqual(1, p.KnownPeople.OrderBy(q => q.FirstName).FirstOrDefault().KnownPeople.Count); + } + + persons = Model.AsQueryable().Where(p => p.KnownPeople.OrderBy(q => q.FirstName).FirstOrDefault().KnownPeople.Count == 1).ToList(); + + Assert.IsNotEmpty(persons); + + foreach (Person p in persons) + { + Assert.AreEqual(1, p.KnownPeople.OrderBy(q => q.FirstName).FirstOrDefault().KnownPeople.Count); + } + } + + [Test] + public void CanSelectResourcesWithResultOperatorLast() + { + Assert.Throws(() => + { + Model.AsQueryable().OrderBy(p => p.FirstName).Last(p => p.Age > 40 && p.Age < 40); + }); + } + + [Test] + public void CanSelectResourcesWithResultOperatorLastOrDefault() + { + var persons = Model.AsQueryable().OrderBy(p => p.FirstName).ToList(); + var person = Model.AsQueryable().OrderBy(p => p.FirstName).LastOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.Age).ToList(); + person = Model.AsQueryable().OrderBy(p => p.Age).LastOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.Birthday).ToList(); + person = Model.AsQueryable().OrderBy(p => p.Birthday).LastOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).LastOrDefault(p => p.Age > 40); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + Assert.IsTrue(person.Age > 40); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40 && p.Age < 40).ToList(); 
+ person = Model.AsQueryable().OrderBy(p => p.FirstName).LastOrDefault(p => p.Age > 40 && p.Age < 40); + + Assert.IsEmpty(persons); + Assert.IsNull(person); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age > 40 && p.Age < 40).LastOrDefault(); + + Assert.IsEmpty(persons); + Assert.IsNull(person); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age < 10 || p.Age > 40).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).LastOrDefault(p => p.Age < 10 || p.Age > 40); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + Assert.IsTrue(person.Age < 10 || person.Age > 40); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.Age < 10 || p.Age > 40).LastOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + Assert.IsTrue(person.Age < 10 || person.Age > 40); + + persons = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")).ToList(); + person = Model.AsQueryable().OrderBy(p => p.FirstName).LastOrDefault(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + Assert.IsTrue(person.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + person = Model.AsQueryable().OrderBy(p => p.FirstName).Where(p => p.KnownPeople.Any(p0 => p0.FirstName == "Alice")).LastOrDefault(); + + Assert.IsNotNull(person); + Assert.AreEqual(person, persons.LastOrDefault()); + Assert.IsTrue(person.KnownPeople.Any(p0 => p0.FirstName == "Alice")); + + // Using LastOrDefault in subqueries is not yet supported. 
+ persons = Model.AsQueryable().Where(p => p.KnownPeople.OrderBy(q => q.FirstName).LastOrDefault(q => q.KnownPeople.Count == 1) != null).ToList(); + + Assert.IsNotEmpty(persons); + + foreach (Person p in persons) + { + Assert.AreEqual(1, p.KnownPeople.OrderBy(q => q.FirstName).LastOrDefault().KnownPeople.Count); + } + + /* + Assert.Throws(() => + { + Model.AsQueryable().Where(p => p.KnownPeople.LastOrDefault(q => q.KnownPeople.Count == 1) != null).ToList(); + }); + */ + } + + [Test] + public void CanSelectResourcesWithResultOperatorSkip() + { + var persons = (from person in Model.AsQueryable() select person).Skip(0); + + Assert.AreEqual(3, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Skip(1); + + Assert.AreEqual(2, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Skip(2); + + Assert.AreEqual(1, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Skip(3); + + Assert.AreEqual(0, persons.ToArray().Length); + } + + [Test] + public void CanSelectResourcesWithResultOperatorTake() + { + var persons = (from person in Model.AsQueryable() select person).Take(0); + + Assert.AreEqual(0, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Take(1); + + Assert.AreEqual(1, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Take(2); + + Assert.AreEqual(2, persons.ToArray().Length); + + persons = (from person in Model.AsQueryable() select person).Take(3); + + Assert.AreEqual(3, persons.ToArray().Length); + } + + [Test] + public void CanSelectResourcesWithOperatorTypeOf() + { + var resources = from resource in Model.AsQueryable() select resource; + + Assert.AreEqual(7, resources.ToList().Count); + + resources = from resource in Model.AsQueryable() where resource is Person select resource; + + Assert.AreEqual(3, resources.ToList().Count); + + resources = from resource in 
Model.AsQueryable() where resource.Interests.OfType().Count() > 0 select resource; + + Assert.AreEqual(1, resources.ToList().Count); + + resources = from resource in Model.AsQueryable() where resource.Interests.OfType().Count() > 0 select resource; + + Assert.AreEqual(1, resources.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithSubQuery() + { + var persons = from person in Model.AsQueryable() where person.KnownPeople.Any(p => p.FirstName == "Alice") select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Any(p => p.FirstName == "Alice") && person.KnownPeople.Any(p => p.FirstName == "Bob") && person.KnownPeople.Any(p => p.FirstName == "Eve") select person; + + var x = persons.ToList(); + + Assert.AreEqual(0, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Any(p => p.FirstName == "Alice") || person.KnownPeople.Any(p => p.FirstName == "Eve") select person; + + var y = persons.ToList(); + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.KnownPeople.Any(p => p.FirstName == "Alice") || person.KnownPeople.Any(p => p.FirstName == "Bob") select person; + + var z = persons.ToList(); + + Assert.AreEqual(2, persons.ToList().Count); + } + + [Test] + public void CanSelectResourcesWithOrderBy() + { + var persons = from person in Model.AsQueryable() orderby person.KnownPeople.Count select person; + + var P = persons.ToArray(); + + Assert.AreEqual(3, P.Length); + Assert.AreEqual(0, P[0].KnownPeople.Count); + Assert.AreEqual(1, P[1].KnownPeople.Count); + Assert.AreEqual(2, P[2].KnownPeople.Count); + + persons = from person in Model.AsQueryable() orderby person.KnownPeople.Count descending select person; + + P = persons.ToArray(); + + Assert.AreEqual(3, P.Length); + Assert.AreEqual(2, P[0].KnownPeople.Count); + Assert.AreEqual(1, P[1].KnownPeople.Count); + Assert.AreEqual(0, 
P[2].KnownPeople.Count); + + persons = (from person in Model.AsQueryable() select person).OrderBy(p => p.KnownPeople.Count); + + P = persons.ToArray(); + + Assert.AreEqual(3, P.Length); + Assert.AreEqual(0, P[0].KnownPeople.Count); + Assert.AreEqual(1, P[1].KnownPeople.Count); + Assert.AreEqual(2, P[2].KnownPeople.Count); + + persons = (from person in Model.AsQueryable() select person).OrderByDescending(p => p.KnownPeople.Count); + + P = persons.ToArray(); + + Assert.AreEqual(3, P.Length); + Assert.AreEqual(2, P[0].KnownPeople.Count); + Assert.AreEqual(1, P[1].KnownPeople.Count); + Assert.AreEqual(0, P[2].KnownPeople.Count); + } + + [Test] + public void CanSelectResourcesWithVariableExpression() + { + foreach (int age in new[] { 40, 50, 60 }) + { + CanSelectResourcesWithVariableExpression(age); + } + } + + [Test] + public void CanSelectResourcesWhichImplementInterface() + { + var images = (from image in Model.AsQueryable() where image.DepictedAgent.Uri == ex.Alice select image).ToList(); + + Assert.AreEqual(1, images.Count); + + // Tests if retrieving resources is possible through extension methods + // that have generic parameters with iterfaces. + Agent agent = Model.GetResource(ex.Alice); + + images = agent.GetImages(Model).Where(i => i.DepictedAgent == agent).ToList(); + + Assert.AreEqual(1, images.Count); + } + + [Test] + public void CanExecuteCollectionWithInferencingEnabled() + { + // See if inferencing works on resource queries. + var agents = Model.AsQueryable().ToList(); + + Assert.AreEqual(1, agents.Count); + + agents = Model.AsQueryable(true).ToList(); + + Assert.AreEqual(6, agents.Count); + + // See if inferencing works with queries that return bindings. 
+ var names = Model.AsQueryable().Select(a => a.FirstName).ToList(); + + Assert.AreEqual(1, names.Count); + + names = Model.AsQueryable(true).Select(a => a.FirstName).ToList(); + + Assert.AreEqual(4, names.Count); + } + + [Test] + public void CanExecuteScalarWithInferencingEnabled() + { + // See if inferencing works for boolean (ASK) queries. + bool hasAgent = Model.AsQueryable().Where(a => a.FirstName == "Alice").Any(); + + Assert.IsFalse(hasAgent); + + hasAgent = Model.AsQueryable(true).Where(a => a.FirstName == "Alice").Any(); + + Assert.IsTrue(hasAgent); + + // See if inferencing works for queries that return numeric bindings. + int agentCount = Model.AsQueryable().Where(a => a.FirstName == "Alice").Count(); + + Assert.AreEqual(0, agentCount); + + agentCount = Model.AsQueryable(true).Where(a => a.FirstName == "Alice").Count(); + + Assert.AreEqual(1, agentCount); + } + + private void CanSelectResourcesWithVariableExpression(int minAge) + { + var persons = from person in Model.AsQueryable() where person.Age > minAge select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age >= minAge select person; + + Assert.AreEqual(2, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age < minAge select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age <= minAge select person; + + Assert.AreEqual(1, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age != minAge select person; + + Assert.AreEqual(3, persons.ToList().Count); + + persons = from person in Model.AsQueryable() where person.Age == minAge select person; + + Assert.AreEqual(0, persons.ToList().Count); + } + + private void DumpModel() + { + Debug.WriteLine(""); + + ISparqlQuery q = new SparqlQuery(@"SELECT * WHERE { ?s ?p ?o . 
}"); + + foreach (BindingSet b in Model.GetBindings(q)) + { + Debug.WriteLine(b["s"] + " " + b["p"] + " " + b["o"]); + } + + Debug.WriteLine(""); + } + } +} \ No newline at end of file diff --git a/Trinity.Tests/Linq/ObjectModel/Agent.cs b/Trinity.Tests/Linq/ObjectModel/Agent.cs new file mode 100644 index 0000000..4e25738 --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/Agent.cs @@ -0,0 +1,72 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; +using System.Linq; + +namespace Semiodesk.Trinity.Test.Linq +{ + [RdfClass(FOAF.Agent)] + public class Agent : Resource + { + #region Members + + [RdfProperty(FOAF.firstName)] + public string FirstName { get; set; } + + [RdfProperty(FOAF.lastName)] + public string LastName { get; set; } + + #endregion + + #region Constructors + + public Agent(Uri uri) : base(uri) { } + + #endregion + } + + public static class AgentExtensions + { + /// + /// A generic extension method used for testing purposes. + /// + /// + /// Used for a test where Remotion LINQ delivers the interface type on a MemberInfo rather + /// than the instance type. Since interfaces have no RdfPropertyAttributes, the SPARQL query + /// generator failed in these cases. + /// + /// A resource which implements an interface. + /// An agent + /// The model to be queried. + /// A queryable object. + public static IQueryable GetImages(this Agent agent, IModel model) where T : Resource, IImage + { + return model.AsQueryable().Where(i => i.DepictedAgent == agent); + } + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/Group.cs b/Trinity.Tests/Linq/ObjectModel/Group.cs new file mode 100644 index 0000000..2edd250 --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/Group.cs @@ -0,0 +1,48 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + [RdfClass(FOAF.Group)] + internal class Group : Resource + { + #region Members + + [RdfProperty(FOAF.name)] + public string Name { get; set; } + + #endregion + + #region Constructors + + public Group(Uri uri) : base(uri) { } + + #endregion + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/IImage.cs b/Trinity.Tests/Linq/ObjectModel/IImage.cs new file mode 100644 index 0000000..10d561e --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/IImage.cs @@ -0,0 +1,34 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +namespace Semiodesk.Trinity.Test.Linq +{ + public interface IImage : IResource + { + Agent DepictedAgent { get; set; } + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/Image.cs b/Trinity.Tests/Linq/ObjectModel/Image.cs new file mode 100644 index 0000000..b12efcd --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/Image.cs @@ -0,0 +1,48 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + [RdfClass(FOAF.Image)] + public class Image : Resource, IImage + { + #region Members + + [RdfProperty(FOAF.depicts)] + public Agent DepictedAgent { get; set; } + + #endregion + + #region Constructors + + public Image(Uri uri) : base(uri) { } + + #endregion + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/Page.cs b/Trinity.Tests/Linq/ObjectModel/Page.cs new file mode 100644 index 0000000..ec43eb4 --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/Page.cs @@ -0,0 +1,52 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + [RdfClass(FOAF.Document)] + internal class Document : Resource + { + #region Members + + [RdfProperty(FOAF.maker)] + public Person Maker { get; set; } + + + [RdfProperty("http://www.w3.org/2000/01/rdf-schema#label")] + public string Title { get; set; } + + #endregion + + #region Constructors + + public Document(Uri uri) : base(uri) { } + + #endregion + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/Person.cs b/Trinity.Tests/Linq/ObjectModel/Person.cs new file mode 100644 index 0000000..d2416cc --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/Person.cs @@ -0,0 +1,70 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; +using System.Collections.Generic; + +namespace Semiodesk.Trinity.Test.Linq +{ + [RdfClass(FOAF.Person)] + internal class Person : Agent + { + #region Members + + [RdfProperty(FOAF.age)] + public int Age { get; set; } + + [RdfProperty(FOAF.birthday)] + public DateTime Birthday { get; set; } + + [RdfProperty(FOAF.knows)] + public List KnownPeople { get; set; } + + [RdfProperty(FOAF.member)] + public Group Group { get; set; } + + [RdfProperty(FOAF.status)] + public bool Status { get; set; } + + [RdfProperty(FOAF.account)] + public float AccountBalance { get; set; } + + [RdfProperty(FOAF.interest)] + public List Interests { get; set; } + + [RdfProperty(FOAF.made)] + public List Made { get; set; } + + #endregion + + #region Constructors + + public Person(Uri uri) : base(uri) {} + + #endregion + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/ex.cs b/Trinity.Tests/Linq/ObjectModel/ex.cs new file mode 100644 index 0000000..3d30cb2 --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/ex.cs @@ -0,0 +1,55 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2018 + +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + /// + ///Example vocabulary. + /// + public class ex : Ontology + { + public static readonly Uri Namespace = new Uri("http://example.org/test"); + public static Uri GetNamespace() { return Namespace; } + + public static readonly string Prefix = "ex"; + public static string GetPrefix() { return Prefix; } + + public static readonly Uri Alice = new Uri("http://example.org/test/Alice"); + + public static readonly Uri Bob = new Uri("http://example.org/test/Bob"); + + public static readonly Uri Eve = new Uri("http://example.org/test/Eve"); + + public static readonly Uri John = new Uri("http://example.org/test/John"); + + public static readonly Uri TheSpiders = new Uri("http://example.org/test/TheSpiders"); + + public static readonly Uri AlicaKeys = new Uri("http://example.org/test/AlicaKeys"); + } +} diff --git a/Trinity.Tests/Linq/ObjectModel/foaf.cs b/Trinity.Tests/Linq/ObjectModel/foaf.cs new file mode 100644 index 0000000..fe0267c --- /dev/null +++ b/Trinity.Tests/Linq/ObjectModel/foaf.cs @@ -0,0 +1,1087 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice 
shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; + +namespace Semiodesk.Trinity.Test.Linq +{ + /// + ///Friend of a Friend (FOAF) vocabulary + /// + public class foaf : Ontology + { + public static readonly Uri Namespace = new Uri("http://xmlns.com/foaf/0.1/"); + public static Uri GetNamespace() { return Namespace; } + + public static readonly string Prefix = "foaf"; + public static string GetPrefix() { return Prefix; } + + /// + /// + /// + /// + public static readonly Property date = new Property(new Uri("http://purl.org/dc/elements/1.1/date")); + + /// + /// + /// + /// + public static readonly Property description = new Property(new Uri("http://purl.org/dc/elements/1.1/description")); + + /// + /// + /// + /// + public static readonly Property title = new Property(new Uri("http://purl.org/dc/elements/1.1/title")); + + /// + /// + /// + /// + public static readonly Class Class = new Class(new Uri("http://www.w3.org/2000/01/rdf-schema#Class")); + + /// + /// + /// + /// + public static readonly Resource Thing = new Resource(new Uri("http://www.w3.org/2002/07/owl#Thing")); + + /// + /// + /// + /// + public static readonly Class SpatialThing = new Class(new Uri("http://www.w3.org/2003/01/geo/wgs84_pos#SpatialThing")); + + /// + /// + /// + /// + public static readonly Property term_status = new Property(new 
Uri("http://www.w3.org/2003/06/sw-vocab-status/ns#term_status")); + + /// + /// + /// + /// + public static readonly Resource Concept = new Resource(new Uri("http://www.w3.org/2004/02/skos/core#Concept")); + + /// + /// + /// + /// + public static readonly Resource _0_1 = new Resource(new Uri("http://xmlns.com/foaf/0.1/")); + + /// + ///An agent (eg. person, group, software or physical artifact). + /// + /// + public static readonly Class Agent = new Class(new Uri("http://xmlns.com/foaf/0.1/Agent")); + + /// + ///A document. + /// + /// + public static readonly Class Document = new Class(new Uri("http://xmlns.com/foaf/0.1/Document")); + + /// + ///A class of Agents. + /// + /// + public static readonly Class Group = new Class(new Uri("http://xmlns.com/foaf/0.1/Group")); + + /// + ///An image. + /// + /// + public static readonly Class Image = new Class(new Uri("http://xmlns.com/foaf/0.1/Image")); + + /// + ///A foaf:LabelProperty is any RDF property with texual values that serve as labels. + /// + /// + public static readonly Class LabelProperty = new Class(new Uri("http://xmlns.com/foaf/0.1/LabelProperty")); + + /// + ///An online account. + /// + /// + public static readonly Class OnlineAccount = new Class(new Uri("http://xmlns.com/foaf/0.1/OnlineAccount")); + + /// + ///An online chat account. + /// + /// + public static readonly Class OnlineChatAccount = new Class(new Uri("http://xmlns.com/foaf/0.1/OnlineChatAccount")); + + /// + ///An online e-commerce account. + /// + /// + public static readonly Class OnlineEcommerceAccount = new Class(new Uri("http://xmlns.com/foaf/0.1/OnlineEcommerceAccount")); + + /// + ///An online gaming account. + /// + /// + public static readonly Class OnlineGamingAccount = new Class(new Uri("http://xmlns.com/foaf/0.1/OnlineGamingAccount")); + + /// + ///An organization. + /// + /// + public static readonly Class Organization = new Class(new Uri("http://xmlns.com/foaf/0.1/Organization")); + + /// + ///A person. 
+ /// + /// + public static readonly Class Person = new Class(new Uri("http://xmlns.com/foaf/0.1/Person")); + + /// + ///A personal profile RDF document. + /// + /// + public static readonly Class PersonalProfileDocument = new Class(new Uri("http://xmlns.com/foaf/0.1/PersonalProfileDocument")); + + /// + ///A project (a collective endeavour of some kind). + /// + /// + public static readonly Class Project = new Class(new Uri("http://xmlns.com/foaf/0.1/Project")); + + /// + ///Indicates an account held by this agent. + /// + /// + public static readonly Property account = new Property(new Uri("http://xmlns.com/foaf/0.1/account")); + + /// + ///Indicates the name (identifier) associated with this online account. + /// + /// + public static readonly Property accountName = new Property(new Uri("http://xmlns.com/foaf/0.1/accountName")); + + /// + ///Indicates a homepage of the service provide for this online account. + /// + /// + public static readonly Property accountServiceHomepage = new Property(new Uri("http://xmlns.com/foaf/0.1/accountServiceHomepage")); + + /// + ///The age in years of some agent. + /// + /// + public static readonly Property age = new Property(new Uri("http://xmlns.com/foaf/0.1/age")); + + /// + ///An AIM chat ID + /// + /// + public static readonly Property aimChatID = new Property(new Uri("http://xmlns.com/foaf/0.1/aimChatID")); + + /// + ///A location that something is based near, for some broadly human notion of near. + /// + /// + public static readonly Property based_near = new Property(new Uri("http://xmlns.com/foaf/0.1/based_near")); + + /// + ///The birthday of this Agent, represented in mm-dd string form, eg. '12-31'. + /// + /// + public static readonly Property birthday = new Property(new Uri("http://xmlns.com/foaf/0.1/birthday")); + + /// + ///A current project this person works on. 
+ /// + /// + public static readonly Property currentProject = new Property(new Uri("http://xmlns.com/foaf/0.1/currentProject")); + + /// + ///A depiction of some thing. + /// + /// + public static readonly Property depiction = new Property(new Uri("http://xmlns.com/foaf/0.1/depiction")); + + /// + ///A thing depicted in this representation. + /// + /// + public static readonly Property depicts = new Property(new Uri("http://xmlns.com/foaf/0.1/depicts")); + + /// + ///A checksum for the DNA of some thing. Joke. + /// + /// + public static readonly Property dnaChecksum = new Property(new Uri("http://xmlns.com/foaf/0.1/dnaChecksum")); + + /// + ///The family name of some person. + /// + /// + public static readonly Property familyName = new Property(new Uri("http://xmlns.com/foaf/0.1/familyName")); + + /// + ///The family name of some person. + /// + /// + public static readonly Property family_name = new Property(new Uri("http://xmlns.com/foaf/0.1/family_name")); + + /// + ///The first name of a person. + /// + /// + public static readonly Property firstName = new Property(new Uri("http://xmlns.com/foaf/0.1/firstName")); + + /// + ///The underlying or 'focal' entity associated with some SKOS-described concept. + /// + /// + public static readonly Property focus = new Property(new Uri("http://xmlns.com/foaf/0.1/focus")); + + /// + ///An organization funding a project or person. + /// + /// + public static readonly Property fundedBy = new Property(new Uri("http://xmlns.com/foaf/0.1/fundedBy")); + + /// + ///A textual geekcode for this person, see http://www.geekcode.com/geek.html + /// + /// + public static readonly Property geekcode = new Property(new Uri("http://xmlns.com/foaf/0.1/geekcode")); + + /// + ///The gender of this Agent (typically but not necessarily 'male' or 'female'). + /// + /// + public static readonly Property gender = new Property(new Uri("http://xmlns.com/foaf/0.1/gender")); + + /// + ///The given name of some person. 
+ /// + /// + public static readonly Property givenName = new Property(new Uri("http://xmlns.com/foaf/0.1/givenName")); + + /// + ///The given name of some person. + /// + /// + public static readonly Property givenname = new Property(new Uri("http://xmlns.com/foaf/0.1/givenname")); + + /// + ///Indicates an account held by this agent. + /// + /// + public static readonly Property holdsAccount = new Property(new Uri("http://xmlns.com/foaf/0.1/holdsAccount")); + + /// + ///A homepage for some thing. + /// + /// + public static readonly Property homepage = new Property(new Uri("http://xmlns.com/foaf/0.1/homepage")); + + /// + ///An ICQ chat ID + /// + /// + public static readonly Property icqChatID = new Property(new Uri("http://xmlns.com/foaf/0.1/icqChatID")); + + /// + ///An image that can be used to represent some thing (ie. those depictions which are particularly representative of something, eg. one's photo on a homepage). + /// + /// + public static readonly Property img = new Property(new Uri("http://xmlns.com/foaf/0.1/img")); + + /// + ///A page about a topic of interest to this person. + /// + /// + public static readonly Property interest = new Property(new Uri("http://xmlns.com/foaf/0.1/interest")); + + /// + ///A document that this thing is the primary topic of. + /// + /// + public static readonly Property isPrimaryTopicOf = new Property(new Uri("http://xmlns.com/foaf/0.1/isPrimaryTopicOf")); + + /// + ///A jabber ID for something. + /// + /// + public static readonly Property jabberID = new Property(new Uri("http://xmlns.com/foaf/0.1/jabberID")); + + /// + ///A person known by this person (indicating some level of reciprocated interaction between the parties). + /// + /// + public static readonly Property knows = new Property(new Uri("http://xmlns.com/foaf/0.1/knows")); + + /// + ///The last name of a person. 
+ /// + /// + public static readonly Property lastName = new Property(new Uri("http://xmlns.com/foaf/0.1/lastName")); + + /// + ///A logo representing some thing. + /// + /// + public static readonly Property logo = new Property(new Uri("http://xmlns.com/foaf/0.1/logo")); + + /// + ///Something that was made by this agent. + /// + /// + public static readonly Property made = new Property(new Uri("http://xmlns.com/foaf/0.1/made")); + + /// + ///An agent that made this thing. + /// + /// + public static readonly Property maker = new Property(new Uri("http://xmlns.com/foaf/0.1/maker")); + + /// + ///A personal mailbox, ie. an Internet mailbox associated with exactly one owner, the first owner of this mailbox. This is a 'static inverse functional property', in that there is (across time and change) at most one individual that ever has any particular value for foaf:mbox. + /// + /// + public static readonly Property mbox = new Property(new Uri("http://xmlns.com/foaf/0.1/mbox")); + + /// + ///The sha1sum of the URI of an Internet mailbox associated with exactly one owner, the first owner of the mailbox. + /// + /// + public static readonly Property mbox_sha1sum = new Property(new Uri("http://xmlns.com/foaf/0.1/mbox_sha1sum")); + + /// + ///Indicates a member of a Group + /// + /// + public static readonly Property member = new Property(new Uri("http://xmlns.com/foaf/0.1/member")); + + /// + ///Indicates the class of individuals that are a member of a Group + /// + /// + public static readonly Property membershipClass = new Property(new Uri("http://xmlns.com/foaf/0.1/membershipClass")); + + /// + ///An MSN chat ID + /// + /// + public static readonly Property msnChatID = new Property(new Uri("http://xmlns.com/foaf/0.1/msnChatID")); + + /// + ///A Myers Briggs (MBTI) personality classification. + /// + /// + public static readonly Property myersBriggs = new Property(new Uri("http://xmlns.com/foaf/0.1/myersBriggs")); + + /// + ///A name for some thing. 
+ /// + /// + public static readonly Property name = new Property(new Uri("http://xmlns.com/foaf/0.1/name")); + + /// + ///A short informal nickname characterising an agent (includes login identifiers, IRC and other chat nicknames). + /// + /// + public static readonly Property nick = new Property(new Uri("http://xmlns.com/foaf/0.1/nick")); + + /// + ///An OpenID for an Agent. + /// + /// + public static readonly Property openid = new Property(new Uri("http://xmlns.com/foaf/0.1/openid")); + + /// + ///A page or document about this thing. + /// + /// + public static readonly Property page = new Property(new Uri("http://xmlns.com/foaf/0.1/page")); + + /// + ///A project this person has previously worked on. + /// + /// + public static readonly Property pastProject = new Property(new Uri("http://xmlns.com/foaf/0.1/pastProject")); + + /// + ///A phone, specified using fully qualified tel: URI scheme (refs: http://www.w3.org/Addressing/schemes.html#tel). + /// + /// + public static readonly Property phone = new Property(new Uri("http://xmlns.com/foaf/0.1/phone")); + + /// + ///A .plan comment, in the tradition of finger and '.plan' files. + /// + /// + public static readonly Property plan = new Property(new Uri("http://xmlns.com/foaf/0.1/plan")); + + /// + ///The primary topic of some page or document. + /// + /// + public static readonly Property primaryTopic = new Property(new Uri("http://xmlns.com/foaf/0.1/primaryTopic")); + + /// + ///A link to the publications of this person. + /// + /// + public static readonly Property publications = new Property(new Uri("http://xmlns.com/foaf/0.1/publications")); + + /// + ///A homepage of a school attended by the person. + /// + /// + public static readonly Property schoolHomepage = new Property(new Uri("http://xmlns.com/foaf/0.1/schoolHomepage")); + + /// + ///A sha1sum hash, in hex. 
+ /// + /// + public static readonly Property sha1 = new Property(new Uri("http://xmlns.com/foaf/0.1/sha1")); + + /// + ///A Skype ID + /// + /// + public static readonly Property skypeID = new Property(new Uri("http://xmlns.com/foaf/0.1/skypeID")); + + /// + ///A string expressing what the user is happy for the general public (normally) to know about their current activity. + /// + /// + public static readonly Property status = new Property(new Uri("http://xmlns.com/foaf/0.1/status")); + + /// + ///The surname of some person. + /// + /// + public static readonly Property surname = new Property(new Uri("http://xmlns.com/foaf/0.1/surname")); + + /// + ///A theme. + /// + /// + public static readonly Property theme = new Property(new Uri("http://xmlns.com/foaf/0.1/theme")); + + /// + ///A derived thumbnail image. + /// + /// + public static readonly Property thumbnail = new Property(new Uri("http://xmlns.com/foaf/0.1/thumbnail")); + + /// + ///A tipjar document for this agent, describing means for payment and reward. + /// + /// + public static readonly Property tipjar = new Property(new Uri("http://xmlns.com/foaf/0.1/tipjar")); + + /// + ///Title (Mr, Mrs, Ms, Dr. etc) + /// + /// + public static readonly Property title_0 = new Property(new Uri("http://xmlns.com/foaf/0.1/title")); + + /// + ///A topic of some page or document. + /// + /// + public static readonly Property topic = new Property(new Uri("http://xmlns.com/foaf/0.1/topic")); + + /// + ///A thing of interest to this person. + /// + /// + public static readonly Property topic_interest = new Property(new Uri("http://xmlns.com/foaf/0.1/topic_interest")); + + /// + ///A weblog of some thing (whether person, group, company etc.). + /// + /// + public static readonly Property weblog = new Property(new Uri("http://xmlns.com/foaf/0.1/weblog")); + + /// + ///A work info homepage of some person; a page about their work for some organization. 
+ /// + /// + public static readonly Property workInfoHomepage = new Property(new Uri("http://xmlns.com/foaf/0.1/workInfoHomepage")); + + /// + ///A workplace homepage of some person; the homepage of an organization they work for. + /// + /// + public static readonly Property workplaceHomepage = new Property(new Uri("http://xmlns.com/foaf/0.1/workplaceHomepage")); + + /// + ///A Yahoo chat ID + /// + /// + public static readonly Property yahooChatID = new Property(new Uri("http://xmlns.com/foaf/0.1/yahooChatID")); + + /// + /// + /// + /// + public static readonly Property assurance = new Property(new Uri("http://xmlns.com/wot/0.1/assurance")); + + /// + /// + /// + /// + public static readonly Property src_assurance = new Property(new Uri("http://xmlns.com/wot/0.1/src_assurance")); + } + + /// + ///Friend of a Friend (FOAF) vocabulary + /// + public static class FOAF + { + public static readonly Uri Namespace = new Uri("http://xmlns.com/foaf/0.1/"); + public static Uri GetNamespace() { return Namespace; } + + public static readonly string Prefix = "FOAF"; + public static string GetPrefix() { return Prefix; } + + /// + /// + /// + /// + public const string date = "http://purl.org/dc/elements/1.1/date"; + + /// + /// + /// + /// + public const string description = "http://purl.org/dc/elements/1.1/description"; + + /// + /// + /// + /// + public const string title = "http://purl.org/dc/elements/1.1/title"; + + /// + /// + /// + /// + public const string Class = "http://www.w3.org/2000/01/rdf-schema#Class"; + + /// + /// + /// + /// + public const string Thing = "http://www.w3.org/2002/07/owl#Thing"; + + /// + /// + /// + /// + public const string SpatialThing = "http://www.w3.org/2003/01/geo/wgs84_pos#SpatialThing"; + + /// + /// + /// + /// + public const string term_status = "http://www.w3.org/2003/06/sw-vocab-status/ns#term_status"; + + /// + /// + /// + /// + public const string Concept = "http://www.w3.org/2004/02/skos/core#Concept"; + + /// + /// + /// + /// + 
public const string _0_1 = "http://xmlns.com/foaf/0.1/"; + + /// + ///An agent (eg. person, group, software or physical artifact). + /// + /// + public const string Agent = "http://xmlns.com/foaf/0.1/Agent"; + + /// + ///A document. + /// + /// + public const string Document = "http://xmlns.com/foaf/0.1/Document"; + + /// + ///A class of Agents. + /// + /// + public const string Group = "http://xmlns.com/foaf/0.1/Group"; + + /// + ///An image. + /// + /// + public const string Image = "http://xmlns.com/foaf/0.1/Image"; + + /// + ///A foaf:LabelProperty is any RDF property with texual values that serve as labels. + /// + /// + public const string LabelProperty = "http://xmlns.com/foaf/0.1/LabelProperty"; + + /// + ///An online account. + /// + /// + public const string OnlineAccount = "http://xmlns.com/foaf/0.1/OnlineAccount"; + + /// + ///An online chat account. + /// + /// + public const string OnlineChatAccount = "http://xmlns.com/foaf/0.1/OnlineChatAccount"; + + /// + ///An online e-commerce account. + /// + /// + public const string OnlineEcommerceAccount = "http://xmlns.com/foaf/0.1/OnlineEcommerceAccount"; + + /// + ///An online gaming account. + /// + /// + public const string OnlineGamingAccount = "http://xmlns.com/foaf/0.1/OnlineGamingAccount"; + + /// + ///An organization. + /// + /// + public const string Organization = "http://xmlns.com/foaf/0.1/Organization"; + + /// + ///A person. + /// + /// + public const string Person = "http://xmlns.com/foaf/0.1/Person"; + + /// + ///A personal profile RDF document. + /// + /// + public const string PersonalProfileDocument = "http://xmlns.com/foaf/0.1/PersonalProfileDocument"; + + /// + ///A project (a collective endeavour of some kind). + /// + /// + public const string Project = "http://xmlns.com/foaf/0.1/Project"; + + /// + ///Indicates an account held by this agent. 
+ /// + /// + public const string account = "http://xmlns.com/foaf/0.1/account"; + + /// + ///Indicates the name (identifier) associated with this online account. + /// + /// + public const string accountName = "http://xmlns.com/foaf/0.1/accountName"; + + /// + ///Indicates a homepage of the service provide for this online account. + /// + /// + public const string accountServiceHomepage = "http://xmlns.com/foaf/0.1/accountServiceHomepage"; + + /// + ///The age in years of some agent. + /// + /// + public const string age = "http://xmlns.com/foaf/0.1/age"; + + /// + ///An AIM chat ID + /// + /// + public const string aimChatID = "http://xmlns.com/foaf/0.1/aimChatID"; + + /// + ///A location that something is based near, for some broadly human notion of near. + /// + /// + public const string based_near = "http://xmlns.com/foaf/0.1/based_near"; + + /// + ///The birthday of this Agent, represented in mm-dd string form, eg. '12-31'. + /// + /// + public const string birthday = "http://xmlns.com/foaf/0.1/birthday"; + + /// + ///A current project this person works on. + /// + /// + public const string currentProject = "http://xmlns.com/foaf/0.1/currentProject"; + + /// + ///A depiction of some thing. + /// + /// + public const string depiction = "http://xmlns.com/foaf/0.1/depiction"; + + /// + ///A thing depicted in this representation. + /// + /// + public const string depicts = "http://xmlns.com/foaf/0.1/depicts"; + + /// + ///A checksum for the DNA of some thing. Joke. + /// + /// + public const string dnaChecksum = "http://xmlns.com/foaf/0.1/dnaChecksum"; + + /// + ///The family name of some person. + /// + /// + public const string familyName = "http://xmlns.com/foaf/0.1/familyName"; + + /// + ///The family name of some person. + /// + /// + public const string family_name = "http://xmlns.com/foaf/0.1/family_name"; + + /// + ///The first name of a person. 
+ /// + /// + public const string firstName = "http://xmlns.com/foaf/0.1/firstName"; + + /// + ///The underlying or 'focal' entity associated with some SKOS-described concept. + /// + /// + public const string focus = "http://xmlns.com/foaf/0.1/focus"; + + /// + ///An organization funding a project or person. + /// + /// + public const string fundedBy = "http://xmlns.com/foaf/0.1/fundedBy"; + + /// + ///A textual geekcode for this person, see http://www.geekcode.com/geek.html + /// + /// + public const string geekcode = "http://xmlns.com/foaf/0.1/geekcode"; + + /// + ///The gender of this Agent (typically but not necessarily 'male' or 'female'). + /// + /// + public const string gender = "http://xmlns.com/foaf/0.1/gender"; + + /// + ///The given name of some person. + /// + /// + public const string givenName = "http://xmlns.com/foaf/0.1/givenName"; + + /// + ///The given name of some person. + /// + /// + public const string givenname = "http://xmlns.com/foaf/0.1/givenname"; + + /// + ///Indicates an account held by this agent. + /// + /// + public const string holdsAccount = "http://xmlns.com/foaf/0.1/holdsAccount"; + + /// + ///A homepage for some thing. + /// + /// + public const string homepage = "http://xmlns.com/foaf/0.1/homepage"; + + /// + ///An ICQ chat ID + /// + /// + public const string icqChatID = "http://xmlns.com/foaf/0.1/icqChatID"; + + /// + ///An image that can be used to represent some thing (ie. those depictions which are particularly representative of something, eg. one's photo on a homepage). + /// + /// + public const string img = "http://xmlns.com/foaf/0.1/img"; + + /// + ///A page about a topic of interest to this person. + /// + /// + public const string interest = "http://xmlns.com/foaf/0.1/interest"; + + /// + ///A document that this thing is the primary topic of. + /// + /// + public const string isPrimaryTopicOf = "http://xmlns.com/foaf/0.1/isPrimaryTopicOf"; + + /// + ///A jabber ID for something. 
+ /// + /// + public const string jabberID = "http://xmlns.com/foaf/0.1/jabberID"; + + /// + ///A person known by this person (indicating some level of reciprocated interaction between the parties). + /// + /// + public const string knows = "http://xmlns.com/foaf/0.1/knows"; + + /// + ///The last name of a person. + /// + /// + public const string lastName = "http://xmlns.com/foaf/0.1/lastName"; + + /// + ///A logo representing some thing. + /// + /// + public const string logo = "http://xmlns.com/foaf/0.1/logo"; + + /// + ///Something that was made by this agent. + /// + /// + public const string made = "http://xmlns.com/foaf/0.1/made"; + + /// + ///An agent that made this thing. + /// + /// + public const string maker = "http://xmlns.com/foaf/0.1/maker"; + + /// + ///A personal mailbox, ie. an Internet mailbox associated with exactly one owner, the first owner of this mailbox. This is a 'static inverse functional property', in that there is (across time and change) at most one individual that ever has any particular value for foaf:mbox. + /// + /// + public const string mbox = "http://xmlns.com/foaf/0.1/mbox"; + + /// + ///The sha1sum of the URI of an Internet mailbox associated with exactly one owner, the first owner of the mailbox. + /// + /// + public const string mbox_sha1sum = "http://xmlns.com/foaf/0.1/mbox_sha1sum"; + + /// + ///Indicates a member of a Group + /// + /// + public const string member = "http://xmlns.com/foaf/0.1/member"; + + /// + ///Indicates the class of individuals that are a member of a Group + /// + /// + public const string membershipClass = "http://xmlns.com/foaf/0.1/membershipClass"; + + /// + ///An MSN chat ID + /// + /// + public const string msnChatID = "http://xmlns.com/foaf/0.1/msnChatID"; + + /// + ///A Myers Briggs (MBTI) personality classification. + /// + /// + public const string myersBriggs = "http://xmlns.com/foaf/0.1/myersBriggs"; + + /// + ///A name for some thing. 
+ /// + /// + public const string name = "http://xmlns.com/foaf/0.1/name"; + + /// + ///A short informal nickname characterising an agent (includes login identifiers, IRC and other chat nicknames). + /// + /// + public const string nick = "http://xmlns.com/foaf/0.1/nick"; + + /// + ///An OpenID for an Agent. + /// + /// + public const string openid = "http://xmlns.com/foaf/0.1/openid"; + + /// + ///A page or document about this thing. + /// + /// + public const string page = "http://xmlns.com/foaf/0.1/page"; + + /// + ///A project this person has previously worked on. + /// + /// + public const string pastProject = "http://xmlns.com/foaf/0.1/pastProject"; + + /// + ///A phone, specified using fully qualified tel: URI scheme (refs: http://www.w3.org/Addressing/schemes.html#tel). + /// + /// + public const string phone = "http://xmlns.com/foaf/0.1/phone"; + + /// + ///A .plan comment, in the tradition of finger and '.plan' files. + /// + /// + public const string plan = "http://xmlns.com/foaf/0.1/plan"; + + /// + ///The primary topic of some page or document. + /// + /// + public const string primaryTopic = "http://xmlns.com/foaf/0.1/primaryTopic"; + + /// + ///A link to the publications of this person. + /// + /// + public const string publications = "http://xmlns.com/foaf/0.1/publications"; + + /// + ///A homepage of a school attended by the person. + /// + /// + public const string schoolHomepage = "http://xmlns.com/foaf/0.1/schoolHomepage"; + + /// + ///A sha1sum hash, in hex. + /// + /// + public const string sha1 = "http://xmlns.com/foaf/0.1/sha1"; + + /// + ///A Skype ID + /// + /// + public const string skypeID = "http://xmlns.com/foaf/0.1/skypeID"; + + /// + ///A string expressing what the user is happy for the general public (normally) to know about their current activity. + /// + /// + public const string status = "http://xmlns.com/foaf/0.1/status"; + + /// + ///The surname of some person. 
+ /// + /// + public const string surname = "http://xmlns.com/foaf/0.1/surname"; + + /// + ///A theme. + /// + /// + public const string theme = "http://xmlns.com/foaf/0.1/theme"; + + /// + ///A derived thumbnail image. + /// + /// + public const string thumbnail = "http://xmlns.com/foaf/0.1/thumbnail"; + + /// + ///A tipjar document for this agent, describing means for payment and reward. + /// + /// + public const string tipjar = "http://xmlns.com/foaf/0.1/tipjar"; + + /// + ///Title (Mr, Mrs, Ms, Dr. etc) + /// + /// + public const string title_0 = "http://xmlns.com/foaf/0.1/title"; + + /// + ///A topic of some page or document. + /// + /// + public const string topic = "http://xmlns.com/foaf/0.1/topic"; + + /// + ///A thing of interest to this person. + /// + /// + public const string topic_interest = "http://xmlns.com/foaf/0.1/topic_interest"; + + /// + ///A weblog of some thing (whether person, group, company etc.). + /// + /// + public const string weblog = "http://xmlns.com/foaf/0.1/weblog"; + + /// + ///A work info homepage of some person; a page about their work for some organization. + /// + /// + public const string workInfoHomepage = "http://xmlns.com/foaf/0.1/workInfoHomepage"; + + /// + ///A workplace homepage of some person; the homepage of an organization they work for. 
+ /// + /// + public const string workplaceHomepage = "http://xmlns.com/foaf/0.1/workplaceHomepage"; + + /// + ///A Yahoo chat ID + /// + /// + public const string yahooChatID = "http://xmlns.com/foaf/0.1/yahooChatID"; + + /// + /// + /// + /// + public const string assurance = "http://xmlns.com/wot/0.1/assurance"; + + /// + /// + /// + /// + public const string src_assurance = "http://xmlns.com/wot/0.1/src_assurance"; + } +} diff --git a/Trinity.Tests/ModelGroupTest.cs b/Trinity.Tests/ModelGroupTest.cs index c1d891a..ca82c3e 100644 --- a/Trinity.Tests/ModelGroupTest.cs +++ b/Trinity.Tests/ModelGroupTest.cs @@ -25,12 +25,10 @@ // // Copyright (c) Semiodesk GmbH 2015 -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; using NUnit.Framework; using Semiodesk.Trinity.Ontologies; +using System; +using System.Linq; namespace Semiodesk.Trinity.Test { diff --git a/Trinity.Tests/ModelTest.cs b/Trinity.Tests/ModelTest.cs index c85c842..4369bba 100644 --- a/Trinity.Tests/ModelTest.cs +++ b/Trinity.Tests/ModelTest.cs @@ -30,12 +30,8 @@ using System.Linq; using System.Text; using NUnit.Framework; -using Semiodesk.Trinity; -using System.Diagnostics; using Semiodesk.Trinity.Ontologies; using System.IO; -using System.Threading; -using Semiodesk.Trinity.Test; namespace Semiodesk.Trinity.Test { @@ -420,12 +416,11 @@ public void GetTypedResourcesTest() contact2.Fullname = "Hans"; contact2.Commit(); - var res = Model.GetResources(); - - Assert.AreEqual(2, res.Count()); - Assert.IsTrue(res.Contains(contact)); - Assert.IsTrue(res.Contains(contact2)); + var r = Model.GetResources(); + Assert.AreEqual(2, r.Count()); + Assert.IsTrue(r.Contains(contact)); + Assert.IsTrue(r.Contains(contact2)); Model.Clear(); @@ -433,15 +428,15 @@ public void GetTypedResourcesTest() personContact.Fullname = "Peter"; personContact.Commit(); - res = Model.GetResources(); - Assert.AreEqual(0, res.Count()); + r = Model.GetResources(); + Assert.AreEqual(0, r.Count()); - res = 
Model.GetResources(true); - Assert.AreEqual(1, res.Count()); + r = Model.GetResources(true); + Assert.AreEqual(1, r.Count()); var x = Model.GetResource(uriResource); - Assert.AreEqual(typeof(PersonContact), x.GetType()); + Assert.AreEqual(typeof(PersonContact), x.GetType()); } [Test] @@ -562,7 +557,7 @@ public void ReadFromStringTest() [Test] public void TestAddMultipleResources() { - Assert.Inconclusive("Reevaluate with more recent version of virtuoso client library."); + Assert.Inconclusive("This test should work, it just takes too long."); Model.Clear(); for (int j = 1; j < 7; j++) { diff --git a/Trinity.Tests/PerformanceTest.cs b/Trinity.Tests/PerformanceTest.cs new file mode 100644 index 0000000..234679f --- /dev/null +++ b/Trinity.Tests/PerformanceTest.cs @@ -0,0 +1,101 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2015 + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using NUnit.Framework; +using Semiodesk.Trinity; +using System.Diagnostics; + +namespace Semiodesk.Trinity.Test +{ + [TestFixture] + public class PerformanceTest + { + #region Members + + IStore _store; + + IModel _model; + + #endregion + + [SetUp] + public void SetUp() + { + _store = StoreFactory.CreateStore("provider=virtuoso;host=localhost;port=1111;uid=dba;pw=dba"); + + UriRef uri = new UriRef("http://localhost:8899/models/PerformanceTest"); + + if (_store.ContainsModel(uri)) + { + _model = _store.GetModel(uri); + _model.Clear(); + } + else + { + _model = _store.CreateModel(uri); + } + + if (_model == null) + { + throw new Exception(string.Format("Error: Unable to create model <{0}>.", uri)); + } + + if (_model.IsEmpty) + { + PersonContact c = null; + + for (int i = 1; i < 1000; i++) + { + c = _model.CreateResource(); + c.Fullname = "Contact" + i; + c.BirthDate = DateTime.UtcNow; + c.Commit(); + } + } + } + + [Test] + public void TestGenerateResources() + { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.Start(); + + int i = 0; + + foreach (PersonContact c in _model.GetResources()) + { + i++; + } + + stopwatch.Stop(); + } + } +} diff --git a/Trinity.Tests/SetupClass.cs b/Trinity.Tests/SetupClass.cs index 104eabe..0965c03 100644 --- a/Trinity.Tests/SetupClass.cs +++ b/Trinity.Tests/SetupClass.cs @@ -49,6 +49,5 @@ public void TearDown() { instance.Stop(); } - } } diff --git a/Trinity.Tests/SparqlQueryTest.cs b/Trinity.Tests/SparqlQueryTest.cs index 766d555..9362710 100644 --- a/Trinity.Tests/SparqlQueryTest.cs +++ b/Trinity.Tests/SparqlQueryTest.cs @@ -57,6 +57,7 @@ public void SetUp() string connectionString = SetupClass.ConnectionString; Store = StoreFactory.CreateStore(string.Format("{0};rule=urn:semiodesk/test/ruleset", connectionString)); + 
Store.LoadOntologySettings(); Model = Store.GetModel(new Uri("http://example.org/TestModel")); if (!Model.IsEmpty) @@ -85,30 +86,33 @@ public void SetUp() resource0.AddProperty(nco.blogUrl, "http://blog.com/Hans"); resource0.Commit(); - IResource resource1 = Model.CreateResource(new Uri("http://example.org/Task")); - resource1.AddProperty(rdf.type, tmo.Task); - resource1.AddProperty(tmo.taskName, "Eine Aufgabe."); + IResource resource1 = Model.CreateResource(new Uri("http://example.org/Organization")); + resource1.AddProperty(rdf.type, nco.OrganizationContact); + resource1.AddProperty(nco.fullname, "ACME"); resource1.AddProperty(nco.creator, resource0); resource1.Commit(); - IResource resource2 = Model.CreateResource(new Uri("http://example.org/Doc#1")); - resource2.AddProperty(rdf.type, nfo.Document); + IResource resource2 = Model.CreateResource(new Uri("http://example.org/PhoneNumber")); + resource2.AddProperty(rdf.type, nco.PhoneNumber); resource2.AddProperty(dc.date, DateTime.Today); resource2.AddProperty(nco.creator, resource0); resource2.Commit(); // NOTE: The different name influences the ordering of the resource in query results. IResource resource3 = Model.CreateResource(new Uri("http://example.org/Boc#2")); - resource3.AddProperty(rdf.type, nfo.Document); + resource3.AddProperty(rdf.type, nco.PagerNumber); resource3.AddProperty(dc.date, DateTime.Today.AddHours(1)); resource3.AddProperty(nco.creator, resource0); resource3.Commit(); IResource resource4 = Model.CreateResource(new Uri("http://example.org/Doc#3")); - resource4.AddProperty(rdf.type, nfo.Document); + resource4.AddProperty(rdf.type, nco.PhoneNumber); resource4.AddProperty(dc.date, DateTime.Today.AddHours(2)); resource4.AddProperty(nco.creator, resource0); resource4.Commit(); + + resource0.AddProperty(nco.hasPhoneNumber, resource2); + resource0.Commit(); } [TearDown] @@ -285,7 +289,7 @@ public void TestInferencing() // Retrieving resources using the model API. 
Assert.AreEqual(true, Model.ContainsResource(new Uri("http://example.org/Hans"))); - Assert.AreEqual(true, Model.ContainsResource(new Uri("http://example.org/Task"))); + Assert.AreEqual(true, Model.ContainsResource(new Uri("http://example.org/PhoneNumber"))); SparqlQuery query; ISparqlQueryResult result; @@ -308,7 +312,7 @@ public void TestInferencing() result = Model.ExecuteQuery(query, true); Assert.AreEqual(1, result.GetBindings().Count()); - query = new SparqlQuery("ASK WHERE { sfo:isRelated . }"); + query = new SparqlQuery("ASK WHERE { nco:hasContactMedium . }"); result = Model.ExecuteQuery(query); Assert.IsFalse(result.GetAnwser()); @@ -316,16 +320,16 @@ public void TestInferencing() result = Model.ExecuteQuery(query, true); Assert.IsTrue(result.GetAnwser()); - query = new SparqlQuery("DESCRIBE ?element WHERE { ?element sfo:isRelated . }"); + query = new SparqlQuery("DESCRIBE ?element WHERE { ?element nco:hasContactMedium . }"); result = Model.ExecuteQuery(query); Assert.AreEqual(0, result.GetResources().Count()); result = Model.ExecuteQuery(query, true); - Assert.AreEqual(4, result.GetResources().Count()); + Assert.AreEqual(1, result.GetResources().Count()); - query = new SparqlQuery("DESCRIBE ?doc WHERE { ?doc rdf:type nfo:Document . ?doc nco:creator . ?doc dc:date ?date . } ORDER BY ASC(?date)"); - result = Model.ExecuteQuery(query); + query = new SparqlQuery("DESCRIBE ?doc WHERE { ?doc rdf:type nco:ContactMedium . ?doc nco:creator . ?doc dc:date ?date . } ORDER BY ASC(?date)"); + result = Model.ExecuteQuery(query, true); Assert.AreEqual(3, result.GetResources().Count()); DateTime? 
c = null; @@ -335,7 +339,7 @@ public void TestInferencing() { d = (DateTime)r.GetValue(dc.date); - if (c != null) { Assert.Greater(c, d); } + if (c != null) { Assert.Greater(d, c); } c = d; } @@ -416,26 +420,26 @@ public void TestQueryParameters() [Test] public void TestSelectCount() { - SparqlQuery query = new SparqlQuery("SELECT COUNT(?s) AS ?count WHERE { ?s rdf:type nfo:Document. }"); + SparqlQuery query = new SparqlQuery("SELECT COUNT(?s) AS ?count WHERE { ?s rdf:type nco:PhoneNumber. }"); ISparqlQueryResult result = Model.ExecuteQuery(query); var bindings = result.GetBindings(); Assert.AreEqual(1, bindings.Count()); - Assert.AreEqual(3, bindings.First()["count"]); + Assert.AreEqual(2, bindings.First()["count"]); } [Test] public void TestCount() { - SparqlQuery query = new SparqlQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o. ?s rdf:type nfo:Document. }"); + SparqlQuery query = new SparqlQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o. ?s rdf:type nco:PhoneNumber. }"); ISparqlQueryResult result = Model.ExecuteQuery(query); - Assert.AreEqual(3, result.Count()); + Assert.AreEqual(2, result.Count()); - query = new SparqlQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o. ?s rdf:type nfo:Document. }"); + query = new SparqlQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o. ?s rdf:type nco:PhoneNumber. }"); result = Model.ExecuteQuery(query); - Assert.AreEqual(3, result.Count()); + Assert.AreEqual(2, result.Count()); } [Test] @@ -521,89 +525,6 @@ ORDER BY ?o List resources = result.GetResources().ToList(); } - [Test] - public void TestIsOrdered() - { - Assert.Inconclusive("Reevaluate with more recent version of virtuoso client library."); - - MethodInfo isOrdered; - - SparqlQuery query = new SparqlQuery(@" - SELECT ?s0 ?p0 ?o0 WHERE - { - ?s0 ?p0 ?o0 . - { - SELECT DISTINCT ?s0 WHERE - { - ?s ?p ?o. - ?s @type @class . - - { - ?s ?p1 ?o1 . - FILTER ISLITERAL(?o1) . FILTER REGEX(STR(?o1), '', 'i') . - } - UNION - { - ?s ?p1 ?s1 . - ?s1 ?p2 ?o2 . - FILTER ISLITERAL(?o2) . 
FILTER REGEX(STR(?o2), '', 'i') . - } - } - ORDER BY ?o - } - }"); - - query.Bind("@type", rdf.type); - query.Bind("@class", tmo.Task); - - isOrdered = query.GetType().GetMethod("IsOrdered", BindingFlags.NonPublic | BindingFlags.Instance); - Assert.AreEqual(true, isOrdered.Invoke(query, null)); - - query = new SparqlQuery(@" - SELECT ?s0 ?p0 ?o0 WHERE - { - ?s0 ?p0 ?o0 . - { - SELECT DISTINCT ?s0 WHERE - { - ?s ?p ?o. - ?s @type @class . - - { - ?s ?p1 ?o1 . - FILTER ISLITERAL(?o1) . FILTER REGEX(STR(?o1), '', 'i') . - } - UNION - { - ?s ?p1 ?s1 . - ?s1 ?p2 ?o2 . - FILTER ISLITERAL(?o2) . FILTER REGEX(STR(?o2), '', 'i') . - } - } - } - }"); - - query.Bind("@type", rdf.type); - query.Bind("@class", tmo.Task); - - isOrdered = query.GetType().GetMethod("IsOrdered", BindingFlags.NonPublic | BindingFlags.Instance); - Assert.AreEqual(false, isOrdered.Invoke(query, null)); - - query = new SparqlQuery(@" - SELECT DISTINCT ?s0 - FROM - WHERE - { - ?s0 ?p0 ?o0 . - ?s0 a nfo:Visual . - ?s0 nexif:dateTime ?o1 . 
- } - ORDER BY ASC(?o1) LIMIT 50"); - - isOrdered = query.GetType().GetMethod("IsOrdered", BindingFlags.NonPublic | BindingFlags.Instance); - Assert.AreEqual(true, isOrdered.Invoke(query, null)); - } - [Test] public void TestModelGroup() { diff --git a/Trinity.Tests/Stardog/ResourceMappingTest.cs b/Trinity.Tests/Stardog/ResourceMappingTest.cs index 015eec7..73bf7ad 100644 --- a/Trinity.Tests/Stardog/ResourceMappingTest.cs +++ b/Trinity.Tests/Stardog/ResourceMappingTest.cs @@ -897,27 +897,6 @@ public void MappingTypeTest() Assert.AreEqual(t3, r3); } - [Test] - public void MappingTypeWithInferencingTest() - { - Assert.Inconclusive(); - //IModel m = GetModel(); - //m.Clear(); - - //Uri t3Uri = new Uri("semio:test:testInstance3"); - //PersonContact t3 = m.CreateResource(t3Uri); - //t3.NameGiven = "Hans"; - //t3.Commit(); - - //ResourceQuery q = new ResourceQuery(nco.Contact); - - //var queryResult = m.ExecuteQuery(q, true); - - //var resources = queryResult.GetResources().ToList(); - - } - - [Test] public void RollbackTest() diff --git a/Trinity.Tests/Stardog/StoreTest.cs b/Trinity.Tests/Stardog/StoreTest.cs index c1e2a2f..160f946 100644 --- a/Trinity.Tests/Stardog/StoreTest.cs +++ b/Trinity.Tests/Stardog/StoreTest.cs @@ -55,11 +55,13 @@ namespace Semiodesk.Trinity.Test.Stardog class StardogStoreTest { IStore Store; + Uri testModel = new Uri("ex:Test"); [SetUp] public void SetUp() { Store = StoreFactory.CreateStore("provider=stardog;host=http://localhost:5820;uid=admin;pw=admin;sid=test"); + Store.RemoveModel(testModel); } [TearDown] @@ -72,7 +74,7 @@ public void TearDown() [Test] public void LoadOntologiesTest() { - Uri testModel = new Uri("ex:Test"); + Store.InitializeFromConfiguration(); @@ -82,12 +84,13 @@ public void LoadOntologiesTest() [Test] public void LoadOntologiesFromFileTest() { - Uri testModel = new Uri("ex:Test"); + Assert.Inconclusive("How to make sure we have an empty store"); + /* string configFile = Path.Combine(Environment.CurrentDirectory, 
"custom.config"); Store.InitializeFromConfiguration(configFile); Assert.AreEqual(4, Store.ListModels().Count()); - + */ } @@ -102,7 +105,6 @@ public void ListModelsTest() [Test] public void AddModelTest() { - Uri testModel = new Uri("ex:Test"); IModel m = Store.CreateModel(testModel); @@ -113,7 +115,6 @@ public void AddModelTest() [Test] public void ContainsModelTest() { - Uri testModel = new Uri("ex:Test"); Store.RemoveModel(testModel); Assert.IsFalse(Store.ContainsModel(testModel)); @@ -135,9 +136,10 @@ public void ContainsModelTest() [Test] public void GetModelTest() { - Uri testModel = new Uri("ex:Test"); IModel m1 = Store.CreateModel(testModel); + Assert.IsTrue(m1.IsEmpty); + IResource r = m1.CreateResource(new Uri("ex:test:resource")); r.AddProperty(new Property(new Uri("ex:test:property")), "var"); @@ -152,7 +154,6 @@ public void GetModelTest() [Test] public void RemoveModelTest() { - Uri testModel = new Uri("ex:Test"); Store.RemoveModel(testModel); diff --git a/Trinity.Tests/Trinity.Tests.csproj b/Trinity.Tests/Trinity.Tests.csproj index 495ef44..2c9bd13 100644 --- a/Trinity.Tests/Trinity.Tests.csproj +++ b/Trinity.Tests/Trinity.Tests.csproj @@ -20,7 +20,7 @@ - + diff --git a/Trinity.Tests/UriRefTest.cs b/Trinity.Tests/UriRefTest.cs index 511209c..1577e65 100644 --- a/Trinity.Tests/UriRefTest.cs +++ b/Trinity.Tests/UriRefTest.cs @@ -25,12 +25,7 @@ // // Copyright (c) Semiodesk GmbH 2015 -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; using NUnit.Framework; -using Semiodesk.Trinity; namespace Semiodesk.Trinity.Test { diff --git a/Trinity.Tests/dotnetrdf/ResourceMappingTest.cs b/Trinity.Tests/dotnetrdf/ResourceMappingTest.cs index a042efd..aa59ada 100644 --- a/Trinity.Tests/dotnetrdf/ResourceMappingTest.cs +++ b/Trinity.Tests/dotnetrdf/ResourceMappingTest.cs @@ -901,27 +901,6 @@ public void MappingTypeTest() Assert.AreEqual(t3, r3); } - [Test] - public void MappingTypeWithInferencingTest() - { - Assert.Inconclusive(); 
- //IModel m = GetModel(); - //m.Clear(); - - //Uri t3Uri = new Uri("semio:test:testInstance3"); - //PersonContact t3 = m.CreateResource(t3Uri); - //t3.NameGiven = "Hans"; - //t3.Commit(); - - //ResourceQuery q = new ResourceQuery(nco.Contact); - - //var queryResult = m.ExecuteQuery(q, true); - - //var resources = queryResult.GetResources().ToList(); - - } - - [Test] public void RollbackTest() diff --git a/Trinity.Tests/dotnetrdf/StoreTest.cs b/Trinity.Tests/dotnetrdf/StoreTest.cs index d635dee..0b808e0 100644 --- a/Trinity.Tests/dotnetrdf/StoreTest.cs +++ b/Trinity.Tests/dotnetrdf/StoreTest.cs @@ -59,7 +59,9 @@ public void LoadOntologiesTest() Store.InitializeFromConfiguration(); + // Note: the NCO ontology contains a metadata graph Assert.AreEqual(7, Store.ListModels().Count()); + } [Test] diff --git a/Trinity.Tests/dotnetrdf_ResourceQueryTest.cs b/Trinity.Tests/dotnetrdf_ResourceQueryTest.cs index 1bce8b3..188dd01 100644 --- a/Trinity.Tests/dotnetrdf_ResourceQueryTest.cs +++ b/Trinity.Tests/dotnetrdf_ResourceQueryTest.cs @@ -175,6 +175,7 @@ public void TestSort() { Assert.Inconclusive("Test with newer version of dotNetRDF"); + ResourceQuery b = new ResourceQuery(nco.PersonContact); b.Where(nco.birthDate).LessThan(new DateTime(1990, 1, 1)).SortAscending(); diff --git a/Trinity.Tests/dotnetrdf_SparqlQueryTest.cs b/Trinity.Tests/dotnetrdf_SparqlQueryTest.cs index 4e971b2..327d6ff 100644 --- a/Trinity.Tests/dotnetrdf_SparqlQueryTest.cs +++ b/Trinity.Tests/dotnetrdf_SparqlQueryTest.cs @@ -198,7 +198,7 @@ public void TestDescribe() [Test] public void TestConstruct() { -// Assert.Inconclusive("Blank nodes are currently problematic."); + // Assert.Inconclusive("Blank nodes are currently problematic."); SparqlQuery query = new SparqlQuery(@" CONSTRUCT { @@ -219,6 +219,8 @@ public void TestConstruct() [Test] public void TestInferencing() { + Assert.Inconclusive("Inferencing in dotNetRDF Memory store is still not really supported."); + Store = 
StoreFactory.CreateStore("provider=dotnetrdf;schema=Models/test-vocab.rdf"); diff --git a/Trinity/MappingDiscovery.cs b/Trinity/MappingDiscovery.cs index 3204c6a..59c35cc 100644 --- a/Trinity/MappingDiscovery.cs +++ b/Trinity/MappingDiscovery.cs @@ -152,6 +152,30 @@ public static void AddMappingClass(Type _class) } } + internal static IEnumerable ListMappings(Type _class) + { + Type propertyMappingType = typeof(IPropertyMapping); + + Resource resource; + + try + { + resource = (Resource)Activator.CreateInstance(_class, new UriRef("semio:empty")); + } + catch (Exception e) + { + throw new Exception(string.Format("Initialisation of mapping class {0} failed. For the reason please consult the inner exception.", _class.ToString()), e); + } + + foreach (var x in _class.GetFields()) + { + if (propertyMappingType.IsAssignableFrom(x.FieldType)) + { + yield return x.GetValue(resource) as IPropertyMapping; + } + } + } + public static void GetBaseTypes(Type _class, ref List baseTypes) { if (_class.BaseType == typeof(Resource) || _class.BaseType == typeof(Object)) @@ -162,7 +186,6 @@ public static void GetBaseTypes(Type _class, ref List baseTypes) baseTypes.AddRange(r.GetTypes()); GetBaseTypes(_class.BaseType, ref baseTypes); - } /// @@ -170,11 +193,11 @@ public static void GetBaseTypes(Type _class, ref List baseTypes) /// public static void RegisterCallingAssembly() { - Assembly asm = Assembly.GetCallingAssembly(); + Assembly a = Assembly.GetCallingAssembly(); - if (!RegisteredAssemblies.Contains(asm.GetName().FullName)) + if (!RegisteredAssemblies.Contains(a.GetName().FullName)) { - MappingDiscovery.RegisterAssembly(asm); + RegisterAssembly(a); } } @@ -187,7 +210,9 @@ public static void RegisterAllCurrentAssemblies() foreach (Assembly a in AppDomain.CurrentDomain.GetAssemblies()) { if (!RegisteredAssemblies.Contains(a.GetName().FullName)) + { RegisterAssembly(a); + } } } @@ -198,7 +223,9 @@ public static void RegisterAllCurrentAssemblies() public static void 
RegisterAssembly(Assembly asm) { RegisteredAssemblies.Add(asm.GetName().FullName); + IList l = GetMappingClasses(asm); + AddMappingClasses(l); } @@ -206,9 +233,7 @@ private static IList GetMappingClasses(Assembly asm) { try { - return (IList)(from t in asm.GetTypes() - where typeof(Resource).IsAssignableFrom(t) - select t).ToList(); + return (from t in asm.GetTypes() where typeof(Resource).IsAssignableFrom(t) select t).ToList(); } catch { @@ -224,12 +249,12 @@ where typeof(Resource).IsAssignableFrom(t) /// Should inferencing be factored in. public static Type[] GetMatchingTypes(IEnumerable classes, Type type, bool inferencingEnabled = false) { - if( !inferencingEnabled ) + if (!inferencingEnabled) { return (from t in MappingClasses where t.RdfClasses.Intersect(classes).Count() == t.RdfClasses.Length && type.IsAssignableFrom(t.MappingClassType) orderby t.BaseClassCount descending - select t.MappingClassType).ToArray(); + select t.MappingClassType).ToArray(); } else { diff --git a/Trinity/Model/IModel.cs b/Trinity/Model/IModel.cs index 77f20b8..41ee61c 100644 --- a/Trinity/Model/IModel.cs +++ b/Trinity/Model/IModel.cs @@ -30,6 +30,7 @@ using System.Collections.Generic; using System.Data; using System.IO; +using System.Linq; namespace Semiodesk.Trinity { @@ -273,6 +274,13 @@ public interface IModel /// An enumeration of resources that match the given query. IEnumerable GetResources(bool inferenceEnabled = false, ITransaction transaction = null) where T : Resource; + /// + /// TODO + /// + /// + /// + IQueryable AsQueryable(bool inferenceEnabled = false) where T : Resource; + /// /// Executes a SPARQL query and provides an enumeration of matching resources. 
/// diff --git a/Trinity/Model/Model.cs b/Trinity/Model/Model.cs index 932dc56..dcbc8e2 100644 --- a/Trinity/Model/Model.cs +++ b/Trinity/Model/Model.cs @@ -31,6 +31,8 @@ using System.IO; using System.Reflection; using Newtonsoft.Json; +using Semiodesk.Trinity.Query; +using Remotion.Linq.Parsing.Structure; namespace Semiodesk.Trinity { @@ -642,6 +644,20 @@ public IEnumerable GetResources(bool inferenceEnabled = false, ITransactio return GetResources(query, inferenceEnabled, transaction); } + /// + /// TODO + /// + /// + /// + public IQueryable AsQueryable(bool inferenceEnabled = false) where T : Resource + { + SparqlQueryExecutor executor = new SparqlQueryExecutor(this, inferenceEnabled); + + QueryParser queryParser = QueryParser.CreateDefault(); + + return new SparqlQueryable(queryParser, executor); + } + /// /// Executes a SPARQL-select query and provides a list of binding sets. This method /// implements transparent type marshalling and delivers the bound variables in C# diff --git a/Trinity/Model/ModelGroup.cs b/Trinity/Model/ModelGroup.cs index c756878..d9c3507 100644 --- a/Trinity/Model/ModelGroup.cs +++ b/Trinity/Model/ModelGroup.cs @@ -25,6 +25,8 @@ // // Copyright (c) Semiodesk GmbH 2017 +using Remotion.Linq.Parsing.Structure; +using Semiodesk.Trinity.Query; using System; using System.Collections; using System.Collections.Generic; @@ -94,6 +96,11 @@ public ModelGroup(IStore store, IEnumerable models) } } } + + public ModelGroup(IStore store, params IModel[] models) : this(store, (IEnumerable)models) + { + + } #endregion #region Methods @@ -598,6 +605,20 @@ public IEnumerable GetResources(bool inferenceEnabled = false, ITransactio return null; } + /// + /// TODO + /// + /// + /// + public IQueryable AsQueryable(bool inferenceEnabled = false) where T : Resource + { + SparqlQueryExecutor executor = new SparqlQueryExecutor(this, inferenceEnabled); + + QueryParser queryParser = QueryParser.CreateDefault(); + + return new SparqlQueryable(queryParser, executor); 
+ } + /// /// Executes a SPARQL query and provides an enumeration of matching resources. /// diff --git a/Trinity/OntologyDiscovery.cs b/Trinity/OntologyDiscovery.cs index 2e5c18d..a60ec88 100644 --- a/Trinity/OntologyDiscovery.cs +++ b/Trinity/OntologyDiscovery.cs @@ -28,9 +28,7 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Text; using System.Reflection; -using System.Diagnostics; namespace Semiodesk.Trinity { diff --git a/Trinity/Query/Linq/ExpressionTreeVisitor.cs b/Trinity/Query/Linq/ExpressionTreeVisitor.cs new file mode 100644 index 0000000..dec0c18 --- /dev/null +++ b/Trinity/Query/Linq/ExpressionTreeVisitor.cs @@ -0,0 +1,564 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses; +using Remotion.Linq.Clauses.Expressions; +using Remotion.Linq.Clauses.ResultOperators; +using Remotion.Linq.Parsing; +using System; +using System.Linq.Expressions; +using System.Text.RegularExpressions; +using VDS.RDF.Query; +using VDS.RDF.Query.Builder; +#if NET35 +using Semiodesk.Trinity.Utility; +#endif + +namespace Semiodesk.Trinity.Query +{ + class ExpressionTreeVisitor : ThrowingExpressionVisitor + { +#region Members + + protected ISparqlQueryModelVisitor QueryModelVisitor; + + protected ISparqlQueryGeneratorTree QueryGeneratorTree; + +#endregion + +#region Constructors + + public ExpressionTreeVisitor(ISparqlQueryModelVisitor queryModelVisitor, ISparqlQueryGeneratorTree queryGeneratorTree) + { + QueryModelVisitor = queryModelVisitor; + QueryGeneratorTree = queryGeneratorTree; + } + +#endregion + +#region Methods + + private void HandleRegexMethodCallExpression(Expression expression, string regex, bool ignoreCase = false) + { + ISparqlQueryGenerator currentGenerator = QueryGeneratorTree.CurrentGenerator; + + if (expression is MemberExpression) + { + MemberExpression member = expression as MemberExpression; + + currentGenerator.FilterRegex(member, regex, ignoreCase); + } + else if (expression is SubQueryExpression) + { + currentGenerator.FilterRegex(currentGenerator.ObjectVariable, regex, ignoreCase); + } + } + + private void VisitBinaryAndAlsoExpression(BinaryExpression expression) + { + Visit(expression.Left); + Visit(expression.Right); + } + + private void VisitBinaryOrElseExpression(BinaryExpression expression) + { + ISparqlQueryGenerator currentGenerator = QueryGeneratorTree.CurrentGenerator; + + // Get the currently active pattern builder so that we can reset it after we're done. + // This will build nested UNIONS ({{x} UNION {y}} UNION {z}) for multiple alternative + // OR expressions. 
While this is not elegant, it is logically correct and can be optimized + // by the storage backend. + IGraphPatternBuilder patternBuilder = currentGenerator.PatternBuilder; + + currentGenerator.Union( + (left) => + { + currentGenerator.PatternBuilder = left; + Visit(expression.Left); + }, + (right) => + { + currentGenerator.PatternBuilder = right; + Visit(expression.Right); + } + ); + + // Reset the pattern builder that was used before implementing the unions. + currentGenerator.PatternBuilder = patternBuilder; + } + + private void VisitBinaryConstantExpression(BinaryExpression expression) + { + ConstantExpression constant = expression.TryGetExpressionOfType(); + + if (expression.HasExpressionOfType()) + { + MemberExpression member = expression.TryGetExpressionOfType(); + + VisitBinaryMemberExpression(expression.NodeType, member, constant); + } + else if (expression.HasExpressionOfType()) + { + SubQueryExpression subQuery = expression.TryGetExpressionOfType(); + + VisitBinarySubQueryExpression(expression.NodeType, subQuery, constant); + } + else if(expression.HasExpressionOfType()) + { + QuerySourceReferenceExpression querySource = expression.TryGetExpressionOfType(); + + VisitBinaryQuerySourceReferenceExpression(expression.NodeType, querySource, constant); + } + } + + private void VisitBinaryQuerySourceReferenceExpression(ExpressionType type, QuerySourceReferenceExpression sourceExpression, ConstantExpression constant) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + SparqlVariable s = g.VariableGenerator.TryGetSubjectVariable(sourceExpression) ?? 
g.VariableGenerator.GlobalSubject; + + switch (type) + { + case ExpressionType.Equal: + g.WhereEqual(s, constant); + break; + case ExpressionType.NotEqual: + g.WhereNotEqual(s, constant); + break; + default: + throw new NotSupportedException(type.ToString()); + } + } + + private void VisitBinaryMemberExpression(ExpressionType type, MemberExpression member, ConstantExpression constant) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + switch (type) + { + case ExpressionType.Equal: + g.WhereEqual(member, constant); + break; + case ExpressionType.NotEqual: + g.WhereNotEqual(member, constant); + break; + case ExpressionType.GreaterThan: + g.WhereGreaterThan(member, constant); + break; + case ExpressionType.GreaterThanOrEqual: + g.WhereGreaterThanOrEqual(member, constant); + break; + case ExpressionType.LessThan: + g.WhereLessThan(member, constant); + break; + case ExpressionType.LessThanOrEqual: + g.WhereLessThanOrEqual(member, constant); + break; + default: + throw new NotSupportedException(type.ToString()); + } + } + + private void VisitBinarySubQueryExpression(ExpressionType type, SubQueryExpression subQuery, ConstantExpression constant) + { + if (!QueryGeneratorTree.HasQueryGenerator(subQuery)) + { + VisitSubQuery(subQuery); + } + + ISparqlQueryGenerator g = QueryGeneratorTree.GetQueryGenerator(subQuery); + + // Note: We write the filter into the sub query generator which is writing into it's + // enclosing graph group pattern rather than the query itself. This is required for + // supporting OpenLink Virtuoso (see SparqlQueryGenerator.Child()). 
+ switch (type) + { + case ExpressionType.Equal: + g.WhereEqual(g.ObjectVariable, constant); + break; + case ExpressionType.NotEqual: + g.WhereNotEqual(g.ObjectVariable, constant); + break; + case ExpressionType.GreaterThan: + g.WhereGreaterThan(g.ObjectVariable, constant); + break; + case ExpressionType.GreaterThanOrEqual: + g.WhereGreaterThanOrEqual(g.ObjectVariable, constant); + break; + case ExpressionType.LessThan: + g.WhereLessThan(g.ObjectVariable, constant); + break; + case ExpressionType.LessThanOrEqual: + g.WhereLessThanOrEqual(g.ObjectVariable, constant); + break; + default: + throw new NotSupportedException(type.ToString()); + } + } + + protected override Expression VisitBinary(BinaryExpression expression) + { + switch (expression.NodeType) + { + case ExpressionType.Equal: + case ExpressionType.NotEqual: + case ExpressionType.GreaterThan: + case ExpressionType.GreaterThanOrEqual: + case ExpressionType.LessThan: + case ExpressionType.LessThanOrEqual: + { + if (expression.HasExpressionOfType()) + { + VisitBinaryConstantExpression(expression); + } + break; + } + case ExpressionType.AndAlso: + { + VisitBinaryAndAlsoExpression(expression); + break; + } + case ExpressionType.OrElse: + { + VisitBinaryOrElseExpression(expression); + break; + } + } + + return expression; + } + + protected override Expression VisitConditional(ConditionalExpression expression) + { + throw new NotImplementedException(); + } + + protected override Expression VisitConstant(ConstantExpression expression) + { + return expression; + } + + protected override Expression VisitInvocation(InvocationExpression expression) + { + throw new NotSupportedException(); + } + + protected override MemberAssignment VisitMemberAssignment(MemberAssignment memberAssigment) + { + return base.VisitMemberAssignment(memberAssigment); + } + + protected override MemberBinding VisitMemberBinding(MemberBinding expression) + { + return base.VisitMemberBinding(expression); + } + + protected override Expression 
VisitListInit(ListInitExpression expression) + { + throw new NotImplementedException(); + } + + protected override Expression VisitMember(MemberExpression expression) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + SparqlVariable o = g.VariableGenerator.TryGetObjectVariable(expression); + + if(o == null) + { + // We have not visited the member before. It might be accessed in an order by clause.. + if(g.QueryModel.HasOrdering(expression)) + { + o = g.VariableGenerator.CreateObjectVariable(expression); + + g.Where(expression, o); + } + else if(expression.Type == typeof(bool)) + { + ConstantExpression constantExpression = Expression.Constant(true); + + g.WhereEqual(expression, constantExpression); + } + } + else + { + // We have visited the member before, either in the FromExpression or a SubQueryExpression. + g.Where(expression, o); + } + + return expression; + } + + protected override Expression VisitMemberInit(MemberInitExpression expression) + { + throw new NotSupportedException(); + } + + protected override Expression VisitMethodCall(MethodCallExpression expression) + { + string method = expression.Method.Name; + + switch(method) + { + case "Equals": + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + ConstantExpression arg0 = expression.Arguments[0] as ConstantExpression; + + if (expression.Object is MemberExpression) + { + MemberExpression member = expression.Object as MemberExpression; + + g.WhereEqual(member, arg0); + } + else + { + g.WhereEqual(g.ObjectVariable, arg0); + } + + return expression; + } + case "Contains": + { + Expression o = expression.Object; + string pattern = expression.GetArgumentValue(0); + + HandleRegexMethodCallExpression(o, pattern); + + return expression; + } + case "StartsWith": + { + object[] args = new object[] + { + true, + StringComparison.CurrentCultureIgnoreCase, + StringComparison.InvariantCultureIgnoreCase + }; + + Expression o = expression.Object; + string pattern = "^" + 
expression.GetArgumentValue(0); + bool ignoreCase = expression.HasArgumentValueFromAlternatives(1, args); + + HandleRegexMethodCallExpression(o, pattern, ignoreCase); + + return expression; + } + case "EndsWith": + { + object[] args = new object[] + { + true, + StringComparison.CurrentCultureIgnoreCase, + StringComparison.InvariantCultureIgnoreCase + }; + + Expression o = expression.Object; + string pattern = expression.GetArgumentValue(0) + "$"; + bool ignoreCase = expression.HasArgumentValueFromAlternatives(1, args); + + HandleRegexMethodCallExpression(o, pattern, ignoreCase); + + return expression; + } + case "IsMatch": + { + if (expression.Method.DeclaringType == typeof(Regex)) + { + Expression o = expression.Arguments[0]; + string pattern = expression.GetArgumentValue(1) + "$"; + RegexOptions options = expression.GetArgumentValue(2, RegexOptions.None); + + HandleRegexMethodCallExpression(o, pattern, options.HasFlag(RegexOptions.IgnoreCase)); + + return expression; + } + + break; + } + } + + throw new NotSupportedException(); + } + + protected override Expression VisitNew(NewExpression expression) + { + throw new NotSupportedException(); + } + + protected override Expression VisitNewArray(NewArrayExpression expression) + { + throw new NotSupportedException(); + } + + protected override Expression VisitParameter(ParameterExpression expression) + { + throw new NotImplementedException(); + } + + protected override Expression VisitQuerySourceReference(QuerySourceReferenceExpression expression) + { + throw new NotImplementedException(); + } + + protected override Expression VisitSubQuery(SubQueryExpression expression) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + ISparqlQueryGenerator sg = QueryGeneratorTree.CreateSubQueryGenerator(g, expression); + + // Set the sub query generator as the current query generator to implement the sub query. + QueryGeneratorTree.CurrentGenerator = sg; + + // Descend the query tree and implement the sub query. 
+ expression.QueryModel.Accept(QueryModelVisitor); + + // Register the sub query expression with the variable generator (used for ORDER BYs in the outer query). + if(sg.ObjectVariable != null && sg.ObjectVariable.IsResultVariable) + { + // Note: We make a copy of the variable here so that aggregate variables are selected by their names only. + SparqlVariable o = new SparqlVariable(sg.ObjectVariable.Name); + + g.VariableGenerator.SetObjectVariable(expression, o); + } + + // Reset the query generator and continue with implementing the outer query. + QueryGeneratorTree.CurrentGenerator = g; + + // Note: This will set pattern builder of the sub generator to the enclosing graph group builder. + g.Child(sg); + + return expression; + } + + protected override Expression VisitTypeBinary(TypeBinaryExpression expression) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + g.WhereResourceOfType(g.SubjectVariable, expression.TypeOperand); + + return expression; + } + + protected override Expression VisitUnary(UnaryExpression expression) + { + if(expression.NodeType == ExpressionType.Not) + { + if (expression.Operand is MemberExpression) + { + MemberExpression memberExpression = expression.Operand as MemberExpression; + + if(memberExpression.Type == typeof(bool)) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + ConstantExpression constantExpression = Expression.Constant(false); + + g.WhereEqual(memberExpression, constantExpression); + + return expression; + } + } + else if(expression.Operand is MethodCallExpression) + { + // Equals, Contains, StartsWith, EndsWith, IsMatch (see VisitMethodCall). + throw new NotImplementedException(); + } + else if(expression.Operand is SubQueryExpression) + { + // Any. 
+ throw new NotImplementedException(); + } + } + + throw new NotSupportedException(expression.Operand.ToString()); + } + + protected override Exception CreateUnhandledItemException(T unhandledItem, string visitMethod) + { + return null; + } + + public Expression VisitOrdering(Ordering ordering, int index) + { + Visit(ordering.Expression); + + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + // Either the member or aggregate variable has already been created previously by a SubQuery or a SelectClause.. + SparqlVariable o = g.VariableGenerator.TryGetObjectVariable(ordering.Expression); + + if(o != null) + { + // In case the query has a LastResultOperator, we invert the direction of the first + // ordering to retrieve the last element of the result set. + // See: SelectQueryGenerator.SetObjectOperator() + if (g.QueryModel.HasResultOperator() && index == 0) + { + if (ordering.OrderingDirection == OrderingDirection.Asc) g.OrderByDescending(o); else g.OrderBy(o); + } + else + { + if (ordering.OrderingDirection == OrderingDirection.Asc) g.OrderBy(o); else g.OrderByDescending(o); + } + + return ordering.Expression; + } + else + { + throw new ArgumentException(ordering.Expression.ToString()); + } + } + + public Expression VisitFromExpression(Expression expression, string itemName, Type itemType) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + g.VariableGenerator.AddVariableMapping(expression, itemName); + + if (expression is MemberExpression) + { + MemberExpression memberExpression = expression as MemberExpression; + + if (memberExpression.Expression is SubQueryExpression) + { + // First, implement the subquery.. + SubQueryExpression subQueryExpression = memberExpression.Expression as SubQueryExpression; + + Visit(subQueryExpression); + } + + // ..then implement the member expression. 
+ Visit(memberExpression); + } + + return expression; + } + +#endregion + } +} diff --git a/Trinity/Query/Linq/Extensions/AggregateExtensions.cs b/Trinity/Query/Linq/Extensions/AggregateExtensions.cs new file mode 100644 index 0000000..b89a857 --- /dev/null +++ b/Trinity/Query/Linq/Extensions/AggregateExtensions.cs @@ -0,0 +1,43 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System.Linq; +using VDS.RDF.Query; +using VDS.RDF.Query.Aggregates; + +namespace Semiodesk.Trinity.Query +{ + internal static class AggregateExtensions + { + public static SparqlVariable AsSparqlVariable(this ISparqlAggregate aggregate) + { + string variableName = aggregate.Expression.Variables.First(); + + return new SparqlVariable(aggregate.GetProjectedName(variableName), aggregate); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/BinaryExpressionExtensions.cs b/Trinity/Query/Linq/Extensions/BinaryExpressionExtensions.cs new file mode 100644 index 0000000..4a0600f --- /dev/null +++ b/Trinity/Query/Linq/Extensions/BinaryExpressionExtensions.cs @@ -0,0 +1,44 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal static class BinaryExpressionExtensions + { + public static bool HasExpressionOfType(this BinaryExpression expression) where T : Expression + { + return expression.Right is T || expression.Left is T; + } + + public static T TryGetExpressionOfType(this BinaryExpression expression) where T : Expression + { + return expression.Right is T ? expression.Right as T : expression.Left as T; + } + } +} diff --git a/Trinity/Query/Linq/Extensions/ConstantExpressionExtensions.cs b/Trinity/Query/Linq/Extensions/ConstantExpressionExtensions.cs new file mode 100644 index 0000000..b601c90 --- /dev/null +++ b/Trinity/Query/Linq/Extensions/ConstantExpressionExtensions.cs @@ -0,0 +1,130 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; +using System.Linq.Expressions; +using System.Xml; +using VDS.RDF; +using VDS.RDF.Query.Builder.Expressions; +using VDS.RDF.Query.Expressions; +using VDS.RDF.Query.Expressions.Primary; + +namespace Semiodesk.Trinity.Query +{ + public static class ConstantExpressionExtensions + { + public static ISparqlExpression AsSparqlExpression(this ConstantExpression constant) + { + return new ConstantTerm(constant.AsNode()); + } + + public static IriExpression AsIriExpression(this ConstantExpression constant) + { + return new IriExpression(constant.AsSparqlExpression()); + } + + public static LiteralExpression AsLiteralExpression(this ConstantExpression constant) + { + return new LiteralExpression(constant.AsSparqlExpression()); + } + + public static NumericExpression AsNumericExpression(this ConstantExpression constant) + { + return new NumericExpression(constant.AsSparqlExpression()); + } + + public static INode AsNode(this ConstantExpression constant) + { + if (typeof(Uri).IsAssignableFrom(constant.Type)) + { + // If we have a URI constant, return a URI node. + return new NodeFactory().CreateUriNode(constant.Value as Uri); + } + else if (XsdTypeMapper.HasXsdTypeUri(constant.Type) || constant.Value is string) + { + // If we have a literal value, return literal nodes. + string value = GetValue(constant); + Uri datatype = GetDataType(constant); + + if (datatype == null) + { + return new NodeFactory().CreateLiteralNode(value); + } + else + { + return new NodeFactory().CreateLiteralNode(value, datatype); + } + } + else if(typeof(Resource).IsAssignableFrom(constant.Type)) + { + // If we have a constant of type Resource, return a URI node. + Resource resource = constant.Value as Resource; + + return new NodeFactory().CreateUriNode(resource.Uri); + } + else + { + // We cannot determine the Uri of generic reference types. 
+ string msg = string.Format("Unsupported constant type: {0}", constant.Type); + throw new ArgumentException(msg); + } + } + + private static string GetValue(ConstantExpression constant) + { + if(constant.Type == typeof(DateTime)) + { + return XmlConvert.ToString((DateTime)constant.Value, XmlDateTimeSerializationMode.Utc); + } + else if(constant.Type == typeof(bool)) + { + return XmlConvert.ToString((bool)constant.Value); + } + else + { + return constant.Value.ToString(); + } + } + + private static Uri GetDataType(ConstantExpression constant) + { + if(constant.Type == typeof(string)) + { + return null; + } + else + { + return XsdTypeMapper.GetXsdTypeUri(constant.Type); + } + } + + public static bool IsNullOrFalse(this ConstantExpression constant) + { + return constant.Value == null || constant.Value is bool && ((bool)constant.Value) == false; + } + } +} diff --git a/Trinity/Query/Linq/Extensions/ExpressionExstensions.cs b/Trinity/Query/Linq/Extensions/ExpressionExstensions.cs new file mode 100644 index 0000000..da6a8a2 --- /dev/null +++ b/Trinity/Query/Linq/Extensions/ExpressionExstensions.cs @@ -0,0 +1,101 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses.Expressions; +using System; +using System.Diagnostics; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal static class ExpressionExstensions + { + public static QuerySourceReferenceExpression TryGetQuerySourceReference(this Expression expression) + { + if (expression is QuerySourceReferenceExpression) + { + QuerySourceReferenceExpression sourceExpression = expression as QuerySourceReferenceExpression; + + return sourceExpression; + } + else if (expression is MemberExpression) + { + MemberExpression memberExpression = expression as MemberExpression; + + return TryGetQuerySourceReference(memberExpression.Expression); + } + else if (expression is SubQueryExpression) + { + SubQueryExpression subQueryExpression = expression as SubQueryExpression; + + return TryGetQuerySourceReference(subQueryExpression.QueryModel.MainFromClause.FromExpression); + } + + return null; + } + + /// + /// Indicate if an expression contains antoher or is equal to it. + /// + /// An expression. + /// Expression to be evaluated. + /// true if e is equal to the given expression or one of its query sources, false otherwise. 
+ public static bool ContainsOrEquals(this Expression expression, Expression e) + { + if (expression != null) + { + if (expression.Equals(e)) + { + return true; + } + else + { + QuerySourceReferenceExpression sourceExpression = expression.TryGetQuerySourceReference(); + + if (sourceExpression != null && sourceExpression != expression) + { + return sourceExpression.ContainsOrEquals(e); + } + } + } + + return false; + } + + public static string GetKey(this Expression expression) + { + string key = expression.ToString().Trim(); + + if (key.EndsWith(".Uri")) + { + key = key.Substring(0, key.LastIndexOf(".Uri")); + } + + return key; + } + } +} diff --git a/Trinity/Query/Linq/Extensions/MemberExpressionExtensions.cs b/Trinity/Query/Linq/Extensions/MemberExpressionExtensions.cs new file mode 100644 index 0000000..b07fd4d --- /dev/null +++ b/Trinity/Query/Linq/Extensions/MemberExpressionExtensions.cs @@ -0,0 +1,55 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; + +namespace Semiodesk.Trinity.Query +{ + internal static class MemberExpressionExtensions + { + public static RdfPropertyAttribute TryGetRdfPropertyAttribute(this MemberExpression expression) + { + Type attributeType = typeof(RdfPropertyAttribute); + + MemberInfo member; + + if(expression.Member.DeclaringType.IsInterface) + { + member = expression.Expression.Type.GetMember(expression.Member.Name).First(); + } + else + { + member = expression.Member; + } + + return member.GetCustomAttributes(attributeType, true).FirstOrDefault() as RdfPropertyAttribute; + } + } +} diff --git a/Trinity/Query/Linq/Extensions/MemberInfoExtensions.cs b/Trinity/Query/Linq/Extensions/MemberInfoExtensions.cs new file mode 100644 index 0000000..9442b8e --- /dev/null +++ b/Trinity/Query/Linq/Extensions/MemberInfoExtensions.cs @@ -0,0 +1,57 @@ +using System; +using System.Linq; +using System.Reflection; +using System.Collections.Generic; + +namespace Semiodesk.Trinity.Query +{ + public static class MemberInfoExtensions + { + public static TAttribute TryGetCustomAttribute(this MemberInfo member) where TAttribute : Attribute + { + return member.GetCustomAttributes(typeof(TAttribute), true).FirstOrDefault() as TAttribute; + } + + public static Type GetMemberType(this MemberInfo member) + { + switch (member.MemberType) + { + case MemberTypes.Event: + return (member as EventInfo).EventHandlerType; + case MemberTypes.Field: + return (member as FieldInfo).FieldType; + case MemberTypes.Method: + return (member as MethodInfo).ReturnType; 
+ case MemberTypes.Property: + return (member as PropertyInfo).PropertyType; + default: + throw new ArgumentException("Input MemberInfo must be if type EventInfo, FieldInfo, MethodInfo, or PropertyInfo"); + } + } + + public static bool IsUriType(this MemberInfo member) + { + PropertyInfo property = member as PropertyInfo; + + if(property != null) + { + return typeof(Uri).IsAssignableFrom(property.PropertyType); + } + else + { + return false; + } + } + + public static bool IsBuiltInCall(this MemberInfo member) + { + HashSet systemTypes = new HashSet() + { + typeof(DateTime), + typeof(String) + }; + + return systemTypes.Contains(member.DeclaringType); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/MethodCallExpressionExtensions.cs b/Trinity/Query/Linq/Extensions/MethodCallExpressionExtensions.cs new file mode 100644 index 0000000..90cef0d --- /dev/null +++ b/Trinity/Query/Linq/Extensions/MethodCallExpressionExtensions.cs @@ -0,0 +1,87 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System.Linq; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + public static class MethodCallExpressionExtensions + { + public static bool HasArgumentValue(this MethodCallExpression expression, int index, object value) + { + if (index < expression.Arguments.Count) + { + ConstantExpression arg = expression.Arguments[index] as ConstantExpression; + + return arg.Value == value; + } + + return false; + } + + public static bool HasArgumentValueFromAlternatives(this MethodCallExpression expression, int index, params object[] values) + { + if (index < expression.Arguments.Count) + { + ConstantExpression arg = expression.Arguments[index] as ConstantExpression; + + if(arg.Value != null) + { + return values.Any(v => arg.Value.Equals(v)); + } + else + { + return values.Any(v => arg.Value == v); + } + } + + return false; + } + + public static T GetArgumentValue(this MethodCallExpression expression, int index) + { + ConstantExpression arg = expression.Arguments[index] as ConstantExpression; + + return (T)arg.Value; + } + + public static T GetArgumentValue(this MethodCallExpression expression, int index, T defaultValue) + { + if (index < expression.Arguments.Count) + { + ConstantExpression arg = expression.Arguments[index] as ConstantExpression; + + return (T)arg.Value; + } + else + { + return defaultValue; + } + } + } +} diff --git a/Trinity/Query/Linq/Extensions/NodeFactoryExtensions.cs b/Trinity/Query/Linq/Extensions/NodeFactoryExtensions.cs new file mode 100644 index 0000000..8698021 --- /dev/null +++ b/Trinity/Query/Linq/Extensions/NodeFactoryExtensions.cs @@ -0,0 +1,43 @@ 
+// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System.Xml; +using VDS.RDF; + +namespace Semiodesk.Trinity.Query +{ + public static class NodeFactoryExtensions + { + public static INode CreateLiteralNode(this NodeFactory factory, bool value) + { + var literalValue = XmlConvert.ToString(value); + var literalType = XsdTypeMapper.GetXsdTypeUri(typeof(bool)); + + return factory.CreateLiteralNode(literalValue, literalType); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/QueryModelExtensions.cs b/Trinity/Query/Linq/Extensions/QueryModelExtensions.cs new file mode 100644 index 0000000..b565c2b --- /dev/null +++ b/Trinity/Query/Linq/Extensions/QueryModelExtensions.cs @@ -0,0 +1,75 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Clauses; +using Remotion.Linq.Clauses.ResultOperators; +using System.Linq; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal static class QueryModelExtensions + { + public static bool HasOrdering(this QueryModel queryModel, Expression expression) + { + return queryModel.BodyClauses.OfType().Any(c => c.Orderings.Any(o => o.Expression == expression)); + } + + public static bool HasResultOperator(this QueryModel queryModel) + { + return queryModel.ResultOperators.Any(op => op is T); + } + + public static bool HasNumericResultOperator(this QueryModel queryModel) + { + return queryModel.ResultOperators.Any(op => IsNumericResultOperator(op)); + } + + private static bool IsNumericResultOperator(ResultOperatorBase op) + { + if (op is SumResultOperator + || op is CountResultOperator + || op is LongCountResultOperator + || op is AverageResultOperator + || op is MinResultOperator + || op is MaxResultOperator) + { + return true; + } + else + { + return false; + } + } + + public static bool HasSelect(this QueryModel queryModel, Expression expression) + { + return queryModel.BodyClauses.OfType().Any(c => c.Selector == expression); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/ResourceExtensions.cs b/Trinity/Query/Linq/Extensions/ResourceExtensions.cs new file mode 100644 index 0000000..d683ffe --- /dev/null +++ b/Trinity/Query/Linq/Extensions/ResourceExtensions.cs @@ -0,0 +1,28 @@ +using System; +using System.Collections.Generic; + +namespace Semiodesk.Trinity +{ + public static class ResourceExtensions + { + /// + /// Executes a breath first search on the property of the resource and returns all + /// + /// + /// + public static IEnumerable FindAll(this IResource resource, T relation, int depth = int.MaxValue) where T : IResource + { + throw new NotImplementedException(); + } + + /// + /// 
Executes a breath first search on the property of the resource and returns all + /// + /// + /// + public static IEnumerable FindAll(this IResource resource, IEnumerable relation, int depth = int.MaxValue) where T : IResource + { + throw new NotImplementedException(); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/SparqlVariableExtensions.cs b/Trinity/Query/Linq/Extensions/SparqlVariableExtensions.cs new file mode 100644 index 0000000..33577dc --- /dev/null +++ b/Trinity/Query/Linq/Extensions/SparqlVariableExtensions.cs @@ -0,0 +1,33 @@ +using VDS.RDF.Query; +using VDS.RDF.Query.Aggregates; + +namespace Semiodesk.Trinity.Query +{ + internal static class SparqlVariableExtensions + { + public static bool IsGlobal(this SparqlVariable variable) + { + return variable.Name.EndsWith("_"); + } + + public static string GetProjectedName(this SparqlVariable variable) + { + if (variable.IsAggregate) + { + return variable.Aggregate.GetProjectedName(variable.Name); + } + else + { + return variable.Name; + } + } + + public static string GetProjectedName(this ISparqlAggregate aggregate, string variableName) + { + string name = variableName; + string functor = aggregate.Functor.ToLowerInvariant(); + + return string.Format("{0}_{1}", name, functor); + } + } +} diff --git a/Trinity/Query/Linq/Extensions/StringExtensions.cs b/Trinity/Query/Linq/Extensions/StringExtensions.cs new file mode 100644 index 0000000..8f5372c --- /dev/null +++ b/Trinity/Query/Linq/Extensions/StringExtensions.cs @@ -0,0 +1,22 @@ +namespace Semiodesk.Trinity.Query +{ + internal static class StringExtensions + { + public static string ToCamelCase(this string s) + { + if(string.IsNullOrEmpty(s)) + { + return s; + } + + string result = s.Substring(0, 1).ToLowerInvariant(); + + if(s.Length > 1) + { + result += s.Substring(1); + } + + return result; + } + } +} diff --git a/Trinity/Query/Linq/ISparqlQueryGenerator.cs b/Trinity/Query/Linq/ISparqlQueryGenerator.cs new file mode 100644 index 0000000..83a74c0 --- 
/dev/null +++ b/Trinity/Query/Linq/ISparqlQueryGenerator.cs @@ -0,0 +1,150 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Clauses; +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using VDS.RDF.Query; +using VDS.RDF.Query.Builder; + +namespace Semiodesk.Trinity.Query +{ + internal interface ISparqlQueryGenerator + { + #region Members + + bool IsRoot { get; } + + bool IsBound { get; } + + SparqlVariable SubjectVariable { get; } + + SparqlVariable ObjectVariable { get; } + + IList SelectedVariables { get; } + + QueryModel QueryModel { get; } + + IQueryBuilder QueryBuilder { get; } + + IGraphPatternBuilder PatternBuilder { get; set; } + + ISparqlQueryGenerator ParentGenerator { get; set; } + + ISparqlVariableGenerator VariableGenerator { get; } + + #endregion + + #region Methods + + string BuildQuery(); + + void BindSelectVariables(); + + IGraphPatternBuilder Child(ISparqlQueryGenerator queryGenerator); + + IGraphPatternBuilder Child(GraphPatternBuilder patternBuilder); + + void SetObjectOperator(ResultOperatorBase resultOperator); + + void SetObjectVariable(SparqlVariable variable, bool select = false); + + void SetSubjectVariable(SparqlVariable variable, bool select = false); + + void SetSubjectOperator(ResultOperatorBase resultOperator); + + void DeselectVariable(SparqlVariable variable); + + void SelectVariable(SparqlVariable variable); + + bool IsSelectedVariable(SparqlVariable variable); + + void WhereResource(SparqlVariable s, SparqlVariable p = null, SparqlVariable o = null); + + void WhereResource(Expression expression, SparqlVariable p = null, SparqlVariable o = null); + + void WhereResourceOfType(SparqlVariable s, Type type); + + void WhereResourceOfType(Expression expression, Type type); + + void Where(MemberExpression member, SparqlVariable variable); + + void WhereEqual(SparqlVariable variable, ConstantExpression constant); + + void WhereEqual(MemberExpression member, ConstantExpression constant); 
+ + void WhereNotEqual(SparqlVariable variable, ConstantExpression constant); + + void WhereNotEqual(MemberExpression member, ConstantExpression constant); + + void WhereGreaterThan(SparqlVariable variable, ConstantExpression constant); + + void WhereGreaterThan(MemberExpression member, ConstantExpression constant); + + void WhereGreaterThanOrEqual(SparqlVariable variable, ConstantExpression constant); + + void WhereGreaterThanOrEqual(MemberExpression member, ConstantExpression constant); + + void WhereLessThan(SparqlVariable variable, ConstantExpression constant); + + void WhereLessThan(MemberExpression member, ConstantExpression constant); + + void WhereLessThanOrEqual(SparqlVariable variable, ConstantExpression constant); + + void WhereLessThanOrEqual(MemberExpression member, ConstantExpression constant); + + void FilterRegex(SparqlVariable variable, string text, bool ignoreCase); + + void FilterRegex(MemberExpression member, string text, bool ignoreCase); + + void OrderBy(SparqlVariable variable); + + void OrderByDescending(SparqlVariable variable); + + void Offset(int offset); + + void Limit(int limit); + + void Union(GraphPatternBuilder firstBuilder, params GraphPatternBuilder[] otherBuilders); + + void Union(Action buildFirstPattern, params Action[] buildOtherPatterns); + + void SetQueryContext(ISparqlQueryGeneratorTree generatorTree, QueryModel queryModel); + + void OnBeforeFromClauseVisited(Expression expression); + + void OnFromClauseVisited(Expression expression); + + void OnBeforeSelectClauseVisited(Expression selector); + + void OnSelectClauseVisited(Expression selector); + + #endregion + } +} diff --git a/Trinity/Query/Linq/ISparqlQueryGeneratorTree.cs b/Trinity/Query/Linq/ISparqlQueryGeneratorTree.cs new file mode 100644 index 0000000..341d4f5 --- /dev/null +++ b/Trinity/Query/Linq/ISparqlQueryGeneratorTree.cs @@ -0,0 +1,59 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and 
associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using System.Collections.Generic; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + // TODO: This can be decomposed into a tree and a factory class. 
+ internal interface ISparqlQueryGeneratorTree + { + #region Members + + ISparqlQueryGenerator CurrentGenerator { get; set; } + + ISparqlQueryGenerator RootGenerator { get; } + + #endregion + + #region Methods + + void Bind(); + + ISparqlQueryGenerator CreateSubQueryGenerator(ISparqlQueryGenerator parentGenerator, Expression expression); + + void RegisterQueryExpression(ISparqlQueryGenerator generator, Expression expression); + + bool HasQueryGenerator(Expression expression); + + ISparqlQueryGenerator GetQueryGenerator(Expression expression); + + #endregion + } +} diff --git a/Trinity/Query/Linq/ISparqlQueryModelVisitor.cs b/Trinity/Query/Linq/ISparqlQueryModelVisitor.cs new file mode 100644 index 0000000..cc43d7d --- /dev/null +++ b/Trinity/Query/Linq/ISparqlQueryModelVisitor.cs @@ -0,0 +1,40 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; + +namespace Semiodesk.Trinity.Query +{ + internal interface ISparqlQueryModelVisitor : IQueryModelVisitor + { + #region Methods + + ISparqlQuery GetQuery(); + + #endregion + } +} diff --git a/Trinity/Query/Linq/ISparqlVariableGenerator.cs b/Trinity/Query/Linq/ISparqlVariableGenerator.cs new file mode 100644 index 0000000..6ace0dc --- /dev/null +++ b/Trinity/Query/Linq/ISparqlVariableGenerator.cs @@ -0,0 +1,80 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using System.Collections.Generic; +using System.Linq.Expressions; +using VDS.RDF.Query; + +namespace Semiodesk.Trinity.Query +{ + internal interface ISparqlVariableGenerator + { + #region Members + + Dictionary VariableCounters { get; } + + SparqlVariable GlobalSubject { get; } + + SparqlVariable GlobalPredicate { get; } + + SparqlVariable GlobalObject { get; } + + #endregion + + #region Methods + + void AddVariableMapping(Expression expression, string alias); + + bool HasSubjectVariable(Expression expression); + + bool HasPredicateVariable(Expression expression); + + bool HasObjectVariable(Expression expression); + + SparqlVariable TryGetSubjectVariable(Expression expression); + + SparqlVariable TryGetPredicateVariable(Expression expression); + + SparqlVariable TryGetObjectVariable(Expression expression); + + void SetSubjectVariable(Expression expression, SparqlVariable s); + + void SetPredicateVariable(Expression expression, SparqlVariable p); + + void SetObjectVariable(Expression expression, SparqlVariable o); + + SparqlVariable CreateSubjectVariable(Expression expression); + + SparqlVariable CreatePredicateVariable(); + + SparqlVariable CreateObjectVariable(); + + SparqlVariable CreateObjectVariable(Expression expression); + + #endregion + } +} diff --git a/Trinity/Query/Linq/QueryGenerators/AskQueryGenerator.cs b/Trinity/Query/Linq/QueryGenerators/AskQueryGenerator.cs new file mode 100644 index 0000000..797a527 --- /dev/null +++ b/Trinity/Query/Linq/QueryGenerators/AskQueryGenerator.cs @@ -0,0 +1,88 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the 
Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses.Expressions; +using System; +using System.Linq.Expressions; +using VDS.RDF.Query; + +namespace Semiodesk.Trinity.Query +{ + internal class AskQueryGenerator : SparqlQueryGenerator + { + #region Constructors + + public AskQueryGenerator() + : base(VDS.RDF.Query.Builder.QueryBuilder.Ask()) + { + IsRoot = true; + VariableGenerator = new SparqlVariableGenerator(null); + } + + #endregion + + #region Methods + + public override void OnBeforeSelectClauseVisited(Expression selector) + { + base.OnBeforeSelectClauseVisited(selector); + + // TODO: Add support for selecting literal types as a query result. + + if (selector is QuerySourceReferenceExpression) + { + SparqlVariable s_ = VariableGenerator.GlobalSubject; + SparqlVariable p_ = VariableGenerator.GlobalPredicate; + SparqlVariable o_ = VariableGenerator.GlobalObject; + + // Select all triples having the resource as subject. + SetSubjectVariable(s_); + SetObjectVariable(o_); + + // Add the type constraint on the referenced query source. + WhereResource(s_, p_, o_); + + // Constrain the type of resource, if it is a subclass of Resource. 
+ QuerySourceReferenceExpression sourceExpression = selector as QuerySourceReferenceExpression; + + Type type = sourceExpression.ReferencedQuerySource.ItemType; + + if (type.IsSubclassOf(typeof(Resource))) + { + WhereResourceOfType(s_, type); + } + } + else + { + // TODO: Create unit tests an implement for ConstantExpression, MemberExpression and SubQueryExpression. + throw new NotImplementedException(selector.GetType().ToString()); + } + } + + #endregion + } +} \ No newline at end of file diff --git a/Trinity/Query/Linq/QueryGenerators/SelectBindingsQueryGenerator.cs b/Trinity/Query/Linq/QueryGenerators/SelectBindingsQueryGenerator.cs new file mode 100644 index 0000000..9f28707 --- /dev/null +++ b/Trinity/Query/Linq/QueryGenerators/SelectBindingsQueryGenerator.cs @@ -0,0 +1,170 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses; +using Remotion.Linq.Clauses.Expressions; +using System; +using System.Linq; +using System.Linq.Expressions; +using VDS.RDF.Query; + +namespace Semiodesk.Trinity.Query +{ + /// + /// Generates SELECT queries which return binding sets. + /// + /// + /// This class is intended to be used as a root query generator. For generating SELECT queries + /// for sub-queries, refer to SubSelectQueryGenerator. + /// + internal class SelectBindingsQueryGenerator : SelectQueryGenerator + { + #region Constructors + + public SelectBindingsQueryGenerator() + { + IsRoot = true; + VariableGenerator = new SparqlVariableGenerator(null); + } + + #endregion + + #region Methods + + public override void OnBeforeFromClauseVisited(Expression expression) + { + base.OnBeforeFromClauseVisited(expression); + + // TODO: Move into OnBeforeSelectClauseVisited. + if (expression is ConstantExpression) + { + ConstantExpression constantExpression = expression as ConstantExpression; + + IQueryable queryable = constantExpression.Value as IQueryable; + + if (queryable != null && typeof(Resource).IsAssignableFrom(queryable.ElementType)) + { + SparqlVariable s = VariableGenerator.GlobalSubject; + + SetSubjectVariable(s); + + VariableGenerator.SetSubjectVariable(expression, s); + } + else + { + throw new NotSupportedException(constantExpression.Value.GetType().ToString()); + } + } + else + { + // TODO: Create unit test for QuerySourceReferenceExpression, SubQueryExpression + throw new NotImplementedException(); + } + } + + public override void OnBeforeSelectClauseVisited(Expression selector) + { + base.OnBeforeSelectClauseVisited(selector); + + QuerySourceReferenceExpression sourceExpression = selector.TryGetQuerySourceReference(); + + if (sourceExpression != null) + { + // Register the query source with the global variable for sub-queries. 
+ SparqlVariable s = VariableGenerator.TryGetSubjectVariable(sourceExpression) ?? VariableGenerator.GlobalSubject; + + // Assert the object type. + if (sourceExpression.Type.IsSubclassOf(typeof(Resource))) + { + WhereResourceOfType(s, sourceExpression.Type); + } + + if (selector is MemberExpression) + { + MemberExpression memberExpression = selector as MemberExpression; + + SparqlVariable o = VariableGenerator.CreateObjectVariable(memberExpression); + + // Select all triples having the resource as subject. + SetSubjectVariable(s); + SetObjectVariable(o, true); + + // If the member expression is not selected in the WHERE block, we add it here. + // Scenarios: + // - from x in Model.AsQueryable() select x.B + // - from x in Model.AsQueryable() where x.A select x.B + string e = memberExpression.ToString(); + + if (!QueryModel.BodyClauses.OfType().Any(c => c.Predicate.ToString().Contains(e))) + { + // We select the member without a constraint on its value. + QueryModel.BodyClauses.Add(new WhereClause(memberExpression)); + + // Since there is no constraint on the member, we also need to select the ones that are not bound. + Type memberType = memberExpression.Member.GetMemberType(); + + // TODO: There might be a different default value on the member using the DefaultValue() attribute. + object defaultValue = TypeHelper.GetDefaultValue(memberType); + + if(defaultValue != null) + { + ConstantExpression coalescedValue = Expression.Constant(defaultValue); + + // Mark the variable to be coalesced with the default value when selected. + CoalescedVariables[o] = coalescedValue.AsLiteralExpression(); + } + } + } + else if(QueryModel.HasNumericResultOperator()) + { + // If we have a numeric result operator on the root query, make the + // subject variable known so that the model visitor can handle it. 
+ SetSubjectVariable(s); + } + } + } + + public override void OnSelectClauseVisited(Expression selector) + { + base.OnSelectClauseVisited(selector); + + // If we are in the root query generator and have not yet selected the + // subject variable, set it from the given selector. + if(IsRoot && !SelectedVariables.Any()) + { + SparqlVariable o = VariableGenerator.TryGetObjectVariable(selector); + + if (o != null && !IsSelectedVariable(o)) + { + SelectVariable(o); + } + } + } + + #endregion + } +} \ No newline at end of file diff --git a/Trinity/Query/Linq/QueryGenerators/SelectQueryGenerator.cs b/Trinity/Query/Linq/QueryGenerators/SelectQueryGenerator.cs new file mode 100644 index 0000000..07acc20 --- /dev/null +++ b/Trinity/Query/Linq/QueryGenerators/SelectQueryGenerator.cs @@ -0,0 +1,194 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses; +using Remotion.Linq.Clauses.ResultOperators; +using VDS.RDF.Query.Aggregates.Sparql; +using VDS.RDF.Query.Expressions.Primary; +using VDS.RDF.Query; +using System; +using System.Linq; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal class SelectQueryGenerator : SparqlQueryGenerator + { + #region Constructors + + public SelectQueryGenerator() + : base(VDS.RDF.Query.Builder.QueryBuilder.Select(new string[] {})) + { + } + + #endregion + + #region Methods + + public override void SetObjectOperator(ResultOperatorBase resultOperator) + { + base.SetObjectOperator(resultOperator); + + if (ObjectVariable != null) + { + if (resultOperator is AnyResultOperator) + { + // When using x.Any(), we add a LIMIT 1 the query results in the SparqlQueryGenerator. + + // When using .Any(x => ..), the variable x is locally scoped and cannot be + // used in outer queries. Therefore do not need to actually select it. + + // This avoids issues with Stardog: + // https://community.stardog.com/t/sparql-union-only-working-with-inferencing-enabled/1040/9 + } + else if (resultOperator is AverageResultOperator) + { + var aggregate = new AverageAggregate(new VariableTerm(ObjectVariable.Name)); + SetObjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is CountResultOperator) + { + var aggregate = new CountDistinctAggregate(new VariableTerm(ObjectVariable.Name)); + SetObjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is FirstResultOperator) + { + // "Using LIMIT and OFFSET to select different subsets of the query solutions + // will not be useful unless the order is made predictable by using ORDER BY." 
+ // Source: https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#modOffset
+
+ // Therefore, if no ordering exists we add an ordering on the current subject
+ // to make the query result predictable.
+ if (!QueryModel.BodyClauses.OfType().Any())
+ {
+ OrderBy(SubjectVariable);
+ }
+ else
+ {
+ // In case the order was made explicit, we do not have to do anything.
+ }
+
+ Limit(1);
+ }
+ else if (resultOperator is LastResultOperator)
+ {
+ // "Using LIMIT and OFFSET to select different subsets of the query solutions
+ // will not be useful unless the order is made predictable by using ORDER BY."
+ // Source: https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#modOffset
+
+ // Therefore, if no ordering exists we add an ordering on the current subject
+ // to make the query result predictable.
+ if (!QueryModel.BodyClauses.OfType().Any())
+ {
+ OrderByDescending(SubjectVariable);
+ }
+ else
+ {
+ // Inverting the direction of the first ordering is handled in SparqlQueryModelVisitor.VisitOrdering().
+ // This is because the orderings are not necessarily processed *before* the result operators.
+ } + + Limit(1); + } + else if (resultOperator is MaxResultOperator) + { + var aggregate = new MaxAggregate(new VariableTerm(ObjectVariable.Name)); + SetObjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is MinResultOperator) + { + var aggregate = new MinAggregate(new VariableTerm(ObjectVariable.Name)); + SetObjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is SumResultOperator) + { + var aggregate = new SumAggregate(new VariableTerm(ObjectVariable.Name)); + SetObjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is OfTypeResultOperator) + { + OfTypeResultOperator ofType = resultOperator as OfTypeResultOperator; + RdfClassAttribute type = ofType.SearchedItemType.TryGetCustomAttribute(); + + if (type == null) + { + throw new ArgumentException("No RdfClass attrribute declared on type: " + ofType.SearchedItemType); + } + + SparqlVariable s = ObjectVariable; + SparqlVariable p = VariableGenerator.CreatePredicateVariable(); + SparqlVariable o = VariableGenerator.CreateObjectVariable(); + + WhereResource(s, p, o); + WhereResourceOfType(o, ofType.SearchedItemType); + } + else if (resultOperator is SkipResultOperator) + { + SkipResultOperator op = resultOperator as SkipResultOperator; + Offset(int.Parse(op.Count.ToString())); + } + else if(resultOperator is TakeResultOperator) + { + TakeResultOperator op = resultOperator as TakeResultOperator; + Limit(int.Parse(op.Count.ToString())); + } + else + { + throw new NotImplementedException(resultOperator.ToString()); + } + } + } + + public override void SetSubjectOperator(ResultOperatorBase resultOperator) + { + base.SetSubjectOperator(resultOperator); + + if (SubjectVariable != null) + { + if (resultOperator is CountResultOperator) + { + var aggregate = new CountDistinctAggregate(new VariableTerm(SubjectVariable.Name)); + SetSubjectVariable(aggregate.AsSparqlVariable(), true); + } + else if (resultOperator is FirstResultOperator) + { + 
if(!IsRoot) + { + // Note: We currently only support First operators on root queries. + throw new NotSupportedException(); + } + } + else + { + throw new NotImplementedException(resultOperator.ToString()); + } + } + } + + #endregion + } +} \ No newline at end of file diff --git a/Trinity/Query/Linq/QueryGenerators/SelectTriplesQueryGenerator.cs b/Trinity/Query/Linq/QueryGenerators/SelectTriplesQueryGenerator.cs new file mode 100644 index 0000000..a693d37 --- /dev/null +++ b/Trinity/Query/Linq/QueryGenerators/SelectTriplesQueryGenerator.cs @@ -0,0 +1,177 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2018 + +using Remotion.Linq.Clauses.Expressions; +using System.Linq.Expressions; +using VDS.RDF.Query; +using Remotion.Linq; +using Remotion.Linq.Clauses.ResultOperators; +using System; +using System.Linq; + +namespace Semiodesk.Trinity.Query +{ + internal class SelectTriplesQueryGenerator : SelectQueryGenerator + { + #region Constructors + + public SelectTriplesQueryGenerator() + { + IsRoot = true; + VariableGenerator = new SparqlVariableGenerator(null); + } + + #endregion + + #region Methods + + private Type TryGetSelectedType(Expression selector) + { + if (selector is ConstantExpression) + { + // We can either select a resource as a constant. + IQueryable queryable = (selector as ConstantExpression).Value as IQueryable; + + return (queryable != null) ? queryable.ElementType : null; + } + else if (selector is MemberExpression) + { + // Or we can select a resource from a member variable. + return (selector as MemberExpression).Type; + } + else if (selector is QuerySourceReferenceExpression) + { + // Or we can select resources from a query source reference. + return (selector as QuerySourceReferenceExpression).Type; + } + else + { + // TODO: Create unit test for handling SubQueryExpression. + throw new NotImplementedException(); + } + } + + public override void OnBeforeSelectClauseVisited(Expression selector) + { + base.OnBeforeSelectClauseVisited(selector); + + Type selectedType = TryGetSelectedType(selector); + + if (selectedType == null || !typeof(Resource).IsAssignableFrom(selectedType)) + { + throw new NotSupportedException(selectedType.ToString()); + } + + // 1. We always create an outer query which selects all triples that describe our resources. 
+ SparqlVariable s_ = VariableGenerator.GlobalSubject;
+ SparqlVariable p_ = VariableGenerator.GlobalPredicate;
+ SparqlVariable o_ = VariableGenerator.GlobalObject;
+
+ VariableGenerator.SetSubjectVariable(selector, s_);
+ VariableGenerator.SetPredicateVariable(selector, p_);
+ VariableGenerator.SetObjectVariable(selector, o_);
+
+ SetSubjectVariable(s_);
+ SetObjectVariable(o_);
+
+ SelectVariable(s_);
+ SelectVariable(p_);
+ SelectVariable(o_);
+
+ WhereResource(s_, p_, o_);
+
+ // If we are describing resources using a SKIP or TAKE operator, we need to make sure that
+ // these operations are on a per-resource basis and all triples for the described resources
+ // are contained in the result.
+ if (QueryModel.HasResultOperator()
+ || QueryModel.HasResultOperator()
+ || QueryModel.HasResultOperator()
+ || QueryModel.HasResultOperator())
+ {
+ // ..which are described in an inner query on which the LIMIT and OFFSET operators are set.
+ // This results in a SELECT query that acts like a DESCRIBE but is faster on most triple
+ // stores as the triples can be returned via bindings and do not have to be parsed.
+ ISparqlQueryGenerator subGenerator = QueryGeneratorTree.CreateSubQueryGenerator(this, selector);
+
+ subGenerator.SetSubjectVariable(s_, true);
+ subGenerator.SetObjectVariable(o_);
+
+ GenerateTypeConstraintOnSubject(subGenerator, selector);
+
+ QueryGeneratorTree.CurrentGenerator = subGenerator;
+
+ // NOTE: We set the subGenerator as a child *AFTER* the select clause and body clauses
+ // have been processed (see OnSelectClauseVisited). This is because the dotNetRDF
+ // query generator does not correctly handle result operators when it is already set as a child.
+ } + else + { + GenerateTypeConstraintOnSubject(this, selector); + } + } + + private void GenerateTypeConstraintOnSubject(ISparqlQueryGenerator generator, Expression selector) + { + Type type = null; + + if(selector is ConstantExpression) + { + type = (selector as ConstantExpression).Value.GetType(); + } + else if(selector is MemberExpression) + { + type = (selector as MemberExpression).Member.DeclaringType; + } + else if(selector is QuerySourceReferenceExpression) + { + type = (selector as QuerySourceReferenceExpression).ReferencedQuerySource.ItemType; + } + + if(type != null && type.IsSubclassOf(typeof(Resource))) + { + generator.WhereResourceOfType(VariableGenerator.GlobalSubject, type); + } + } + + public override void OnSelectClauseVisited(Expression selector) + { + if (QueryModel.HasResultOperator() + || QueryModel.HasResultOperator() + || QueryModel.HasResultOperator() + || QueryModel.HasResultOperator()) + { + // Finally, if we have a SKIP or TAKE operator on the root query, set the current + // query generator as a child. 
+ ISparqlQueryGenerator subGenerator = QueryGeneratorTree.GetQueryGenerator(selector); + + Child(subGenerator); + } + } + + #endregion + } +} \ No newline at end of file diff --git a/Trinity/Query/Linq/QueryGenerators/SubSelectQueryGenerator.cs b/Trinity/Query/Linq/QueryGenerators/SubSelectQueryGenerator.cs new file mode 100644 index 0000000..48d59ac --- /dev/null +++ b/Trinity/Query/Linq/QueryGenerators/SubSelectQueryGenerator.cs @@ -0,0 +1,103 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+//
+// AUTHORS:
+//
+// Moritz Eberl
+// Sebastian Faubel
+//
+// Copyright (c) Semiodesk GmbH 2018
+
+using Remotion.Linq.Clauses.Expressions;
+using System;
+using System.Linq.Expressions;
+using VDS.RDF.Query;
+using VDS.RDF.Query.Builder;
+
+namespace Semiodesk.Trinity.Query
+{
+ internal class SubSelectQueryGenerator : SelectQueryGenerator
+ {
+ #region Constructors
+
+ public SubSelectQueryGenerator(ISparqlQueryGenerator parent)
+ {
+ IsRoot = false;
+ VariableGenerator = new SparqlVariableGenerator(parent.VariableGenerator);
+ }
+
+ #endregion
+
+ #region Methods
+
+ public override void OnBeforeFromClauseVisited(Expression expression)
+ {
+ SparqlVariable s = null;
+ SparqlVariable o = null;
+
+ if(expression is MemberExpression)
+ {
+ QuerySourceReferenceExpression sourceExpression = expression.TryGetQuerySourceReference();
+
+ s = VariableGenerator.TryGetSubjectVariable(sourceExpression) ?? VariableGenerator.TryGetObjectVariable(sourceExpression);
+ o = VariableGenerator.CreateObjectVariable(expression);
+
+ // The from clause is parsed first when handling a query. This allows us to detect if the
+ // query source is a subquery and proceed with implementing it _before_ handling its results.
+ MemberExpression memberExpression = expression as MemberExpression;
+
+ if (s.IsGlobal())
+ {
+ Type type = memberExpression.Member.DeclaringType;
+
+ if(type.IsSubclassOf(typeof(Resource)))
+ {
+ WhereResourceOfType(s, type);
+ }
+ }
+
+ // If the query model has a numeric result operator, we make all the following
+ // expressions optional in order to also allow counting zero occurrences.
+ if (QueryModel.HasNumericResultOperator()) + { + GraphPatternBuilder optionalBuilder = new GraphPatternBuilder(GraphPatternType.Optional); + + Child(optionalBuilder); + + PatternBuilder = optionalBuilder; + } + } + else + { + s = VariableGenerator.TryGetSubjectVariable(expression); + o = VariableGenerator.TryGetObjectVariable(expression); + } + + if (s != null && o != null) + { + // Set the variable name of the query source reference as subject of the current query. + SetSubjectVariable(s, true); + SetObjectVariable(o, true); + } + } + + #endregion + } +} \ No newline at end of file diff --git a/Trinity/Query/Linq/SparqlQueryExecutor.cs b/Trinity/Query/Linq/SparqlQueryExecutor.cs new file mode 100644 index 0000000..3caf465 --- /dev/null +++ b/Trinity/Query/Linq/SparqlQueryExecutor.cs @@ -0,0 +1,131 @@ + + +using Remotion.Linq; +using Remotion.Linq.Clauses.ResultOperators; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; + +namespace Semiodesk.Trinity.Query +{ + internal class SparqlQueryExecutor : IQueryExecutor + { + #region Members + + protected IModel Model { get; private set; } + + // A handle to the generic version of the GetResources method which is being used + // for implementing the ExecuteCollection(QueryModel) method that supports runtime type specification. 
+ private MethodInfo _getResourceMethod; + + private bool _inferenceEnabled; + + #endregion + + #region Constructors + + public SparqlQueryExecutor(IModel model, bool inferenceEnabled) + { + Model = model; + + _inferenceEnabled = inferenceEnabled; + + // Searches for the generic method IEnumerable GetResources(ResourceQuery) and saves a handle + // for later use within ExecuteCollection(QueryModel); + _getResourceMethod = model.GetType().GetMethods().FirstOrDefault(m => m.IsGenericMethod && m.Name == "GetResources" && m.GetParameters().Any(p => p.ParameterType == typeof(ISparqlQuery))); + } + + #endregion + + #region Methods + + public IEnumerable ExecuteCollection(QueryModel queryModel) + { + Type t = queryModel.SelectClause.Selector.Type; + + if(typeof(Resource).IsAssignableFrom(t)) + { + // Handle queries which return instances of resources. + SparqlQueryModelVisitor visitor = new SparqlQueryModelVisitor(new SelectTriplesQueryGenerator()); + visitor.VisitQueryModel(queryModel); + + MethodInfo getResources = _getResourceMethod.MakeGenericMethod(typeof(T)); + object[] args = new object[] { visitor.GetQuery(), _inferenceEnabled, null }; + + foreach (T value in getResources.Invoke(Model, args) as IEnumerable) + { + yield return value; + } + } + else + { + // Handle queries which return value type objects. + SparqlQueryModelVisitor visitor = new SparqlQueryModelVisitor(new SelectBindingsQueryGenerator()); + visitor.VisitQueryModel(queryModel); + + ISparqlQuery query = visitor.GetQuery(); + ISparqlQueryResult result = Model.ExecuteQuery(query, _inferenceEnabled); + + // TODO: This works correctly for single bindings, check with multiple bindings. + foreach(BindingSet bindings in result.GetBindings()) + { + foreach(var value in bindings.Values.OfType()) + { + yield return value; + } + } + } + } + + public T ExecuteSingle(QueryModel queryModel, bool returnDefaultWhenEmpty) + { + var sequence = ExecuteCollection(queryModel); + + return returnDefaultWhenEmpty ? 
sequence.SingleOrDefault() : sequence.Single(); + } + + public T ExecuteScalar(QueryModel queryModel) + { + Type t = typeof(T); + + if(t == typeof(bool)) + { + // Generate and execute ASK query. + SparqlQueryModelVisitor visitor = new SparqlQueryModelVisitor(new AskQueryGenerator()); + visitor.VisitQueryModel(queryModel); + + ISparqlQuery query = visitor.GetQuery(); + ISparqlQueryResult result = Model.ExecuteQuery(query, _inferenceEnabled); + + return new object[] { result.GetAnwser() }.OfType().First(); + } + else if(queryModel.ResultOperators.Any(o => o is CountResultOperator)) + { + SparqlQueryModelVisitor visitor = new SparqlQueryModelVisitor(new SelectBindingsQueryGenerator()); + visitor.VisitQueryModel(queryModel); + + ISparqlQuery query = visitor.GetQuery(); + ISparqlQueryResult result = Model.ExecuteQuery(query, _inferenceEnabled); + + BindingSet b = result.GetBindings().FirstOrDefault(); + + if(b != null) + { + return new object[] { b.First().Value }.OfType().First(); + } + else + { + return new object[] { 0 }.OfType().First(); + } + } + else + { + // Unknown scalar type. 
+ throw new NotImplementedException(); + } + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/SparqlQueryGenerator.cs b/Trinity/Query/Linq/SparqlQueryGenerator.cs new file mode 100644 index 0000000..6369627 --- /dev/null +++ b/Trinity/Query/Linq/SparqlQueryGenerator.cs @@ -0,0 +1,711 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Clauses; +using Remotion.Linq.Clauses.ResultOperators; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using VDS.RDF.Query; +using VDS.RDF.Query.Builder; +using VDS.RDF.Query.Builder.Expressions; +using VDS.RDF.Query.Patterns; + +namespace Semiodesk.Trinity.Query +{ + internal class SparqlQueryGenerator : ISparqlQueryGenerator + { + #region Members + + public bool IsRoot { get; protected set; } + + public bool IsBound { get; private set; } + + public QueryModel QueryModel { get; private set; } + + public SparqlVariable SubjectVariable { get; private set; } + + public SparqlVariable ObjectVariable { get; private set; } + + public IList SelectedVariables { get; private set; } + + protected Dictionary CoalescedVariables { get; private set; } + + public ISparqlVariableGenerator VariableGenerator { get; protected set; } + + protected ISparqlQueryGeneratorTree QueryGeneratorTree; + + public IQueryBuilder QueryBuilder { get; set; } + + protected ISelectBuilder SelectBuilder; + + public IGraphPatternBuilder PatternBuilder { get; set; } + + public ISparqlQueryGenerator ParentGenerator { get; set; } + + #endregion + + #region Constructors + + public SparqlQueryGenerator(IQueryBuilder queryBuilder) + { + SelectedVariables = new List(); + CoalescedVariables = new Dictionary(); + QueryBuilder = queryBuilder; + PatternBuilder = QueryBuilder.RootGraphPatternBuilder; + } + + public SparqlQueryGenerator(ISelectBuilder selectBuilder) + { + SelectedVariables = new List(); + CoalescedVariables = new Dictionary(); + SelectBuilder = selectBuilder; + QueryBuilder = selectBuilder.GetQueryBuilder(); + PatternBuilder = QueryBuilder.RootGraphPatternBuilder; + } + + #endregion + + #region Methods + + public string BuildQuery() + { + if(!IsBound) + { + 
BindSelectVariables();
+ }
+
+ return QueryBuilder.BuildQuery().ToString();
+ }
+
+ public void BindSelectVariables()
+ {
+ IsBound = true;
+
+ if (SelectBuilder != null)
+ {
+ bool hasAggregate = SelectedVariables.Any(v => v.IsAggregate);
+
+ foreach (SparqlVariable v in SelectedVariables)
+ {
+ if(CoalescedVariables.ContainsKey(v))
+ {
+ SparqlExpression defaultValue = CoalescedVariables[v];
+
+ SelectBuilder.And(e => e.Coalesce(e.Variable(v.Name), defaultValue)).As(v.Name + '_');
+ }
+ else
+ {
+ SelectBuilder.And(v);
+ }
+
+ if (hasAggregate && !v.IsAggregate)
+ {
+ QueryBuilder.GroupBy(v.Name);
+ }
+ }
+
+ if(hasAggregate && !IsRoot)
+ {
+ QueryBuilder.Distinct();
+ }
+ }
+ }
+
+ ///
+ /// Builds the triples required to access a given member and associates its value with a variable.
+ ///
+ /// The member to be accessed.
+ /// The object variable associated with the member value.
+ protected SparqlVariable BuildMemberAccess(MemberExpression memberExpression)
+ {
+ var requiredBuilder = PatternBuilder;
+
+ var so = BuildMemberAccess(memberExpression, requiredBuilder);
+
+ return so;
+ }
+
+ ///
+ /// Builds the triples required to access a given member and associates its value with a variable.
+ ///
+ /// The member to be accessed.
+ /// The object variable associated with the member value.
+ protected SparqlVariable BuildMemberAccessOptional(MemberExpression memberExpression)
+ {
+ var optionalBuilder = new GraphPatternBuilder(GraphPatternType.Optional);
+
+ var so = BuildMemberAccess(memberExpression, optionalBuilder);
+
+ Child(optionalBuilder);
+
+ return so;
+ }
+
+ private SparqlVariable BuildMemberAccess(MemberExpression memberExpression, IGraphPatternBuilder patternBuilder)
+ {
+ MemberInfo member = memberExpression.Member;
+
+ // If we do access a member of a system type, like string.Length we actually select
+ // the declaring member and invoke a SPARQL built in call to get the value.
+ if (member.IsBuiltInCall()) + { + MemberExpression parentExpression = memberExpression.Expression as MemberExpression; + + return BuildMemberAccess(parentExpression, patternBuilder); + } + else if (memberExpression.Expression is MemberExpression) + { + MemberExpression parentExpression = memberExpression.Expression as MemberExpression; + + // Note: When we build an optional property path, we consider the relation to the + // parent properties of the accessed property to be non-optional. + IGraphPatternBuilder builder = member.IsUriType() ? patternBuilder : PatternBuilder; + + // We might encounter property paths (i.e. contact.Organization.Name). Therefore, + // implement the parent expression of the current member recursively.. + SparqlVariable po = BuildMemberAccess(parentExpression, builder); + + // If we are building a node on a property path (parentExpression != null), we associate + // the object variable with the parent expression so that it becomes the subject of the parent. + VariableGenerator.SetSubjectVariable(memberExpression, po); + } + + if(member.IsUriType()) + { + // When we access the .Uri member of a resource we do not need a property mapping and return the subject as the bound variable. + + // We create a triple pattern describing the resource in the local scope just in case it has not been described yet. + // Todo: Improve. Check if triples actually need to be asserted. + SparqlVariable s = VariableGenerator.TryGetSubjectVariable(memberExpression) ?? SubjectVariable; + SparqlVariable p = VariableGenerator.CreatePredicateVariable(); + SparqlVariable o = VariableGenerator.CreateObjectVariable(memberExpression); + + patternBuilder.Where(t => t.Subject(s).Predicate(p).Object(o)); + + VariableGenerator.SetSubjectVariable(memberExpression, s); + + return s; + } + else + { + SparqlVariable s = VariableGenerator.TryGetSubjectVariable(memberExpression) ?? SubjectVariable; + SparqlVariable o = VariableGenerator.TryGetObjectVariable(memberExpression) ?? 
VariableGenerator.CreateObjectVariable(memberExpression); + + // Now we have handled parent properties, built-in calls (.Length) and accesses to .Uri + RdfPropertyAttribute p = memberExpression.TryGetRdfPropertyAttribute(); + + if (p == null) + { + throw new Exception(string.Format("No RdfPropertyAttribute found for member: {0}", member.Name)); + } + + // Invoke the final user-handled member access triple builder callback. + patternBuilder.Where(t => t.Subject(s).PredicateUri(p.MappedUri).Object(o)); + + return o; + } + } + + protected void BuildBuiltInCall(MemberExpression memberExpression, Func buildFilter) + { + SparqlVariable o = VariableGenerator.TryGetObjectVariable(memberExpression.Expression) ?? ObjectVariable; + + MemberInfo member = memberExpression.Member; + + if (member.DeclaringType == typeof(String)) + { + switch (member.Name) + { + case "Length": + PatternBuilder.Filter(e => buildFilter(e.StrLen(e.Variable(o.Name)))); + break; + default: + throw new NotSupportedException(memberExpression.ToString()); + } + } + else if (member.DeclaringType == typeof(DateTime)) + { + // TODO: YEAR, MONTH, DAY, HOURS, MINUTES, SECONDS, TIMEZONE, TZ + throw new NotImplementedException(member.DeclaringType.ToString()); + } + } + + public virtual void SetObjectOperator(ResultOperatorBase resultOperator) {} + + public void SetObjectVariable(SparqlVariable v, bool select = false) + { + if (v == null) return; + + // If the new variable is to be selected, deselect the previous variable first. + if (select) + { + DeselectVariable(ObjectVariable); + SelectVariable(v); + } + + ObjectVariable = v; + } + + public virtual void SetSubjectOperator(ResultOperatorBase resultOperator) {} + + public void SetSubjectVariable(SparqlVariable v, bool select = false) + { + if (v == null) return; + + // If the new variable is to be selected, deselect the previous variable first. 
+ if (select) + { + DeselectVariable(SubjectVariable); + SelectVariable(v); + } + + SubjectVariable = v; + } + + public void DeselectVariable(SparqlVariable v) + { + if (SelectBuilder != null) + { + if (v != null && SelectedVariables.Contains(v)) + { + SelectedVariables.Remove(v); + } + } + else + { + string msg = "Cannot deselect variables with non-SELECT query type."; + throw new Exception(msg); + } + } + + public void SelectVariable(SparqlVariable v) + { + if(SelectBuilder != null) + { + if (v != null && !SelectedVariables.Any(x => x.Name == v.Name)) + { + SelectedVariables.Add(v); + } + } + else + { + string msg = "Cannot select variables with non-SELECT query type."; + throw new Exception(msg); + } + } + + public bool IsSelectedVariable(SparqlVariable v) + { + return v != null && SelectBuilder != null && SelectedVariables.Contains(v); + } + + public void Where(MemberExpression member, SparqlVariable v) + { + if(CoalescedVariables.ContainsKey(v)) + { + // If the member may be unbound, we create an optional binding. + BuildMemberAccessOptional(member); + } + else + { + // Otherwise we create a normal binding. + BuildMemberAccess(member); + } + } + + public void WhereEqual(SparqlVariable v, ConstantExpression c) + { + if (c.Value == null) + { + PatternBuilder.Filter(e => !e.Bound(v.Name)); + } + else if(c.Type.IsValueType || c.Type == typeof(string)) + { + PatternBuilder.Filter(e => e.Variable(v.Name) == c.AsLiteralExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(v.Name) == c.AsIriExpression()); + } + } + + public void WhereEqual(MemberExpression expression, ConstantExpression c) + { + if (c.Value == null) + { + // If we want to filter for non-bound values we need to mark the properties as optional. + SparqlVariable so = BuildMemberAccessOptional(expression); + + // TODO: If we filter a resource, make sure it has been described with variables in the local scope. + + // Comparing with null means the variable is not bound. 
+ PatternBuilder.Filter(e => !e.Bound(so.Name)); + } + else if(c.Type.IsValueType || c.Type == typeof(string)) + { + if(expression.Member.DeclaringType == typeof(string)) + { + BuildMemberAccess(expression); + + // If we are comparing a property of string we need to implement SPARQL built-in call on the variable such as STRLEN.. + BuildBuiltInCall(expression, e => e == c.AsNumericExpression()); + } + else + { + // TODO: The default value for a property may be overridden with the DefaultValue attribute. + object defaultValue = TypeHelper.GetDefaultValue(c.Type); + + // If the value IS the default value, WhereEquals includes the default value and therefore includes non-bound values.. + if (c.Value.Equals(defaultValue)) + { + // If we want to filter for non-bound values we need to mark the properties as optional. + SparqlVariable o = BuildMemberAccessOptional(expression); + + LiteralExpression literalExpression = c.AsLiteralExpression(); + + // Mark the variable to be coalesced with the default value when selected. + CoalescedVariables[o] = literalExpression; + + // Comparing with null means the variable is not bound. + PatternBuilder.Filter(e => e.Variable(o.Name) == literalExpression || !e.Bound(o.Name)); + } + else + { + // If we want to filter bound literal values, we still write them into a variable so they can be selected. + SparqlVariable o = BuildMemberAccess(expression); + + PatternBuilder.Filter(e => e.Variable(o.Name) == c.AsLiteralExpression()); + } + } + } + else + { + // We are comparing reference types / resources against a bound value here. + SparqlVariable so = BuildMemberAccess(expression); + + // TODO: If we filter a resource, make sure it has been described with variables in the local scope. 
+ + PatternBuilder.Filter(e => e.Variable(so.Name) == c.AsIriExpression()); + } + } + + public void WhereNotEqual(SparqlVariable v, ConstantExpression c) + { + if(c.Value == null) + { + PatternBuilder.Filter(e => e.Bound(v.Name)); + } + else if(c.Type.IsValueType || c.Type == typeof(string)) + { + PatternBuilder.Filter(e => e.Variable(v.Name) != c.AsLiteralExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(v.Name) != c.AsIriExpression()); + } + } + + public void WhereNotEqual(MemberExpression expression, ConstantExpression c) + { + if (c.Value == null) + { + // If we want to filter for non-bound values we need to mark the properties as optional. + SparqlVariable o = BuildMemberAccessOptional(expression); + + // Comparing with null means the variable is not bound. + PatternBuilder.Filter(e => e.Bound(o.Name)); + } + else if (c.Type.IsValueType || c.Type == typeof(string)) + { + if (expression.Member.DeclaringType == typeof(string)) + { + BuildMemberAccess(expression); + + // If we are comparing a property of string we need to implement SPARQL built-in call on the variable such as STRLEN.. + BuildBuiltInCall(expression, e => e != c.AsNumericExpression()); + } + else + { + // TODO: The default value for a property may be overridden with the DefaultValue attribute. + object defaultValue = TypeHelper.GetDefaultValue(c.Type); + + // If the value is NOT the default value, WhereNotEquals includes the default value and therefore includes non-bound values.. + if (!c.Value.Equals(defaultValue)) + { + // If we want to filter for non-bound values we need to mark the properties as optional. + SparqlVariable o = BuildMemberAccessOptional(expression); + + LiteralExpression literalExpression = c.AsLiteralExpression(); + + // Mark the variable to be coalesced with the default value when selected. + CoalescedVariables[o] = literalExpression; + + // Comparing with null means the variable is not bound. 
+ PatternBuilder.Filter(e => e.Variable(o.Name) != literalExpression || !e.Bound(o.Name)); + } + else + { + // If we want to filter bound literal values, we still write them into a variable so they can be selected. + SparqlVariable o = BuildMemberAccess(expression); + + PatternBuilder.Filter(e => e.Variable(o.Name) != c.AsLiteralExpression()); + } + } + } + else + { + // We are comparing reference types /resource against a bound value here. + // Note: If the compared values must not be equal, then the comapred value might also be not bound (optional). + SparqlVariable o = BuildMemberAccessOptional(expression); + + PatternBuilder.Filter(e => e.Variable(o.Name) != c.AsIriExpression()); + } + } + + public void WhereGreaterThan(SparqlVariable v, ConstantExpression c) + { + PatternBuilder.Filter(e => e.Variable(v.Name) > c.AsNumericExpression()); + } + + public void WhereGreaterThan(MemberExpression expression, ConstantExpression c) + { + SparqlVariable o = BuildMemberAccess(expression); + + if (expression.Member.IsBuiltInCall()) + { + BuildBuiltInCall(expression, e => e > c.AsNumericExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(o.Name) > c.AsNumericExpression()); + } + } + + public void WhereGreaterThanOrEqual(SparqlVariable v, ConstantExpression c) + { + PatternBuilder.Filter(e => e.Variable(v.Name) >= new LiteralExpression(c.AsSparqlExpression())); + } + + public void WhereGreaterThanOrEqual(MemberExpression expression, ConstantExpression c) + { + SparqlVariable o = BuildMemberAccess(expression); + + if (expression.Member.IsBuiltInCall()) + { + BuildBuiltInCall(expression, e => e >= c.AsNumericExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(o.Name) >= c.AsNumericExpression()); + } + } + + public void WhereLessThan(SparqlVariable v, ConstantExpression c) + { + PatternBuilder.Filter(e => e.Variable(v.Name) < new LiteralExpression(c.AsSparqlExpression())); + } + + public void WhereLessThan(MemberExpression expression, 
ConstantExpression c) + { + SparqlVariable o = BuildMemberAccess(expression); + + if (expression.Member.IsBuiltInCall()) + { + BuildBuiltInCall(expression, e => e < c.AsNumericExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(o.Name) < c.AsNumericExpression()); + } + } + + public void WhereLessThanOrEqual(SparqlVariable v, ConstantExpression c) + { + PatternBuilder.Filter(e => e.Variable(v.Name) <= new LiteralExpression(c.AsSparqlExpression())); + } + + public void WhereLessThanOrEqual(MemberExpression expression, ConstantExpression c) + { + SparqlVariable o = BuildMemberAccess(expression); + + if (expression.Member.IsBuiltInCall()) + { + BuildBuiltInCall(expression, e => e <= c.AsNumericExpression()); + } + else + { + PatternBuilder.Filter(e => e.Variable(o.Name) <= c.AsNumericExpression()); + } + } + + public void FilterRegex(SparqlVariable v, string pattern, bool ignoreCase) + { + if (ignoreCase) + { + PatternBuilder.Filter(e => e.Regex(e.Variable(v.Name), pattern, "i")); + } + else + { + PatternBuilder.Filter(e => e.Regex(e.Variable(v.Name), pattern)); + } + } + + public void FilterRegex(MemberExpression expression, string pattern, bool ignoreCase) + { + SparqlVariable s = VariableGenerator.TryGetSubjectVariable(expression) ?? SubjectVariable; + SparqlVariable o = VariableGenerator.TryGetObjectVariable(expression) ?? VariableGenerator.CreateObjectVariable(expression); + + BuildMemberAccess(expression); + + FilterRegex(o, pattern, ignoreCase); + } + + public void WhereResource(SparqlVariable s, SparqlVariable p, SparqlVariable o) + { + PatternBuilder.Where(t => t.Subject(s).Predicate(p).Object(o)); + } + + public void WhereResource(Expression expression, SparqlVariable p, SparqlVariable o) + { + SparqlVariable s = VariableGenerator.TryGetSubjectVariable(expression) ?? 
SubjectVariable; + + PatternBuilder.Where(t => t.Subject(s).Predicate(p).Object(o)); + } + + public void WhereResourceOfType(Expression expression, Type type) + { + SparqlVariable s = VariableGenerator.TryGetSubjectVariable(expression) ?? SubjectVariable; + + WhereResourceOfType(s, type); + } + + public void WhereResourceOfType(SparqlVariable s, Type type) + { + RdfClassAttribute t = type.TryGetCustomAttribute(); + + if (t != null) + { + Uri a = new Uri("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"); + + PatternBuilder.Where(e => e.Subject(s).PredicateUri(a).Object(t.MappedUri)); + } + } + + public void OrderBy(SparqlVariable v) + { + QueryBuilder.OrderBy(v.Name); + } + + public void OrderByDescending(SparqlVariable v) + { + QueryBuilder.OrderByDescending(v.Name); + } + + public void Offset(int offset) + { + QueryBuilder.Offset(offset); + } + + public void Limit(int limit) + { + QueryBuilder.Limit(limit); + } + + public void Union(GraphPatternBuilder firstBuilder, params GraphPatternBuilder[] otherBuilders) + { + PatternBuilder.Union(firstBuilder, otherBuilders); + } + + public void Union(Action buildFirstPattern, params Action[] buildOtherPatterns) + { + PatternBuilder.Union(buildFirstPattern, buildOtherPatterns); + } + + public IGraphPatternBuilder Child(ISparqlQueryGenerator generator) + { + generator.BindSelectVariables(); + + var subQuery = generator.QueryBuilder.BuildQuery(); + + var childBuilder = new GraphPatternBuilder(); + childBuilder.Where(new SubQueryPattern(subQuery)); + + // Note: This sets the enclosing pattern builder as the current pattern + // builder in order to build subsequent FILTERs on the selected variables + // into the enclosing block, rather than the parent query. This is because + // for OpenLink Virtuoso the FILTERs need to be inside the enclosing group + // of the subquery.. 
+ generator.PatternBuilder = childBuilder; + + return PatternBuilder.Child(childBuilder); + } + + public IGraphPatternBuilder Child(GraphPatternBuilder patternBuilder) + { + return PatternBuilder.Child(patternBuilder); + } + + public void SetQueryContext(ISparqlQueryGeneratorTree generatorTree, QueryModel queryModel) + { + QueryModel = queryModel; + QueryGeneratorTree = generatorTree; + } + + public virtual void OnBeforeFromClauseVisited(Expression expression) + { + // This is a workaround for a bug in OpenLink Virtuoso where it throws an exception + // when it receives a SPARQL query with a OFFSET but not LIMIT clause. + if (QueryModel.HasResultOperator() && !QueryModel.HasResultOperator()) + { + SkipResultOperator op = QueryModel.ResultOperators.OfType().First(); + + if (int.Parse(op.Count.ToString()) > 0) + { + QueryModel.ResultOperators.Insert(0, new TakeResultOperator(Expression.Constant(int.MaxValue))); + } + } + } + + public virtual void OnFromClauseVisited(Expression expression) + { + } + + public virtual void OnBeforeSelectClauseVisited(Expression selector) + { + } + + public virtual void OnSelectClauseVisited(Expression selector) + { + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/SparqlQueryGeneratorTree.cs b/Trinity/Query/Linq/SparqlQueryGeneratorTree.cs new file mode 100644 index 0000000..3cac882 --- /dev/null +++ b/Trinity/Query/Linq/SparqlQueryGeneratorTree.cs @@ -0,0 +1,151 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies 
or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Clauses.Expressions; +using System; +using System.Collections.Generic; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal class SparqlQueryGeneratorTree : ISparqlQueryGeneratorTree + { + #region Members + + public ISparqlQueryGenerator CurrentGenerator { get; set; } + + public ISparqlQueryGenerator RootGenerator { get; private set; } + + private readonly Dictionary> _generatorTree = new Dictionary>(); + + private readonly Dictionary _expressionGenerators = new Dictionary(); + + #endregion + + #region Constructors + + public SparqlQueryGeneratorTree(ISparqlQueryGenerator root) + { + // The query generator of the outermost query. + RootGenerator = root; + + // The current (sub-)query generator. 
+ CurrentGenerator = root; + } + + #endregion + + #region Methods + + private string GetKey(Expression expression) + { + return expression.ToString(); + } + + public void Bind() + { + Bind(RootGenerator); + } + + private void Bind(ISparqlQueryGenerator generator) + { + generator.BindSelectVariables(); + + if (_generatorTree.ContainsKey(generator)) + { + foreach (ISparqlQueryGenerator g in _generatorTree[generator]) + { + Bind(g); + } + } + } + + public ISparqlQueryGenerator CreateSubQueryGenerator(ISparqlQueryGenerator parent, Expression expression) + { + if (parent == null) throw new ArgumentNullException("parent"); + if (expression == null) throw new ArgumentNullException("expression"); + + ISparqlQueryGenerator g = new SubSelectQueryGenerator(parent); + g.SetQueryContext(this, parent.QueryModel); + + RegisterQueryExpression(g, expression); + + if (CurrentGenerator != null) + { + AddSubQueryGenerator(CurrentGenerator, g); + } + else if (RootGenerator != null) + { + AddSubQueryGenerator(RootGenerator, g); + } + + return g; + } + + private void AddSubQueryGenerator(ISparqlQueryGenerator parent, ISparqlQueryGenerator child) + { + // Add the sub query to the query tree. 
+ if (_generatorTree.ContainsKey(parent)) + { + _generatorTree[parent].Add(child); + } + else + { + _generatorTree[parent] = new List() { child }; + } + + child.ParentGenerator = parent; + } + + public void RegisterQueryExpression(ISparqlQueryGenerator generator, Expression expression) + { + string key = GetKey(expression); + + if (!_expressionGenerators.ContainsKey(key)) + { + _expressionGenerators[key] = generator; + } + } + + public bool HasQueryGenerator(Expression expression) + { + string key = GetKey(expression); + + return _expressionGenerators.ContainsKey(key); + } + + public ISparqlQueryGenerator GetQueryGenerator(Expression expression) + { + string key = GetKey(expression); + + return _expressionGenerators[key]; + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/SparqlQueryModelVisitor.cs b/Trinity/Query/Linq/SparqlQueryModelVisitor.cs new file mode 100644 index 0000000..c9d09b5 --- /dev/null +++ b/Trinity/Query/Linq/SparqlQueryModelVisitor.cs @@ -0,0 +1,184 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Clauses; +using System; +using System.Collections.Generic; +using System.Diagnostics; + +namespace Semiodesk.Trinity.Query +{ + /// + /// Generates a SPARQL query from a LINQ query model by visiting all clauses and invoking + /// expression implementation using a ExpressionTreeVisitor. + /// + /// The result type. + internal class SparqlQueryModelVisitor : QueryModelVisitorBase, ISparqlQueryModelVisitor + { + #region Members + + /// + /// Allows to access query generators and sub query generators in a tree-like fashion. + /// + protected readonly ISparqlQueryGeneratorTree QueryGeneratorTree; + + /// + /// A common variable name generator for all query generators. + /// + //protected readonly SparqlVariableGenerator VariableGenerator = new SparqlVariableGenerator(null); + + /// + /// Visits all expressions in a query model and handles the query generation. + /// + protected readonly ExpressionTreeVisitor ExpressionVisitor; + + #endregion + + #region Constructors + + public SparqlQueryModelVisitor(ISparqlQueryGenerator queryGenerator) + { + // Add the root query builder to the query tree. + QueryGeneratorTree = new SparqlQueryGeneratorTree(queryGenerator); + + // The expression tree visitor needs to be initialized *after* the query builders. 
+ ExpressionVisitor = new ExpressionTreeVisitor(this, QueryGeneratorTree); + } + + #endregion + + #region Methods + + public override void VisitAdditionalFromClause(AdditionalFromClause fromClause, QueryModel queryModel, int index) + { + throw new NotSupportedException(); + } + + public override void VisitGroupJoinClause(GroupJoinClause groupJoinClause, QueryModel queryModel, int index) + { + throw new NotSupportedException(); + } + + public override void VisitJoinClause(JoinClause joinClause, QueryModel queryModel, int index) + { + throw new NotSupportedException(); + } + + public override void VisitJoinClause(JoinClause joinClause, QueryModel queryModel, GroupJoinClause groupJoinClause) + { + throw new NotSupportedException(); + } + + public override void VisitMainFromClause(MainFromClause fromClause, QueryModel queryModel) + { + ISparqlQueryGenerator currentGenerator = QueryGeneratorTree.CurrentGenerator; + + currentGenerator.OnBeforeFromClauseVisited(fromClause.FromExpression); + + ExpressionVisitor.VisitFromExpression(fromClause.FromExpression, fromClause.ItemName, fromClause.ItemType); + + currentGenerator.OnFromClauseVisited(fromClause.FromExpression); + } + + public override void VisitQueryModel(QueryModel queryModel) + { + ISparqlQueryGenerator g = QueryGeneratorTree.CurrentGenerator; + + g.SetQueryContext(QueryGeneratorTree, queryModel); + + // Handle the main from clause before the select. + queryModel.MainFromClause.Accept(this, queryModel); + + // This possibly traverses into sub-queries. + queryModel.SelectClause.Accept(this, queryModel); + } + + public override void VisitResultOperator(ResultOperatorBase resultOperator, QueryModel queryModel, int index) + { + ISparqlQueryGenerator generator = QueryGeneratorTree.CurrentGenerator; + + // If we are in a sub query, apply the operator on the query object. 
+ if(generator.ObjectVariable != null) + { + generator.SetObjectOperator(resultOperator); + } + else if(generator.SubjectVariable != null && generator.IsRoot) + { + generator.SetSubjectOperator(resultOperator); + } + } + + public override void VisitSelectClause(SelectClause selectClause, QueryModel queryModel) + { + ISparqlQueryGenerator currentGenerator = QueryGeneratorTree.CurrentGenerator; + + currentGenerator.OnBeforeSelectClauseVisited(selectClause.Selector); + + for (int i = 0; i < queryModel.BodyClauses.Count; i++) + { + IBodyClause c = queryModel.BodyClauses[i]; + + c.Accept(this, queryModel, i); + } + + for(int i = 0; i < queryModel.ResultOperators.Count; i++) + { + ResultOperatorBase o = queryModel.ResultOperators[i]; + + o.Accept(this, queryModel, i); + } + + currentGenerator.OnSelectClauseVisited(selectClause.Selector); + } + + public override void VisitWhereClause(WhereClause whereClause, QueryModel queryModel, int index) + { + ExpressionVisitor.Visit(whereClause.Predicate); + } + + public override void VisitOrdering(Ordering ordering, QueryModel queryModel, OrderByClause orderByClause, int index) + { + base.VisitOrdering(ordering, queryModel, orderByClause, index); + + ExpressionVisitor.VisitOrdering(ordering, index); + } + + public ISparqlQuery GetQuery() + { + string queryString = QueryGeneratorTree.RootGenerator.BuildQuery(); + + ISparqlQuery query = new SparqlQuery(queryString); + + Debug.WriteLine(query.ToString()); + + return query; + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/SparqlQueryable.cs b/Trinity/Query/Linq/SparqlQueryable.cs new file mode 100644 index 0000000..63faf9f --- /dev/null +++ b/Trinity/Query/Linq/SparqlQueryable.cs @@ -0,0 +1,51 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq; +using Remotion.Linq.Parsing.Structure; +using System.Linq; +using System.Linq.Expressions; + +namespace Semiodesk.Trinity.Query +{ + internal class SparqlQueryable : QueryableBase + { + #region Constructors + + public SparqlQueryable(IQueryParser queryParser, IQueryExecutor queryExecutor) + : base(new DefaultQueryProvider(typeof(SparqlQueryable<>), queryParser, queryExecutor)) + { + } + + public SparqlQueryable(IQueryProvider provider, Expression expression) + : base(provider, expression) + { + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/SparqlVariableGenerator.cs b/Trinity/Query/Linq/SparqlVariableGenerator.cs new file mode 100644 index 0000000..03a16ce --- /dev/null +++ b/Trinity/Query/Linq/SparqlVariableGenerator.cs @@ -0,0 +1,264 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2017 + +using Remotion.Linq.Clauses.Expressions; +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using VDS.RDF.Query; + +namespace Semiodesk.Trinity.Query +{ + internal class SparqlVariableGenerator : ISparqlVariableGenerator + { + #region Members + + public Dictionary VariableCounters { get; private set; } + + public SparqlVariable GlobalSubject { get; } = new SparqlVariable("s_"); + + public SparqlVariable GlobalPredicate { get; } = new SparqlVariable("p_"); + + public SparqlVariable GlobalObject { get; } = new SparqlVariable("o_"); + + private readonly Dictionary _subjectVariables = new Dictionary(); + + private readonly Dictionary _predicateVariables = new Dictionary(); + + private readonly Dictionary _objectVariables = new Dictionary(); + + private readonly Dictionary _expressionMappings = new Dictionary(); + + private readonly ISparqlVariableGenerator _parentGenerator; + + #endregion + + #region Constructors + + public SparqlVariableGenerator(ISparqlVariableGenerator parent) + { + _parentGenerator = parent; + + if(_parentGenerator != 
null) + { + VariableCounters = _parentGenerator.VariableCounters; + } + else + { + VariableCounters = new Dictionary(); + } + } + + #endregion + + #region Methods + + private string GetNextAvailableVariableName(string name) + { + int n = 0; + + if (VariableCounters.ContainsKey(name)) + { + n = VariableCounters[name] + 1; + } + + VariableCounters[name] = n; + + return name + n; + } + + private string GetKey(Expression expression) + { + //return expression.ToString().Trim(); + + string key = expression.ToString().Trim(); + + if (key.EndsWith(".Uri")) + { + key = key.Substring(0, key.LastIndexOf(".Uri")); + } + + return key; + } + + public void AddVariableMapping(Expression expression, string alias) + { + if(!string.IsNullOrEmpty(alias)) + { + string sourceKey = string.Format("[{0}]", alias); + string targetKey = GetKey(expression); + + _expressionMappings[sourceKey] = targetKey; + + if (_subjectVariables.ContainsKey(targetKey)) + { + _objectVariables[sourceKey] = _subjectVariables[targetKey]; + } + + if (_objectVariables.ContainsKey(targetKey)) + { + _subjectVariables[sourceKey] = _objectVariables[targetKey]; + } + } + else + { + throw new ArgumentNullException("alias"); + } + } + + public bool HasSubjectVariable(Expression expression) + { + return TryGetSubjectVariable(expression) != null; + } + + public bool HasPredicateVariable(Expression expression) + { + return TryGetPredicateVariable(expression) != null; + } + + public bool HasObjectVariable(Expression expression) + { + return TryGetObjectVariable(expression) != null; + } + + private SparqlVariable TryGetVariable(Dictionary source, params Expression[] expressions) + { + foreach(Expression expression in expressions) + { + string primaryKey = GetKey(expression); + + if (source.ContainsKey(primaryKey)) + { + return source[primaryKey]; + } + else if(_expressionMappings.ContainsKey(primaryKey)) + { + string mappedKey = _expressionMappings[primaryKey]; + + if(source.ContainsKey(mappedKey)) + { + return 
source[mappedKey]; + } + } + } + + return null; + } + + /// + /// Get a variable from an expression that can be used as a subject in triple patterns and represents resources. + /// + /// + /// + public SparqlVariable TryGetSubjectVariable(Expression expression) + { + if (expression is MemberExpression) + { + QuerySourceReferenceExpression sourceExpression = expression.TryGetQuerySourceReference(); + + return TryGetVariable(_subjectVariables, expression, sourceExpression) ?? _parentGenerator?.TryGetSubjectVariable(expression); + } + else + { + // For instances of ConstantExpression, QuerySourceReferenceExpression and SubQueryExpression there must be a direct mapping. + return TryGetVariable(_subjectVariables, expression) ?? _parentGenerator?.TryGetSubjectVariable(expression); + } + } + + public SparqlVariable TryGetPredicateVariable(Expression expression) + { + return TryGetVariable(_predicateVariables, expression) ?? _parentGenerator?.TryGetPredicateVariable(expression); + } + + public SparqlVariable TryGetObjectVariable(Expression expression) + { + return TryGetVariable(_objectVariables, expression) ?? 
_parentGenerator?.TryGetObjectVariable(expression); + } + + public void SetSubjectVariable(Expression expression, SparqlVariable s) + { + string key = GetKey(expression); + + if (!_subjectVariables.ContainsKey(key) || _subjectVariables[key] != s) + { + _subjectVariables[key] = s; + } + } + + public void SetPredicateVariable(Expression expression, SparqlVariable p) + { + string key = GetKey(expression); + + if (!_predicateVariables.ContainsKey(key) || _predicateVariables[key] != p) + { + _predicateVariables[key] = p; + } + } + + public void SetObjectVariable(Expression expression, SparqlVariable o) + { + string key = GetKey(expression); + + if(!_objectVariables.ContainsKey(key)) + { + _objectVariables[key] = o; + } + } + + public SparqlVariable CreateSubjectVariable(Expression expression) + { + SparqlVariable s = new SparqlVariable(GetNextAvailableVariableName("s")); + + SetSubjectVariable(expression, s); + + return s; + } + + // TODO: Should take a MemberExpression as argument. + public SparqlVariable CreatePredicateVariable() + { + return new SparqlVariable(GetNextAvailableVariableName("p")); + } + + // TODO: Deprecated. + public SparqlVariable CreateObjectVariable() + { + return new SparqlVariable(GetNextAvailableVariableName("o")); + } + + public SparqlVariable CreateObjectVariable(Expression expression) + { + SparqlVariable o = new SparqlVariable(GetNextAvailableVariableName("o")); + + SetObjectVariable(expression, o); + + return o; + } + + #endregion + } +} diff --git a/Trinity/Query/Linq/TypeHelper.cs b/Trinity/Query/Linq/TypeHelper.cs new file mode 100644 index 0000000..b9d9ab8 --- /dev/null +++ b/Trinity/Query/Linq/TypeHelper.cs @@ -0,0 +1,12 @@ +using System; + +namespace Semiodesk.Trinity.Query +{ + class TypeHelper + { + public static object GetDefaultValue(Type type) + { + return type.IsValueType ? 
Activator.CreateInstance(type) : null; + } + } +} diff --git a/Trinity/Query/ResourceQueryResult.cs b/Trinity/Query/ResourceQueryResult.cs index d409ca5..51e9881 100644 --- a/Trinity/Query/ResourceQueryResult.cs +++ b/Trinity/Query/ResourceQueryResult.cs @@ -98,34 +98,27 @@ public int Count() return -1; } - public IEnumerable GetResources(int offset = -1, int limit = -1) - { - return GetResources(offset, limit); - } - IEnumerable FetchUris(SparqlQuery query) { - List result = new List(); + HashSet result = new HashSet(); IEnumerable bindings = _model.ExecuteQuery(query, _inferenceEnabled).GetBindings(); - Uri uri = null; - if (bindings != null) - { - foreach (BindingSet binding in bindings) - { - Uri u = binding["s0"] as Uri; - if (u != uri) - { - result.Add(binding["s0"] as Uri); - } + foreach (BindingSet binding in bindings) + { + Uri u = binding["s0"] as Uri; - uri = u; - } + result.Add(u); } + return result; } + public IEnumerable GetResources(int offset = -1, int limit = -1) + { + return GetResources(offset, limit); + } + public IEnumerable GetResources(int offset = -1, int limit = -1) where T : Resource { _query.Offset = offset; @@ -136,12 +129,12 @@ public IEnumerable GetResources(int offset = -1, int limit = -1) where T : SparqlQuery uriQuery = new SparqlQuery(SparqlSerializer.Serialize(_model, _query, true)); StringBuilder uris = new StringBuilder(); + var uriList = FetchUris(uriQuery).ToList(); foreach (Uri u in uriList) { - if(u != null) - uris.Append(SparqlSerializer.SerializeUri(u)); + uris.Append(SparqlSerializer.SerializeUri(u)); } if (!uriList.Any()) diff --git a/Trinity/Resource.cs b/Trinity/Resource.cs index 6576bb0..6702ec1 100644 --- a/Trinity/Resource.cs +++ b/Trinity/Resource.cs @@ -392,7 +392,6 @@ public void AddProperty(Property property, string value, CultureInfo language) AddPropertyToMapping(property, aggregation, false); } - /// /// Add a property with a string and language as value. 
/// If this property is mapped with a compatible type, it will be filled with the given value. @@ -1095,7 +1094,7 @@ public bool HasPropertyMapping(Property property, Type type) /// Rdf property to be tested. /// Type of the mapping. /// - private IPropertyMapping GetPropertyMapping(Property property, Type type) + internal IPropertyMapping GetPropertyMapping(Property property, Type type) { foreach (IPropertyMapping mappingObject in _mappings.Values) { @@ -1108,6 +1107,14 @@ private IPropertyMapping GetPropertyMapping(Property property, Type type) return null; } + internal IPropertyMapping GetPropertyMapping(string propertyName) + { + if (_mappings.ContainsKey(propertyName)) + return _mappings[propertyName]; + else + return null; + } + /// /// Returns the value from the mapped property. /// diff --git a/Trinity/Stores/IStore.cs b/Trinity/Stores/IStore.cs index 238fc68..11f6e38 100644 --- a/Trinity/Stores/IStore.cs +++ b/Trinity/Stores/IStore.cs @@ -27,8 +27,6 @@ using System; using System.Collections.Generic; -using System.Linq; -using System.Text; using System.Data; using System.IO; @@ -128,7 +126,14 @@ public interface IStore : IDisposable /// /// IModelGroup CreateModelGroup(params Uri[] models); - + + /// + /// Creates a model group which allows for queries to be made on multiple models at once. + /// + /// + /// + IModelGroup CreateModelGroup(params IModel[] models); + /// /// Loads a serialized graph from the given location into the current store. See allowed formats. /// @@ -175,7 +180,7 @@ public interface IStore : IDisposable /// Load a specific configuration file. /// If given, this function tries to load the ontologies from this folder. [Obsolete("This method will be removed in the future. 
Use InitializeFromConfiguration() instead.")] - void LoadOntologies(string configPath = null, string sourceDir = null); + void LoadOntologySettings(string configPath = null, string sourceDir = null); #endregion } diff --git a/Trinity/Stores/IStorageSpecific.cs b/Trinity/Stores/IStoreSpecific.cs similarity index 88% rename from Trinity/Stores/IStorageSpecific.cs rename to Trinity/Stores/IStoreSpecific.cs index 46aedee..88c412c 100644 --- a/Trinity/Stores/IStorageSpecific.cs +++ b/Trinity/Stores/IStoreSpecific.cs @@ -25,16 +25,12 @@ // // Copyright (c) Semiodesk GmbH 2015 -using System; -using System.Collections.Generic; using System.ComponentModel; -using System.Linq; -using System.Text; namespace Semiodesk.Trinity { - - public interface IStorageSpecific + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IStoreSpecific { void Update(IStore store); } diff --git a/Trinity/Stores/SparqlEndpoint/SparqlEndpointStorage.cs b/Trinity/Stores/SparqlEndpoint/SparqlEndpointStorage.cs index 2182f37..d477993 100644 --- a/Trinity/Stores/SparqlEndpoint/SparqlEndpointStorage.cs +++ b/Trinity/Stores/SparqlEndpoint/SparqlEndpointStorage.cs @@ -85,13 +85,25 @@ public IModelGroup CreateModelGroup(params Uri[] models) return new ModelGroup(this, modelList); } + public IModelGroup CreateModelGroup(params IModel[] models) + { + List modelList = new List(); + + // This approach might seem a bit redundant, but we want to make sure to get the model from the right store. 
+ foreach (var x in models) + { + this.GetModel(x.Uri); + } + + return new ModelGroup(this, modelList); + } public void InitializeFromConfiguration(string configPath = null, string sourceDir = null) { throw new NotSupportedException(); } - public void LoadOntologies(string configPath = null, string sourceDir = null) + public void LoadOntologySettings(string configPath = null, string sourceDir = null) { throw new NotSupportedException(); } diff --git a/Trinity/Stores/Stardog/StardogQueryResult.cs b/Trinity/Stores/Stardog/StardogQueryResult.cs index d19a1ca..ebd050f 100644 --- a/Trinity/Stores/Stardog/StardogQueryResult.cs +++ b/Trinity/Stores/Stardog/StardogQueryResult.cs @@ -52,6 +52,7 @@ class StardogQueryResult : ISparqlQueryResult #endregion #region Constructor + public StardogQueryResult(StardogStore store, ISparqlQuery query, StardogResultHandler resultHandler) { _resultHandler = resultHandler; @@ -72,8 +73,12 @@ public StardogQueryResult(StardogStore store, ISparqlQuery query, StardogResultH } _query = query; - if( _resultHandler.SparqlResultSet != null) + + if (_resultHandler.SparqlResultSet != null) + { _tripleProvider = new SparqlResultSetTripleProvider(_resultHandler.SparqlResultSet, s, p, o); + } + _model = query.Model; _resultHandler = resultHandler; _store = store; @@ -100,20 +105,23 @@ public bool GetAnwser() public IEnumerable GetBindings() { List result = new List(); + if (_query.QueryType == SparqlQueryType.Select) { foreach (var x in _resultHandler.SparqlResultSet) { BindingSet r = new BindingSet(); + foreach (var y in x) { if (y.Value != null) + { r.Add(y.Key, ParseCellValue(y.Value)); + } } + result.Add(r); } - - } return result; @@ -156,15 +164,14 @@ private IEnumerable GenerateResources() where T : Resource INode s, o; Property p; - s = _tripleProvider.S; predUri = _tripleProvider.P; o = _tripleProvider.O; + _tripleProvider.SetNext(); p = OntologyDiscovery.GetProperty(predUri); - if (s is IUriNode) { Uri sUri = (s as IUriNode).Uri; @@ -199,8 
+206,7 @@ private IEnumerable GenerateResources() where T : Resource catch { #if DEBUG - Debug.WriteLine("[SparqlQueryResult] Info: Could not create resource " + - sUri.OriginalString); + Debug.WriteLine("[SparqlQueryResult] Info: Could not create resource " + sUri.OriginalString); #endif continue; @@ -210,9 +216,11 @@ private IEnumerable GenerateResources() where T : Resource else if(s is BlankNode) { //TODO + Debugger.Break(); } else { + Debugger.Break(); } if (o is IUriNode) @@ -242,9 +250,10 @@ private IEnumerable GenerateResources() where T : Resource currentResource.Model = _model; } } - else if( o is BlankNode ) + else if(o is BlankNode) { - }else + } + else { currentResource.AddPropertyToMapping(p, ParseCellValue(o), true); } @@ -260,20 +269,28 @@ private IEnumerable GenerateResources() where T : Resource private object ParseCellValue(INode p) { if (p.NodeType == NodeType.Uri) + { return (p as IUriNode).Uri; + } else if (p.NodeType == NodeType.Literal) { ILiteralNode literalNode = p as ILiteralNode; + if (literalNode.DataType == null) { - if(string.IsNullOrEmpty(literalNode.Language)) + if (string.IsNullOrEmpty(literalNode.Language)) + { return literalNode.Value; + } else + { return new Tuple(literalNode.Value, literalNode.Language); + } } return XsdTypeMapper.DeserializeString(literalNode.Value, literalNode.DataType); } + return null; } @@ -285,7 +302,7 @@ private Dictionary FindResourceTypes(bool inferencingEnabled) string p; INode s,o; - // _tripleProvider.Reset(); + // _tripleProvider.Reset(); // Collect all types for every resource in the types dictionary. // I was going to use _queryResults.Select(), but that doesn't work with Virtuoso. 
@@ -371,10 +388,10 @@ public virtual int Count() string countQuery = SparqlSerializer.SerializeCount(_model, _query); SparqlQuery query = new SparqlQuery(countQuery); + // TODO: Apply inferencing if enabled var result = _store.ExecuteQuery(query.ToString()).SparqlResultSet; - if (result.Count > 0 && result[0].Count > 0) { diff --git a/Trinity/Stores/Stardog/StardogRdfHandler.cs b/Trinity/Stores/Stardog/StardogRdfHandler.cs index 5bd046b..31fb46f 100644 --- a/Trinity/Stores/Stardog/StardogRdfHandler.cs +++ b/Trinity/Stores/Stardog/StardogRdfHandler.cs @@ -1,7 +1,5 @@ using System; using System.Collections.Generic; -using System.Linq; -using System.Text; using VDS.RDF; using VDS.RDF.Parsing.Handlers; using VDS.RDF.Query; @@ -15,6 +13,9 @@ public override bool AcceptsAll get { return true; } } + public void EndRdf(bool ok) + { + } protected override bool HandleTripleInternal(Triple t) { @@ -24,13 +25,15 @@ protected override bool HandleTripleInternal(Triple t) class StardogResultHandler : BaseResultsHandler { - public StardogResultHandler() - { - } public bool BoolResult { get; set; } + public SparqlResultSet SparqlResultSet { get { return new SparqlResultSet(_results); } } + private List _results = new List(); + public StardogResultHandler() + { + } protected override void HandleBooleanResultInternal(bool result) { @@ -40,6 +43,7 @@ protected override void HandleBooleanResultInternal(bool result) protected override bool HandleResultInternal(VDS.RDF.Query.SparqlResult result) { _results.Add(result); + return true; } @@ -52,6 +56,5 @@ public bool GetAnwser() { return BoolResult; } - } } diff --git a/Trinity/Stores/Stardog/StardogStore.cs b/Trinity/Stores/Stardog/StardogStore.cs index 7de34bc..5f273b5 100644 --- a/Trinity/Stores/Stardog/StardogStore.cs +++ b/Trinity/Stores/Stardog/StardogStore.cs @@ -25,39 +25,27 @@ // // Copyright (c) Semiodesk GmbH 2015 - using System; using System.Collections.Generic; using System.Configuration; -using System.Data; using System.IO; 
-using System.Linq; using System.Reflection; -using System.Text; using VDS.RDF; using VDS.RDF.Parsing; -using VDS.RDF.Query; -using VDS.RDF.Query.Datasets; -using VDS.RDF.Query.Inference; using VDS.RDF.Storage; -using VDS.RDF.Storage.Management; -using VDS.RDF.Update; using VDS.RDF.Writing; using TrinitySettings = Semiodesk.Trinity.Configuration.TrinitySettings; namespace Semiodesk.Trinity.Store.Stardog { - /// - /// - class StardogStore : StoreBase { #region Members - StardogConnector _connector; - StardogRdfHandler _rdfHandler; - + private StardogConnector _connector; + private StardogRdfHandler _rdfHandler; + #endregion #region Constructors @@ -72,8 +60,6 @@ public StardogStore(string host, string username, string password, string storeI #region Methods - #region IStore implementation - public override IModel CreateModel(Uri uri) { return new Model(this, new UriRef(uri)); @@ -81,32 +67,29 @@ public override IModel CreateModel(Uri uri) public override bool ContainsModel(Uri uri) { - if (uri != null) - { - - string query = string.Format("ASK {{ GRAPH <{0}> {{ ?s ?p ?o . }} }}", uri.AbsoluteUri); + string query = string.Format("ASK {{ GRAPH <{0}> {{ ?s ?p ?o . 
}} }}", uri.AbsoluteUri); - var result = ExecuteQuery(query); - { - return result.BoolResult; - } - + var result = ExecuteQuery(query); + { + return result.BoolResult; } - - return false; } public override void ExecuteNonQuery(SparqlUpdate query, ITransaction transaction = null) { if (!_connector.UpdateSupported) + { throw new Exception("This store does not support SPARQL update."); - this._connector.Update(query.ToString()); + } + + _connector.Update(query.ToString()); } public StardogResultHandler ExecuteQuery(string query, ITransaction transaction = null) { StardogResultHandler resultHandler = new StardogResultHandler(); - this._connector.Query(_rdfHandler, resultHandler, query); + + _connector.Query(_rdfHandler, resultHandler, query); return resultHandler; } @@ -114,8 +97,9 @@ public StardogResultHandler ExecuteQuery(string query, ITransaction transaction public override ISparqlQueryResult ExecuteQuery(ISparqlQuery query, ITransaction transaction = null) { bool reasoning = query.IsInferenceEnabled; + StardogResultHandler resultHandler = new StardogResultHandler(); - this._connector.Query(_rdfHandler, resultHandler, query.ToString(), reasoning); + _connector.Query(_rdfHandler, resultHandler, query.ToString(), reasoning); return new StardogQueryResult(this, query, resultHandler); } @@ -163,17 +147,17 @@ public static IRdfReader GetReader(RdfSerializationFormat format) switch (format) { case RdfSerializationFormat.N3: - return new Notation3Parser(); + return new Notation3Parser(); case RdfSerializationFormat.NTriples: - return new NTriplesParser(); + return new NTriplesParser(); case RdfSerializationFormat.Turtle: - return new TurtleParser(); + return new TurtleParser(); default: case RdfSerializationFormat.RdfXml: - return new RdfXmlParser(); + return new RdfXmlParser(); } } @@ -189,23 +173,21 @@ public static IRdfWriter GetWriter(RdfSerializationFormat format) case RdfSerializationFormat.Turtle: return new CompressingTurtleWriter(); + default: case 
RdfSerializationFormat.RdfXml: return new RdfXmlWriter(); - } } - public override Uri Read(Stream stream, Uri graphUri, RdfSerializationFormat format, bool update) { - return null; + throw new NotImplementedException(); } public override Uri Read(Uri graphUri, Uri url, RdfSerializationFormat format, bool update) { - - return null; + throw new NotImplementedException(); } public override void RemoveModel(Uri uri) @@ -214,7 +196,6 @@ public override void RemoveModel(Uri uri) { SparqlUpdate clear = new SparqlUpdate(string.Format("CLEAR GRAPH <{0}>", uri.AbsoluteUri)); ExecuteNonQuery(clear); - } catch (Exception) { @@ -223,17 +204,37 @@ public override void RemoveModel(Uri uri) public override void Write(Stream stream, Uri graphUri, RdfSerializationFormat format) { - return; + throw new NotImplementedException(); } public override ITransaction BeginTransaction(System.Data.IsolationLevel isolationLevel) { - return null; + throw new NotImplementedException(); } public IModelGroup CreateModelGroup(params Uri[] models) { - return null; + List modelList = new List(); + + foreach (var model in models) + { + modelList.Add(GetModel(model)); + } + + return new ModelGroup(this, modelList); + } + + public IModelGroup CreateModelGroup(params IModel[] models) + { + List modelList = new List(); + + // This approach might seem a bit redundant, but we want to make sure to get the model from the right store. 
+ foreach (var model in models) + { + GetModel(model.Uri); + } + + return new ModelGroup(this, modelList); } public override void Dispose() @@ -241,8 +242,12 @@ public override void Dispose() _connector.Dispose(); } + + - #endregion + + + #endregion } } diff --git a/Trinity/Stores/StoreBase.cs b/Trinity/Stores/StoreBase.cs index 7402af1..a1b3647 100644 --- a/Trinity/Stores/StoreBase.cs +++ b/Trinity/Stores/StoreBase.cs @@ -50,13 +50,12 @@ public virtual bool ContainsModel(IModel model) public abstract void Write(System.IO.Stream fs, Uri graphUri, RdfSerializationFormat format); - public virtual void LoadOntologies(string configPath = null, string sourceDir = null) + public virtual void LoadOntologySettings(string configPath = null, string sourceDir = null) { var config = LoadConfiguration(configPath); LoadOntologies(config, sourceDir); } - public virtual void InitializeFromConfiguration(string configPath = null, string sourceDir = null) { var config = LoadConfiguration(configPath); @@ -125,8 +124,20 @@ public virtual IModelGroup CreateModelGroup(params Uri[] models) return ModelGroupFactory.CreateModelGroup(this, result); } + public virtual IModelGroup CreateModelGroup(params IModel[] models) + { + List result = new List(); + + foreach (var model in models) + { + result.Add(GetModel(model.Uri)); + } + + return new ModelGroup(this, result); + } + #endregion - + } } diff --git a/Trinity/Stores/StoreUpdater.cs b/Trinity/Stores/StoreUpdater.cs index cb450a6..dedc439 100644 --- a/Trinity/Stores/StoreUpdater.cs +++ b/Trinity/Stores/StoreUpdater.cs @@ -108,7 +108,7 @@ protected Uri GetPathFromLocation(string location) /// This method can be used to load storage specific configuration. 
/// /// - public void UpdateStorageSpecifics(IStorageSpecific storageSpecific) + public void UpdateStorageSpecifics(IStoreSpecific storageSpecific) { storageSpecific.Update(_store); } diff --git a/Trinity/Stores/Virtuoso/VirtuosoSpecific.cs b/Trinity/Stores/Virtuoso/VirtuosoSpecific.cs new file mode 100644 index 0000000..17ffb0a --- /dev/null +++ b/Trinity/Stores/Virtuoso/VirtuosoSpecific.cs @@ -0,0 +1,101 @@ +// LICENSE: +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +// AUTHORS: +// +// Moritz Eberl +// Sebastian Faubel +// +// Copyright (c) Semiodesk GmbH 2015 + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Xml.Serialization; +using Semiodesk.Trinity; +using Semiodesk.Trinity.Configuration; +using Semiodesk.Trinity.Store; + +namespace Semiodesk.Trinity +{ + internal class VirtuosoSettings : IStoreSpecific + { + #region Members + public VirtuosoStoreSettings Settings { get; set; } + #endregion + + public VirtuosoSettings(VirtuosoStoreSettings settings) + { + Settings = settings; + } + + public void Update(IStore store) + { + if (store is VirtuosoStore) + { + VirtuosoStore virtuosoStore = (store as VirtuosoStore); + + foreach (RuleSet set in Settings.RuleSets) + { + ClearRuleSet(new Uri(set.Uri), virtuosoStore); + foreach (var item in set.Graphs) + { + AddGraphToRuleSet(new Uri(set.Uri), new Uri(item.Uri), virtuosoStore); + } + } + } + } + + private void ClearRuleSet(Uri ruleSet, VirtuosoStore store) + { + try + { + string query = string.Format("delete * from DB.DBA.SYS_RDF_SCHEMA where RS_NAME='{0}';", ruleSet.OriginalString); + store.ExecuteQuery(query); + }catch(Exception) + { + } + } + + private void RemoveGraphFromRuleSet(Uri ruleSet, Uri graph, VirtuosoStore store) + { + try + { + string query = string.Format("rdfs_rule_set ('{0}', '{1}', 1)", ruleSet, graph); + store.ExecuteQuery(query); + } + catch (Exception) + { + + } + } + + private void AddGraphToRuleSet(Uri ruleSet, Uri graph, VirtuosoStore store) + { + + string query = string.Format("rdfs_rule_set ('{0}', '{1}')", ruleSet, graph); + store.ExecuteQuery(query); + } + + } + + +} diff --git a/Trinity/Stores/dotNetRDF/GraphTripleProvider.cs b/Trinity/Stores/dotNetRDF/GraphTripleProvider.cs index b37e546..ac57ec8 100644 --- a/Trinity/Stores/dotNetRDF/GraphTripleProvider.cs +++ b/Trinity/Stores/dotNetRDF/GraphTripleProvider.cs @@ -25,61 +25,69 @@ // // Copyright (c) Semiodesk GmbH 2017 - using System; -using 
System.Collections.Generic; using System.Linq; -using System.Text; using VDS.RDF; namespace Semiodesk.Trinity.Store { - class GraphTripleProvider : ITripleProvider + internal class GraphTripleProvider : ITripleProvider { - IGraph _graph; - int counter; - public GraphTripleProvider(IGraph graph) + #region Members + + private int _n; + + private IGraph _graph; + + public INode S { - _graph = graph; - counter = 0; + get { return _graph.Triples.ElementAt(_n).Subject; } } - public int Count + public Uri P { - get { return _graph.Triples.Count; } + get { return (_graph.Triples.ElementAt(_n).Predicate as UriNode).Uri; } } - public void Reset() + public INode O { - counter = 0; + get { return _graph.Triples.ElementAt(_n).Object; } } - - public bool HasNext + public int Count { - get { return counter < _graph.Triples.Count; } + get { return _graph.Triples.Count; } } - public void SetNext() + public bool HasNext { - counter += 1; + get { return _n < _graph.Triples.Count; } } - public INode S + #endregion + + #region Constructors + + public GraphTripleProvider(IGraph graph) { - get { return _graph.Triples.ElementAt(counter).Subject; } + _n = 0; + _graph = graph; } - public Uri P + #endregion + + #region Methods + + public void Reset() { - get { return (_graph.Triples.ElementAt(counter).Predicate as UriNode).Uri; } + _n = 0; } - public INode O + public void SetNext() { - get { return _graph.Triples.ElementAt(counter).Object; } + _n += 1; } - } - + #endregion + } } diff --git a/Trinity/Stores/dotNetRDF/SparqlResultSetTripleProvider.cs b/Trinity/Stores/dotNetRDF/SparqlResultSetTripleProvider.cs index f97c988..c939c8e 100644 --- a/Trinity/Stores/dotNetRDF/SparqlResultSetTripleProvider.cs +++ b/Trinity/Stores/dotNetRDF/SparqlResultSetTripleProvider.cs @@ -26,68 +26,73 @@ // Copyright (c) Semiodesk GmbH 2017 using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; using VDS.RDF; using VDS.RDF.Query; namespace Semiodesk.Trinity.Store { - class 
SparqlResultSetTripleProvider : ITripleProvider + internal class SparqlResultSetTripleProvider : ITripleProvider { - SparqlResultSet _set; - string _subjectVar; - string _predicateVar; - string _objectVar; + private int _n; - int counter; - public SparqlResultSetTripleProvider(SparqlResultSet set, string subjectVar, string predicateVar, string objectVar) - { - _set = set; - counter = 0; + private SparqlResultSet _set; - _subjectVar = subjectVar; - _predicateVar = predicateVar; - _objectVar = objectVar; - } + private string _subjectKey; - public int Count + public INode S { - get { return _set.Count; } + get { return _set[_n][_subjectKey]; } } - public void Reset() + private string _predicateKey; + + public Uri P { - counter = 0; + get { return (_set[_n][_predicateKey] as UriNode).Uri; } } + private string _objectKey; - public bool HasNext + public INode O { - get { return counter < _set.Count; } + get { return _set[_n][_objectKey]; } } - public void SetNext() + public int Count { - counter += 1; + get { return _set.Count; } } - public INode S + public bool HasNext { - get { return _set[counter][_subjectVar]; } + get { return _n < _set.Count; } } - public Uri P + #region Constructors + + public SparqlResultSetTripleProvider(SparqlResultSet set, string subjectKey, string predicateKey, string objectKey) { - get { return (_set[counter][_predicateVar] as UriNode).Uri; } + _n = 0; + _set = set; + _subjectKey = subjectKey; + _predicateKey = predicateKey; + _objectKey = objectKey; } - public INode O + #endregion + + #region Methods + + public void Reset() { - get { return _set[counter][_objectVar]; } + _n = 0; } - } + public void SetNext() + { + _n += 1; + } + #endregion + } } diff --git a/Trinity/Stores/dotNetRDF/dotNetRDFQueryResult.cs b/Trinity/Stores/dotNetRDF/dotNetRDFQueryResult.cs index b86df75..086606a 100644 --- a/Trinity/Stores/dotNetRDF/dotNetRDFQueryResult.cs +++ b/Trinity/Stores/dotNetRDF/dotNetRDFQueryResult.cs @@ -28,30 +28,32 @@ using System; using 
System.Collections.Generic; using System.Diagnostics; -using System.Linq; -using System.Text; using VDS.RDF; using VDS.RDF.Query; #if NET35 using Semiodesk.Trinity.Utility; #endif - namespace Semiodesk.Trinity.Store { - class dotNetRDFQueryResult : ISparqlQueryResult + internal class dotNetRDFQueryResult : ISparqlQueryResult { #region Members private IModel _model; - private ISparqlQuery _query; + private ITripleProvider _tripleProvider; - private SparqlResultSet _resultSet; + + private ISparqlQuery _query; + + private SparqlResultSet _queryResults; + private dotNetRDFStore _store; #endregion #region Constructor + public dotNetRDFQueryResult(dotNetRDFStore store, ISparqlQuery query, SparqlResultSet resultSet) { string s = null; @@ -72,7 +74,7 @@ public dotNetRDFQueryResult(dotNetRDFStore store, ISparqlQuery query, SparqlResu _query = query; _tripleProvider = new SparqlResultSetTripleProvider(resultSet, s, p, o); _model = query.Model; - _resultSet = resultSet; + _queryResults = resultSet; _store = store; } @@ -83,17 +85,16 @@ public dotNetRDFQueryResult(dotNetRDFStore store, ISparqlQuery query, IGraph gra _model = query.Model; _store = store; } + #endregion #region Methods - #region ISparqlQueryResult - public bool GetAnwser() { if (_query.QueryType == SparqlQueryType.Ask) { - return _resultSet.Result; + return _queryResults.Result; } else { @@ -103,34 +104,54 @@ public bool GetAnwser() public IEnumerable GetBindings() { - List result = new List(); if (_query.QueryType == SparqlQueryType.Select) { - foreach (var x in _resultSet) + foreach (SparqlResult result in _queryResults) { - BindingSet r = new BindingSet(); - foreach (var y in x) + BindingSet b = new BindingSet(); + + foreach (var r in result) { - if (y.Value != null) - r.Add(y.Key, ParseCellValue(y.Value)); + if (r.Value != null) + { + b.Add(r.Key, ParseCellValue(r.Value)); + } } - result.Add(r); - } - + yield return b; + } + } + else + { + throw new ArgumentException("Cannot return bindings for queries of 
type " + _query.QueryType.ToString()); } + } - return result; + public IEnumerable GetResources() + { + return GetResources(); + } + + public IEnumerable GetResources(int offset = -1, int limit = -1) + { + throw new NotImplementedException(); } public IEnumerable GetResources() where T : Resource { - if (!_query.ProvidesStatements()) + if(_query.ProvidesStatements()) { - throw new ArgumentException("Error: The given query cannot be resolved into statements."); + return GenerateResources(); } + else + { + throw new ArgumentException("The given query cannot be resolved into statements."); + } + } - return GenerateResources(); + public IEnumerable GetResources(int offset = -1, int limit = -1) where T : Resource + { + throw new NotImplementedException(); } private IEnumerable GenerateResources() where T : Resource @@ -141,9 +162,8 @@ private IEnumerable GenerateResources() where T : Resource { // A dictionary mapping URIs to the generated resource objects. Dictionary cache = new Dictionary(); - Dictionary types = FindResourceTypes(_query.IsInferenceEnabled); - //Dictionary types = new Dictionary(); + _tripleProvider.Reset(); foreach (KeyValuePair resourceType in types) @@ -156,30 +176,23 @@ private IEnumerable GenerateResources() where T : Resource while (_tripleProvider.HasNext) { - Uri predUri; - INode s, o; - Property p; + INode s = _tripleProvider.S; + Property p = OntologyDiscovery.GetProperty(_tripleProvider.P); + INode o = _tripleProvider.O; - - s = _tripleProvider.S; - predUri = _tripleProvider.P; - o = _tripleProvider.O; _tripleProvider.SetNext(); - p = OntologyDiscovery.GetProperty(predUri); - - if (s is IUriNode) { - Uri sUri = (s as IUriNode).Uri; + Uri subjectUri = (s as IUriNode).Uri; - if (currentResource != null && currentResource.Uri.OriginalString == sUri.OriginalString) + if (currentResource != null && currentResource.Uri.OriginalString == subjectUri.OriginalString) { // We already have the handle to the resource which the property should be added 
to. } - else if (cache.ContainsKey(sUri.OriginalString)) + else if (cache.ContainsKey(subjectUri.OriginalString)) { - currentResource = cache[sUri.OriginalString] as T; + currentResource = cache[subjectUri.OriginalString] as T; // In this case we may have encountered a resource which was // added to the cache by the object value handler below. @@ -192,19 +205,19 @@ private IEnumerable GenerateResources() where T : Resource { try { - currentResource = (T)Activator.CreateInstance(typeof(T), sUri); + currentResource = (T)Activator.CreateInstance(typeof(T), subjectUri); currentResource.IsNew = false; currentResource.IsSynchronized = true; currentResource.Model = _model; - cache.Add(sUri.OriginalString, currentResource); + cache.Add(subjectUri.OriginalString, currentResource); + result.Add(currentResource); } catch { #if DEBUG - Debug.WriteLine("[SparqlQueryResult] Info: Could not create resource " + - sUri.OriginalString); + Debug.WriteLine("[SparqlQueryResult] Error: Could not create resource:", subjectUri.OriginalString); #endif continue; @@ -213,10 +226,7 @@ private IEnumerable GenerateResources() where T : Resource } else if(s is BlankNode) { - //TODO - } - else - { + // TODO: Implement blank node support. } if (o is IUriNode) @@ -246,9 +256,11 @@ private IEnumerable GenerateResources() where T : Resource currentResource.Model = _model; } } - else if( o is BlankNode ) + else if(o is BlankNode) { - }else + // TODO: Implement blank node support. 
+ } + else { currentResource.AddPropertyToMapping(p, ParseCellValue(o), true); } @@ -264,31 +276,41 @@ private IEnumerable GenerateResources() where T : Resource private object ParseCellValue(INode p) { if (p.NodeType == NodeType.Uri) + { return (p as IUriNode).Uri; + } else if (p.NodeType == NodeType.Literal) { ILiteralNode literalNode = p as ILiteralNode; + if (literalNode.DataType == null) { - if(string.IsNullOrEmpty(literalNode.Language)) + if (string.IsNullOrEmpty(literalNode.Language)) + { return literalNode.Value; + } else + { return new Tuple(literalNode.Value, literalNode.Language); + } } return XsdTypeMapper.DeserializeString(literalNode.Value, literalNode.DataType); } + return null; } - private Dictionary FindResourceTypes(bool inferencingEnabled) - where T : Resource + private Dictionary FindResourceTypes(bool inferencingEnabled) where T : Resource { Dictionary result = new Dictionary(); Dictionary> types = new Dictionary>(); - string p; - INode s,o; + INode s; + string p; + INode o; + + //_tripleProvider.Reset(); // Collect all types for every resource in the types dictionary. // I was going to use _queryResults.Select(), but that doesn't work with Virtuoso. while (_tripleProvider.HasNext) @@ -297,7 +319,6 @@ private Dictionary FindResourceTypes(bool inferencingEnabled) p = _tripleProvider.P.ToString(); o = _tripleProvider.O; - _tripleProvider.SetNext(); if (o.NodeType == NodeType.Uri && p == "http://www.w3.org/1999/02/22-rdf-syntax-ns#type") @@ -363,27 +384,23 @@ private Dictionary FindResourceTypes(bool inferencingEnabled) return result; } - public IEnumerable GetResources() - { - return GetResources(); - } - public virtual int Count() { string countQuery = SparqlSerializer.SerializeCount(_model, _query); SparqlQuery query = new SparqlQuery(countQuery); - // TODO: Apply inferencing if enabled - var res = _store.ExecuteQuery(query.ToString()); + // TODO: Apply inferencing if enabled. 
- if (res is SparqlResultSet) + object result = _store.ExecuteQuery(query.ToString()); + + if (result is SparqlResultSet) { - SparqlResultSet result = res as SparqlResultSet; + SparqlResultSet set = result as SparqlResultSet; - if (result.Count > 0 && result[0].Count > 0) + if (set.Count > 0 && set[0].Count > 0) { - var value = ParseCellValue(result[0][0]); + var value = ParseCellValue(set[0][0]); if (value.GetType() == typeof(int)) { @@ -395,21 +412,11 @@ public virtual int Count() return -1; } - public IEnumerable GetResources(int offset = -1, int limit = -1) where T : Resource - { - throw new NotImplementedException(); - } - - public IEnumerable GetResources(int offset = -1, int limit = -1) - { - throw new NotImplementedException(); - } - public void Dispose() { throw new NotImplementedException(); } - #endregion + #endregion } } diff --git a/Trinity/Stores/dotNetRDF/dotNetRDFStore.cs b/Trinity/Stores/dotNetRDF/dotNetRDFStore.cs index 0eae861..3611970 100644 --- a/Trinity/Stores/dotNetRDF/dotNetRDFStore.cs +++ b/Trinity/Stores/dotNetRDF/dotNetRDFStore.cs @@ -25,28 +25,22 @@ // // Copyright (c) Semiodesk GmbH 2015 - using System; using System.Collections.Generic; using System.Configuration; using System.IO; -using System.Linq; using System.Reflection; -using System.Text; using VDS.RDF; using VDS.RDF.Parsing; using VDS.RDF.Query; -using VDS.RDF.Query.Datasets; using VDS.RDF.Query.Inference; using VDS.RDF.Update; using VDS.RDF.Writing; + using TrinitySettings = Semiodesk.Trinity.Configuration.TrinitySettings; namespace Semiodesk.Trinity.Store { - /// - /// - public class dotNetRDFStore : StoreBase { #region Members @@ -70,29 +64,28 @@ public class dotNetRDFStore : StoreBase /// Creates a new dotNetRDFStore. /// /// A list of ontology file paths relative to this assembly. The store will be populated with these ontologies. 
- public dotNetRDFStore(string[] schema) + public dotNetRDFStore(string[] schemes) { _store = new TripleStore(); _updateProcessor = new LeviathanUpdateProcessor(_store); _queryProcessor = new LeviathanQueryProcessor(_store); _parser = new SparqlUpdateParser(); - if (schema == null) + if (schemes != null) { - return; - } + _reasoner = new RdfsReasoner(); + _store.AddInferenceEngine(_reasoner); - _reasoner = new RdfsReasoner(); - _store.AddInferenceEngine(_reasoner); + foreach (string s in schemes) + { + var directory = new FileInfo(Assembly.GetExecutingAssembly().Location).Directory; + var file = new FileInfo(Path.Combine(directory.FullName, s)); - foreach (string m in schema) - { - var x = new FileInfo(Assembly.GetExecutingAssembly().Location).Directory; - FileInfo s = new FileInfo( Path.Combine(x.FullName, m)); - IGraph schemaGraph = LoadSchema(s.FullName); + IGraph schemaGraph = LoadSchema(file.FullName); - _store.Add(schemaGraph); - _reasoner.Initialise(schemaGraph); + _store.Add(schemaGraph); + _reasoner.Initialise(schemaGraph); + } } } @@ -103,6 +96,7 @@ public dotNetRDFStore(string[] schema) private IGraph LoadSchema(string schema) { IGraph graph = new Graph(); + graph.LoadFromFile(schema); string queryString = "SELECT ?s WHERE { ?s a . }"; @@ -187,8 +181,10 @@ public override ISparqlQueryResult ExecuteQuery(ISparqlQuery query, ITransaction /// public object ExecuteQuery(string query) { - SparqlQueryParser sparqlparser = new SparqlQueryParser(); - var q = sparqlparser.ParseFromString(query.ToString()); + SparqlQueryParser parser = new SparqlQueryParser(); + + var q = parser.ParseFromString(query); + return _queryProcessor.ProcessQuery(q); } @@ -199,7 +195,7 @@ public object ExecuteQuery(string query) /// public override IModel GetModel(Uri uri) { - return new Model(this, new UriRef(uri)); + return new Model(this, uri.ToUriRef()); } /// @@ -217,10 +213,12 @@ public override bool IsReady /// All handles to existing models. 
public override IEnumerable ListModels() { - foreach (var g in _store.Graphs) + foreach (var graph in _store.Graphs) { - if( g.BaseUri != null) - yield return new Model(this, new UriRef(g.BaseUri)); + if (graph.BaseUri != null) + { + yield return new Model(this, new UriRef(graph.BaseUri)); + } } } @@ -230,17 +228,17 @@ public static IRdfReader GetReader(RdfSerializationFormat format) switch (format) { case RdfSerializationFormat.N3: - return new Notation3Parser(); + return new Notation3Parser(); case RdfSerializationFormat.NTriples: - return new NTriplesParser(); + return new NTriplesParser(); case RdfSerializationFormat.Turtle: - return new TurtleParser(); + return new TurtleParser(); + default: case RdfSerializationFormat.RdfXml: - return new RdfXmlParser(); - + return new RdfXmlParser(); } } @@ -256,10 +254,10 @@ public static IRdfWriter GetWriter(RdfSerializationFormat format) case RdfSerializationFormat.Turtle: return new CompressingTurtleWriter(); + default: case RdfSerializationFormat.RdfXml: return new RdfXmlWriter(); - } } @@ -273,15 +271,22 @@ public static IRdfWriter GetWriter(RdfSerializationFormat format) /// public override Uri Read(Stream stream, Uri graphUri, RdfSerializationFormat format, bool update) { + IRdfReader parser = GetReader(format); TextReader reader = new StreamReader(stream); + IGraph graph = new Graph(); - IRdfReader parser = GetReader(format); parser.Load(graph, reader); + graph.BaseUri = graphUri; + if (!update) + { _store.Remove(graphUri); + } + _store.Add(graph, update); + return graphUri; } @@ -317,10 +322,13 @@ public override Uri Read(Uri graphUri, Uri url, RdfSerializationFormat format, b TripleStore s = new TripleStore(); s.LoadFromFile(path, new TriGParser()); - foreach (VDS.RDF.Graph g in s.Graphs) + foreach (Graph g in s.Graphs) { if (!update) + { _store.Remove(g.BaseUri); + } + _store.Add(g, update); } } @@ -335,14 +343,19 @@ public override Uri Read(Uri graphUri, Uri url, RdfSerializationFormat format, b else if 
(url.Scheme == "http") { graph = new Graph(); + UriLoader.Load(graph, url); + graph.BaseUri = graphUri; } if (graph != null) { if (!update) + { _store.Remove(graph.BaseUri); + } + _store.Add(graph, update); return graphUri; @@ -363,6 +376,7 @@ public override void Write(Stream stream, Uri graphUri, RdfSerializationFormat f if (_store.HasGraph(graphUri)) { IGraph graph = _store.Graphs[graphUri]; + using (StreamWriter writer = new StreamWriter(stream)) { graph.SaveToStream(writer, GetWriter(format)); @@ -397,6 +411,24 @@ public override IModelGroup CreateModelGroup(params Uri[] models) return new ModelGroup(this, modelList); } + /// + /// Creates a model group which allows for queries to be made on multiple models at once. + /// + /// + /// + public IModelGroup CreateModelGroup(params IModel[] models) + { + List modelList = new List(); + + // This approach might seem a bit redundant, but we want to make sure to get the model from the right store. + foreach (var x in models) + { + this.GetModel(x.Uri); + } + + return new ModelGroup(this, modelList); + } + /// /// Closes the store. It is not usable after this call. /// @@ -407,6 +439,11 @@ public override void Dispose() _store.Dispose(); } + + + + + #endregion } } diff --git a/Trinity/Stores/dotNetRDF/dotNetRDFStoreProvider.cs b/Trinity/Stores/dotNetRDF/dotNetRDFStoreProvider.cs index a22cee6..60a270b 100644 --- a/Trinity/Stores/dotNetRDF/dotNetRDFStoreProvider.cs +++ b/Trinity/Stores/dotNetRDF/dotNetRDFStoreProvider.cs @@ -36,9 +36,9 @@ namespace Semiodesk.Trinity.Store { - #if ! 
NET35 +#if !NET35 [Export(typeof(StoreProvider))] - #endif +#endif public class dotNetRDFStoreProvider : StoreProvider { #region Constructor @@ -50,24 +50,21 @@ public dotNetRDFStoreProvider() #endregion - + #region Methods public override IStore GetStore(Dictionary configurationDictionary) { - string schemaKey = "schema"; - string[] schema = null; - if (configurationDictionary.ContainsKey(schemaKey)) - schema = GetSchema(configurationDictionary[schemaKey]); - - return new dotNetRDFStore(schema); - } + string schemaKey = "schema"; + string[] schema = null; - private string[] GetSchema(string schemaString) - { - string[] schema = schemaString.Split(','); + if (configurationDictionary.ContainsKey(schemaKey)) + { + schema = configurationDictionary[schemaKey].Split(','); + } - return schema; - + return new dotNetRDFStore(schema); } + + #endregion } } diff --git a/Trinity/Transaction/TransactionEventArgs.cs b/Trinity/Transaction/TransactionEventArgs.cs index cc583af..a64a411 100644 --- a/Trinity/Transaction/TransactionEventArgs.cs +++ b/Trinity/Transaction/TransactionEventArgs.cs @@ -26,6 +26,7 @@ // Copyright (c) Semiodesk GmbH 2015 using System; +using System.Data; namespace Semiodesk.Trinity { diff --git a/Trinity/Trinity.csproj b/Trinity/Trinity.csproj index 0eab31d..2a41968 100644 --- a/Trinity/Trinity.csproj +++ b/Trinity/Trinity.csproj @@ -87,12 +87,13 @@ - + + - + diff --git a/Trinity/Utility/EnumExtensions.cs b/Trinity/Utility/EnumExtensions.cs new file mode 100644 index 0000000..263ec88 --- /dev/null +++ b/Trinity/Utility/EnumExtensions.cs @@ -0,0 +1,60 @@ +/* +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, 
subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +Copyright (c) 2015 Semiodesk GmbH + +Authors: +Moritz Eberl +Sebastian Faubel +*/ + + +#if NET35 + +using System; + +namespace Semiodesk.Trinity.Utility +{ + /// + /// Extentions for enums. + /// + public static class EnumExtensions + { + /// + /// A FX 3.5 way to mimic the FX4 "HasFlag" method. + /// + /// The tested enum. + /// The value to test. + /// True if the flag is set. Otherwise false. + public static bool HasFlag(this Enum variable, Enum value) + { + // check if from the same type. 
+ if (variable.GetType() != value.GetType()) + { + throw new ArgumentException("The checked flag is not from the same type as the checked variable."); + } + + ulong num = Convert.ToUInt64(value); + ulong num2 = Convert.ToUInt64(variable); + + return (num2 & num) == num; + } + } +} +#endif diff --git a/Trinity/XsdTypeMapper.cs b/Trinity/XsdTypeMapper.cs index d8beeed..0e27f6a 100644 --- a/Trinity/XsdTypeMapper.cs +++ b/Trinity/XsdTypeMapper.cs @@ -86,7 +86,7 @@ private struct xsd {typeof(UInt64), xsd._ulong}, {typeof(DateTime), xsd.datetime}, {typeof(byte[]), xsd.base64Binary}, - {typeof(bool), xsd.boolean_}, + {typeof(bool), xsd.boolean}, {typeof(decimal), xsd._decimal}, {typeof(double), xsd._double}, {typeof(float), xsd._float}, @@ -180,6 +180,16 @@ public static Uri GetXsdTypeUri(Type type) return NativeToXsd[type]; } + /// + /// Indicates if there is a registered XML Schema type URI for the given .NET type. + /// + /// A .NET type object. + /// true if there is a XML schema type, false otherwise. + public static bool HasXsdTypeUri(Type type) + { + return NativeToXsd.ContainsKey(type); + } + /// /// Provides the XML Schema type URI for a given .NET type. /// diff --git a/Trinity/packages.config b/Trinity/packages.config index 9fd6404..ca6d8e3 100644 --- a/Trinity/packages.config +++ b/Trinity/packages.config @@ -1,4 +1,5 @@  + \ No newline at end of file