Difference between revisions of "Data Transformation"
(→Dependencies) |
(→Transforming a single record using a XSLT) |
||
Line 220: | Line 220: | ||
The following examples show how some of the transformation programs contained in the metadata broker library can be used. For this purpose, the client-side code is shown, describing the necessary steps to invoke the operations of the metadata broker service. Furthermore, the full definition of the referenced programs and transformation programs is also given. These definitions can be used as the base for creating new programs and transformation programs by anyone who needs to do this. | The following examples show how some of the transformation programs contained in the metadata broker library can be used. For this purpose, the client-side code is shown, describing the necessary steps to invoke the operations of the metadata broker service. Furthermore, the full definition of the referenced programs and transformation programs is also given. These definitions can be used as the base for creating new programs and transformation programs by anyone who needs to do this. | ||
− | ==== Transforming a single record using a XSLT ==== | + | ==== Transforming a single record, ResultSet or collection using a XSLT ==== |
− | This is the <tt> | + | This is the <tt>XSLT_Transformer</tt> class (included in the metadata broker library), which performs the actual conversion: |
<pre> | <pre> | ||
− | package org. | + | package org.gcube.metadatamanagement.metadatabrokerlibrary.programs.XSLT_Transformer; |
− | + | ||
− | + | ||
− | + | ||
− | + | ||
− | + | ||
− | + | ||
import java.io.StringReader; | import java.io.StringReader; | ||
Line 240: | Line 234: | ||
import javax.xml.transform.Templates; | import javax.xml.transform.Templates; | ||
import javax.xml.transform.Transformer; | import javax.xml.transform.Transformer; | ||
− | |||
import javax.xml.transform.TransformerFactory; | import javax.xml.transform.TransformerFactory; | ||
import javax.xml.transform.stream.StreamResult; | import javax.xml.transform.stream.StreamResult; | ||
import javax.xml.transform.stream.StreamSource; | import javax.xml.transform.stream.StreamSource; | ||
− | public class | + | import org.gcube.common.core.utils.logging.GCUBELog; |
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataElement; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataSink; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataSource; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.RecordDataSource; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.programs.VariableType; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.util.GenericResourceRetriever; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.util.SecurityManager; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.util.stats.BrokerStatistics; | ||
+ | import org.gcube.metadatamanagement.metadatabrokerlibrary.util.stats.BrokerStatisticsConstants; | ||
+ | |||
+ | public class XSLT_Transformer { | ||
+ | |||
+ | /** The Logger this class uses */ | ||
+ | private static GCUBELog log = new GCUBELog(XSLT_Transformer.class); | ||
− | + | public <T extends DataElement, S extends DataElement> void transform(final DataSource<T> source, final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics, final SecurityManager secManager) throws RemoteException { | |
− | + | ||
− | + | ||
− | + | /* If the input is a single record, there is no point in spawning a new thread for the | |
− | + | * transformation because it will not last for a long time. In any other case, the | |
− | + | * transformation is executed in a new thread. | |
− | + | */ | |
− | + | if (source instanceof RecordDataSource) { | |
− | + | log.debug("Input type is 'record', transforming in current thread."); | |
− | + | doTransform(source, xslt, sink, statistics, secManager); | |
− | + | } | |
− | + | else { | |
− | + | log.debug("Input type is not 'record', spawning new thread."); | |
− | + | Thread t = new Thread() { | |
− | + | public void run() { | |
+ | doTransform(source, xslt, sink, statistics, secManager); | ||
+ | } | ||
+ | }; | ||
− | + | /* Delegate this thread's credentials to the new thread, and start it */ | |
− | + | secManager.delegateCredentialsAndScopeToThread(t); | |
− | + | t.start(); | |
− | + | } | |
− | + | ||
− | + | ||
− | + | ||
− | + | ||
− | + | ||
− | + | ||
} | } | ||
− | + | private <T extends DataElement, S extends DataElement> void doTransform(final DataSource<T> source, final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics, final SecurityManager secManager) { | |
− | return factory.newTemplates(new StreamSource(new StringReader(xslt))); | + | |
+ | log.debug("Starting transformation, scope is: " + secManager.getScope()); | ||
+ | |||
+ | /* Retrieve the XSLT from the IS and compile it so that the records will be transformed faster */ | ||
+ | String xsltdef = null; | ||
+ | try { | ||
+ | statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETORETRIEVEGR); | ||
+ | xsltdef = GenericResourceRetriever.retrieveGenericResource(xslt.getReference(), secManager); | ||
+ | statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETORETRIEVEGR); | ||
+ | } | ||
+ | catch (Exception e) { | ||
+ | log.error("XSLT_Transformer: Failed to retrieve the given XSLT from the IS (Generic Resource ID: " + | ||
+ | xslt.getReference() + ").", e); | ||
+ | return; | ||
+ | } | ||
+ | |||
+ | Templates compiledXSLT = null; | ||
+ | try { | ||
+ | TransformerFactory factory = TransformerFactory.newInstance(); | ||
+ | compiledXSLT = factory.newTemplates(new StreamSource(new StringReader(xsltdef))); | ||
+ | } catch (Exception e) { | ||
+ | log.error("XSLT_Transformer: Failed to compile the XSLT with ID: " + xslt.getReference(), e); | ||
+ | } | ||
+ | |||
+ | /* Loop through each source element, transform it, and store it in the resulting entity */ | ||
+ | statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMALL); | ||
+ | while (source.hasNext()) { | ||
+ | T sourceElement = null; | ||
+ | S destElement = null; | ||
+ | try { | ||
+ | sourceElement = source.getNext(); | ||
+ | } catch (Exception e) { | ||
+ | log.error("XSLT_Transformer: Failed to retrieve next element from DataSource. Aborting transformation", e); | ||
+ | return; | ||
+ | } | ||
+ | |||
+ | StringWriter output = new StringWriter(); | ||
+ | try { | ||
+ | statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMREC); | ||
+ | Transformer t = compiledXSLT.newTransformer(); | ||
+ | t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); | ||
+ | t.transform(new StreamSource(new StringReader(sourceElement.getPayload())), new StreamResult(output)); | ||
+ | statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMREC); | ||
+ | } catch (Exception e) { | ||
+ | log.error("Failed to transform current element (ID = " + sourceElement.getID() + ":\n" + | ||
+ | sourceElement.getPayload() + "\nContinuing with the next element."); | ||
+ | continue; | ||
+ | } | ||
+ | |||
+ | try { | ||
+ | destElement = sink.getNewDataElement(sourceElement, output.toString()); | ||
+ | sink.writeNext(destElement); | ||
+ | } catch (Exception e) { | ||
+ | log.error("Failed to store the transformed record to the data sink. Continuing with the next element.", e); | ||
+ | continue; | ||
+ | } | ||
+ | } | ||
+ | statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMALL); | ||
+ | |||
+ | /* Notify the data sink that there is no more data to be written */ | ||
+ | try { | ||
+ | sink.finishedWriting(); | ||
+ | } catch (Exception e) { | ||
+ | log.error("Failed to finalize writing of output data.", e); | ||
+ | } | ||
} | } | ||
− | |||
− | |||
− | |||
− | |||
} | } | ||
</pre> | </pre> | ||
− | The only transformation method that can be used externally (when this program is called by a transformation program) is | + | The only transformation method that can be used externally (when this program is called by a transformation program) is: |
+ | <pre> | ||
+ | public <T extends DataElement, S extends DataElement> void transform(final DataSource<T> source, final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics, final SecurityManager secManager) | ||
+ | </pre> | ||
+ | Taking a closer look at the declaration of this method, we can see that the first parameter is a DataSource, followed by a VariableType and then by a DataSink. This means that the transformation rule that wraps this program has two inputs. The first one is a data input and the second one is a variable. Finally, a BrokerStatistics object and a SecurityManager object are passed to the method so that the program has access to the Metadata Broker statistics and security engine. Note that since we cannot know (at design time) the type of input and output (they may be records, resultsets or collections), we have to use Java generics. The correct way to write a transformation method is to follow the pattern shown in this example, i.e. using two generic types: T as a placeholder for the actual input data type and S as a placeholder for the output data type. | ||
+ | |||
This is the XML definition of the transformation program: | This is the XML definition of the transformation program: |
Revision as of 11:32, 29 August 2008
Contents
Metadata Broker
Introduction
The main functionality of the Metadata Broker is to convert XML documents from some input schema and/or language to another. The inputs and outputs of the transformation process can be single records, ResultSets or entire collections. In the special case where both the inputs and the output are collections, a persistent transformation is possible, meaning that whenever there is a change in the input collection(s), the new data will be automatically transformed in order for the change to be reflected to the output collection.
Transformation Programs
Complex transformation processes are described by transformation programs, which are XML documents. Transformation programs are stored in the IS. Each transformation program can reference other transformation programs and use them as “black-box” components in the transformation process it defines.
Each transformation program consists of:
- One or more data input definitions. Each one defines the schema, language and type (record, ResultSet or collection) of the data that must be mapped to the particular input.
- One or more input variables. Each one of them is a placeholder for an additional string value which must be passed to the transformation program at run-time.
- Exactly one data output definition, which contains the output data type (record, ResultSet or collection), schema and language.
- One or more transformation rule definitions.
Note: The name of the input or output schema must be given in the format SchemaName=SchemaURI, where SchemaName is the name of the schema and SchemaURI is the URI of its definition, e.g. DC=http://dublincore.org/schemas/xmls/simpledc20021212.xsd.
Transformation Rules
Transformation rules are the building block of transformation programs. Each transformation program always contains at least one transformation rule. Transformation rules describe simple transformations and execute in the order in which they are defined inside the transformation program. Usually the output of a transformation rule is the input of the next one. So, a transformation program can be thought of as a chain of transformation rules which work together in order to perform the complex transformation defined by the whole transformation program.
Each transformation rule consists of:
- One or more data input definitions. Each definition contains the schema, language, type (record, ResultSet, collection or input variable) and data reference of the input it describes. Each one of these elements (except for the 'type' element) can be either a literal value, or a reference to another value defined inside the transformation program (using XPath syntax).
- Exactly one data output, which can be:
- A definition that contains the output data type (record, ResultSet or collection), schema and language.
- A reference to the transformation program's output (using XPath syntax). This is the way to express that the output of this transformation rule will also be the output of the whole transformation program, so such a reference is only valid for the transformation program's final rule.
- The name of the underlying program to execute in order to do the transformation, using standard 'packageName.className' syntax.
A transformation rule can also be a reference to another transformation program. This way, whole transformation programs can be used as parts of the execution of another transformation program. The reference can be made using the unique id of the transformation program being referenced and a set of value assignments to its data inputs and variables.
Note: The name of the input or output schema must be given in the format SchemaName=SchemaURI, where SchemaName is the name of the schema and SchemaURI is the URI of its definition, e.g. DC=http://dublincore.org/schemas/xmls/simpledc20021212.xsd.
Variable fields inside data input/output definitions
Inside the definition of data inputs and outputs of transformation programs and transformation rules, any field except for 'Type' can be declared as a variable field. Just like input variables, variable fields get their values by run-time assignments. In order to declare an element as a variable field of its parent element, one needs to include 'isVariable=true' in the element's definition. When the caller invokes a broker operation in order to transform some metadata, he/she can provide a set of value assignments to the input variables and variable fields of the transformation program definition. But the caller has access only to the variables of the whole transformation program, not the internal transformation rules. However, transformation rules can also contain variable fields in their input/output definitions. Since the caller cannot explicitly assign values to them, such variable fields must contain an XPath expression as their value, which points to another element inside the transformation program that contains the value to be assigned. These references are resolved when each transformation rule is executed, so if, for example, a variable field of a transformation rule's input definition points to a variable field of the previous transformation rule's output definition, it is guaranteed that the referenced element's value will be there at the time of execution of the second transformation rule. It is important to note that every XPath expression should specify an absolute location inside the document, which basically means it should start with '/'.
There is a special case where the language and schema fields of a transformation program's data input definition can automatically get values assigned to them, without requiring the caller to do so. This can happen when the type of the particular data input is set to collection. In this case, the Metadata Broker Service automatically retrieves the format of the metadata collection described by the ID that is given through the Reference field of the data input definition and assigns the actual schema descriptor and language identifier of the collection to the respective variable fields of the data input definition. If any of these fields already contain values, these values are compared with the ones retrieved from the metadata collection's profile, and if they are different the execution of the transformation program stops and an exception is thrown by the Metadata Broker service. Note that the automatic value assignment works only on data inputs of transformation programs and NOT on data inputs of individual transformation rules.
Programs
A program (not to be confused with transformation program) is the Java class which performs the actual transformation on the input data. A transformation rule is just an XML description of the interface (inputs and output) of a program.
There are no specific methods that the Java class of a program should define in order to be invokable from the Metadata Broker. Each program can define any number of methods, but when the transformation rule which references it is executed, the Metadata Broker service will use reflection in order to locate the correct method to call based on the input and output types defined in the transformation rule that initiates the call to the program's transformation method. The execution process is the following:
- A client invokes the Metadata Broker requesting the execution of a transformation program.
- For each transformation rule found in the transformation program:
- The Metadata Broker reads the schema, language and type of the transformation rule's inputs, as well as the actual payloads given as inputs. The output format descriptor is also read.
- Based on this information, the Metadata Broker constructs one or more DataSource and/or VariableType objects and a DataSink object, which are wrapper classes around the transformation rule's input and output descriptors. For each input of type 'Record', 'Collection' or 'ResultSet', a DataSource object is created, while a VariableType object is created for every input of type 'Variable'.
- The program to be invoked for the transformation is read from the transformation rule.
- The Metadata Broker uses reflection in order to locate the transformation method to be called inside the program. This is done through the input and output descriptors of the transformation rule, based on the following rules:
- If the transformation rule defines N inputs and one output (where N>=1), the method that will be called should take N+3 parameters.
- When the method is called, the first N parameters are the constructed DataSource or VariableType objects that wrap the actual inputs of the transformation rule.
- Parameter N+1 is the constructed DataSink object that wraps the actual data output of the transformation rule.
- Parameter N+2 is a BrokerStatistics object, which can be used for the logging of performance metrics during the transformation.
- Parameter N+3 is a SecurityManager object, which can be used for the handling of credentials and scoping if other services need to be invoked during the transformation.
The DataSource class is defined as:
public abstract class DataSource<T extends DataElement> { /** * Returns true or false, depending on whether there are more elements to be read from * the data source or not. * @return */ public abstract boolean hasNext(); /** * Reads the next available element from the data source. * @return the next element */ public abstract T getNext() throws Exception; /** * Returns the language of the source data * @return the language */ public String getLanguage() { return format.getLanguage(); } /** * Returns the schema of the source data, in * '<NAME>=<URI>' format. * @return the schema */ public String getSchema() { return format.getSchema(); } /** * Returns the schema URI of the source data * @return the schema URI */ public String getSchemaURI() { return format.getSchemaURI(); } /** * Returns the schema name of the source data * @return the schema name */ public String getSchemaName() { return format.getSchemaName(); } }
This base class is further specialized for different data source types (record, ResultSet or collection) in three subclasses, but the author of a program does not need to be aware of this, since the data access interface is the same for every data source. A program can read the next record from a data source by calling the getNext() method, and check if there are more records to be read by calling the hasNext() method. The type of object returned by getNext() depends on the actual type of input wrapped by the DataSource and can be a RecordDataElement, a ResultSetDataElement or a CollectionDataElement.
The DataSink class is defined as:
public abstract class DataSink<T extends DataElement> { /** * Writes an element to the DataSink. * @param element the element to be written */ public void writeNext(T element) throws Exception; /** * Signals the end of writing. */ public abstract void finishedWriting() throws Exception; /** * Returns a handler to the written data. * @return */ public abstract String getWrittenDataHandle() throws Exception; /** * Creates a new DataElement derived from a given source DataElement, * with a different payload. * @param source the source DataElement * @param payload the new DataElement's payload * @return */ public abstract T getNewDataElement(DataElement source, String payload); /** * Returns the language of the output data * @return the language */ public String getLanguage() { return format.getLanguage(); } /** * Returns the schema of the output data, in * '<NAME>=<URI>' format. * @return the schema */ public String getSchema() { return format.getSchema(); } /** * Returns the schema URI of the output data * @return the schema URI */ public String getSchemaURI() { return format.getSchemaURI(); } /** * Returns the schema name of the output data * @return the schema name */ public String getSchemaName() { return format.getSchemaName(); } }
Just like data sources, this base class is further specialized for different data sink types (record, ResultSet or collection) in three subclasses. A program can write a record to the data sink by calling the writeNext() method. When the program completes the whole writing process, it has to inform the data sink simply by calling the finishedWriting() method. The writeNext() method accepts an object whose type is a subclass of the DataElement class. The actual type of the object depends on the actual type of output wrapped by the DataSink and can be a RecordDataElement, a ResultSetDataElement or a CollectionDataElement. But how does the program construct such objects? The DataSink class offers the getNewDataElement(DataElement source, String payload) method, whose purpose is to construct a new data element compatible with the specific data sink. The 'source' parameter is the original DataElement (the one read from a DataSource) being transformed, and the 'payload' parameter is the transformed payload that will be stored inside the produced DataElement. Finally, a handle to the output data can be retrieved by calling the getWrittenDataHandle() method on the DataSink. The output data handle is the transformed record payload if the sink is a RecordDataSink, a ResultSet locator if the sink is a ResultSetDataSink or a metadata collection ID if the sink is a CollectionDataSink.
Generally speaking, the main logic in a program will be something like this:
- while (source.hasNext()) do the following:
- sourceElement = source.getNext();
- (transform sourceElement to produce 'transformedPayload')
- destElement = sink.getNewDataElement(sourceElement, transformedPayload);
- sink.writeNext(destElement);
- sink.finishedWriting();
Implementation Overview
The metadata broker consists of two components:
- The metadata broker service
The metadata broker service provides the functionality of the metadata broker in the form of a stateless service. In the case of a persistent transformation, the service creates a WS-Resource holding information about this transformation and registers for notifications concerning changes in the input collection(s). The created resources are not published and remain completely invisible to the caller.
The service exposes the following operations:
- transform(TransformationProgramID, params) -> String
This operation takes the ID of a transformation program stored in the IS and a set of transformation parameters. The referenced transformation program is executed using the provided parameters, which are just a set of value assignments to variables defined inside the transformation program. The metadata broker library contains a helper class for creating such a parameter set. - transformWithNewTP(TransformationProgram, params) -> String
This operation offers the same functionality as the previous one. However, in this case the first parameter is the full XML definition of a transformation program in string format and not the ID of a stored one. - findPossibleTransformationPrograms (InputDesc, OutputDesc) -> TransformationProgram[]
This operation takes the description of some input format (type, language and schema) as well as the description of a desired output format, and returns an array of transformation program definitions that could be used in order to perform the required conversion. These transformation programs may not exist before invoking this operation. They are produced on the fly, by combining all the existing transformation programs which are compatible with each other, trying to synthesize more complex transformation programs. Of course, if there is already an existing transformation program which is applicable for the requested type of transformation, it is included in the results. If the output format is null, then the returned array contains all transformation programs that can be applied to the specified input format, producing any possible output format.
- transform(TransformationProgramID, params) -> String
- The metadata broker library
The metadata broker library contains the definitions of the DataSource, RecordDataSource, ResultSetDataSource, CollectionDataSource, DataSink, RecordDataSink, ResultSetDataSink, CollectionDataSink, DataElement, RecordDataElement, ResultSetDataElement, CollectionDataElement, VariableType, SecurityManager and BrokerStatistics Java classes. The following programs are also included in it:- Generic XSLT transformer (XSLT_Transformer): transforms a given record, ResultSet or metadata collection using a given XSLT definition. The output is the transformed record, ResultSet or metadata collection.
- Custom programs transforming metadata to RowSet format, used for feeding the various types of indices in the infrastructure:
- ES metadata to geo RowSet format (ES_2_geoRowset): transforms ES metadata to RowSets suitable for feeding a geospatial index. The transformation is done using a predefined XSLT (given as a parameter to the program) and then the indexType is also injected in every produced RowSet so that the index can find this information.
- Metadata to fulltext RowSet format (Metadata_2_ftsIndexRowset): transforms metadata of any type to RowSets suitable for feeding a full text index. The transformation is done using a predefined XSLT (given as a parameter to the program) and then some custom processing is done over the produced RowSets so that the indexType as well as the OIDs of the original metadata are injected in the RowSets.
- The transformation of metadata using any of the above programs is a non-blocking operation. This means that the caller will not block until the transformation is completed, since the process of transforming a big ResultSet or collection may be quite time-consuming. For this purpose, each program spawns a new thread to perform the transformation process in the background, while the output data handle is returned to the caller immediately since it's created before the transformation begins. The only exception to the thread spawning mechanism is the transformation of single records. Such transformations are pretty fast, so there is no need for background processing.
- Each program is placed in a java package of its own, beginning with ‘org.gcube.metadatamanagement.metadatabrokerlibrary.programs’. However, this is just a convention followed for the default programs contained in the metadata broker library. There is no restriction on the package names of user-defined programs. In order for user-defined programs to be accessible by the Metadata Broker, they should be put in JAR files and copied to the ‘lib’ directory under the installation directory of gCore (or to any directory that belongs to the CLASSPATH environment variable).
Dependencies
- Metadata Broker Service
- jdk 1.5
- gCore
- Metadata Broker Library
- Metadata Manager Library
- Metadata Manager Stubs
- Metadata Broker Library
- jdk 1.5
- gCore
- ResultSet bundle
- Metadata Manager stubs
- Metadata Manager library
Usage Examples
The following examples show how some of the transformation programs contained in the metadata broker library can be used. For this purpose, the client-side code is shown, describing the necessary steps to invoke the operations of the metadata broker service. Furthermore, the full definition of the referenced programs and transformation programs is also given. These definitions can be used as the base for creating new programs and transformation programs by anyone who needs to do this.
Transforming a single record, ResultSet or collection using an XSLT
This is the XSLT_Transformer class (included in the metadata broker library), which performs the actual conversion:
package org.gcube.metadatamanagement.metadatabrokerlibrary.programs.XSLT_Transformer;

import java.io.StringReader;
import java.io.StringWriter;
import java.rmi.RemoteException;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.gcube.common.core.utils.logging.GCUBELog;
import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataElement;
import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataSink;
import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.DataSource;
import org.gcube.metadatamanagement.metadatabrokerlibrary.datahandlers.RecordDataSource;
import org.gcube.metadatamanagement.metadatabrokerlibrary.programs.VariableType;
import org.gcube.metadatamanagement.metadatabrokerlibrary.util.GenericResourceRetriever;
import org.gcube.metadatamanagement.metadatabrokerlibrary.util.SecurityManager;
import org.gcube.metadatamanagement.metadatabrokerlibrary.util.stats.BrokerStatistics;
import org.gcube.metadatamanagement.metadatabrokerlibrary.util.stats.BrokerStatisticsConstants;

/**
 * Transformation program that applies an XSLT (retrieved from the IS as a
 * Generic Resource) to every element provided by a {@link DataSource},
 * writing each transformed element to a {@link DataSink}.
 *
 * <p>Single-record sources are transformed synchronously in the caller's
 * thread; any other source type is processed in a background thread so the
 * caller is not blocked for the duration of a bulk transformation.
 */
public class XSLT_Transformer {

    /** The Logger this class uses */
    private static GCUBELog log = new GCUBELog(XSLT_Transformer.class);

    /**
     * Entry point invoked by the wrapping transformation rule.
     *
     * @param source     the data source supplying the elements to transform
     * @param xslt       variable holding the Generic Resource ID of the XSLT
     * @param sink       the data sink receiving the transformed elements
     * @param statistics collector for Metadata Broker timing statistics
     * @param secManager security engine; its credentials/scope are delegated
     *                   to the worker thread for non-record sources
     * @throws RemoteException if the transformation cannot be started
     */
    public <T extends DataElement, S extends DataElement> void transform(final DataSource<T> source,
            final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics,
            final SecurityManager secManager) throws RemoteException {
        /* If the input is a single record, there is no point in spawning a new thread for the
         * transformation because it will not last for a long time. In any other case, the
         * transformation is executed in a new thread. */
        if (source instanceof RecordDataSource) {
            log.debug("Input type is 'record', transforming in current thread.");
            doTransform(source, xslt, sink, statistics, secManager);
        } else {
            log.debug("Input type is not 'record', spawning new thread.");
            Thread t = new Thread() {
                public void run() {
                    doTransform(source, xslt, sink, statistics, secManager);
                }
            };
            /* Delegate this thread's credentials to the new thread, and start it */
            secManager.delegateCredentialsAndScopeToThread(t);
            t.start();
        }
    }

    /**
     * Performs the actual retrieval/compilation of the XSLT and the
     * element-by-element transformation loop. Failures on individual
     * elements are logged and skipped; fatal setup failures abort the run.
     */
    private <T extends DataElement, S extends DataElement> void doTransform(final DataSource<T> source,
            final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics,
            final SecurityManager secManager) {
        log.debug("Starting transformation, scope is: " + secManager.getScope());
        /* Retrieve the XSLT from the IS and compile it so that the records will be transformed faster */
        String xsltdef = null;
        try {
            statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETORETRIEVEGR);
            xsltdef = GenericResourceRetriever.retrieveGenericResource(xslt.getReference(), secManager);
            statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETORETRIEVEGR);
        } catch (Exception e) {
            log.error("XSLT_Transformer: Failed to retrieve the given XSLT from the IS (Generic Resource ID: "
                    + xslt.getReference() + ").", e);
            return;
        }
        Templates compiledXSLT = null;
        try {
            TransformerFactory factory = TransformerFactory.newInstance();
            compiledXSLT = factory.newTemplates(new StreamSource(new StringReader(xsltdef)));
        } catch (Exception e) {
            log.error("XSLT_Transformer: Failed to compile the XSLT with ID: " + xslt.getReference(), e);
            /* FIX: abort here; proceeding with a null compiledXSLT would throw a
             * NullPointerException at compiledXSLT.newTransformer() below. */
            return;
        }
        /* Loop through each source element, transform it, and store it in the resulting entity */
        statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMALL);
        while (source.hasNext()) {
            T sourceElement = null;
            S destElement = null;
            try {
                sourceElement = source.getNext();
            } catch (Exception e) {
                log.error("XSLT_Transformer: Failed to retrieve next element from DataSource. Aborting transformation", e);
                return;
            }
            StringWriter output = new StringWriter();
            try {
                statistics.startMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMREC);
                Transformer t = compiledXSLT.newTransformer();
                t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
                t.transform(new StreamSource(new StringReader(sourceElement.getPayload())), new StreamResult(output));
                statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMREC);
            } catch (Exception e) {
                /* FIX: pass the caught exception to the logger (it was silently dropped)
                 * and balance the parenthesis in the message. */
                log.error("Failed to transform current element (ID = " + sourceElement.getID() + "):\n"
                        + sourceElement.getPayload() + "\nContinuing with the next element.", e);
                continue;
            }
            try {
                destElement = sink.getNewDataElement(sourceElement, output.toString());
                sink.writeNext(destElement);
            } catch (Exception e) {
                log.error("Failed to store the transformed record to the data sink. Continuing with the next element.", e);
                continue;
            }
        }
        statistics.doneMeasuringValue(BrokerStatisticsConstants.STAT_TIMETOTRANSFORMALL);
        /* Notify the data sink that there is no more data to be written */
        try {
            sink.finishedWriting();
        } catch (Exception e) {
            log.error("Failed to finalize writing of output data.", e);
        }
    }
}
The only transformation method that can be used externally (when this program is called by a transformation program) is:
public <T extends DataElement, S extends DataElement> void transform(final DataSource<T> source, final VariableType xslt, final DataSink<S> sink, final BrokerStatistics statistics, final SecurityManager secManager)
Taking a closer look at the declaration of this method, we can see that the first parameter is a DataSource, followed by a VariableType and then by a DataSink. This means that the transformation rule that wraps this program has two inputs. The first one is a data input and the second one is a variable. Finally, a BrokerStatistics object and a SecurityManager object are passed to the method so that the program has access to the Metadata Broker statistics and security engine. Note that since we cannot know (at design time) the type of input and output (they may be records, resultsets or collections), we have to use Java generics. The correct way to write a transformation method is to follow the pattern shown in this example, i.e. using two generic types: T as a placeholder for the actual input data type and S as a placeholder for the output data type.
This is the XML definition of the transformation program:
<?xml version="1.0" encoding="UTF-8"?> <TransformationProgram> <Input name="TPInput"> <Schema isVariable="true" /> <Language isVariable="true" /> <Type>record</Type> <Reference isVariable="true" /> </Input> <Variable name="XSLT" /> <Output name="TPOutput"> <Schema isVariable="true" /> <Language isVariable="true" /> <Type>record</Type> </Output> <TransformationRule> <Definition> <Transformer>org.gcube.metadatamanagement.metadatabrokerlibrary.programs.XSLT_Transformer.XSLT_Transformer</Transformer> <Input name="Rule1Input1"> <Schema isVariable="true"> //Input[@name='TPInput']/Schema </Schema> <Language isVariable="true"> //Input[@name='TPInput']/Language </Language> <Type>record</Type> <Reference isVariable="true"> //Input[@name='TPInput']/Reference </Reference> </Input> <Input name="Rule1Input2"> <Schema /> <Language /> <Type>variable</Type> <Reference isVariable="true"> //Variable[@name='XSLT'] </Reference> </Input> <Output name="TPRule1Output"> <Reference>//Output[@name='TPOutput']</Reference> </Output> </Definition> </TransformationRule> </TransformationProgram>
In this example, the transformation program defined above is stored in the DIS as a profile with UniqueID=ce6b9860-ebfe-11db-8b69-dd428ed9686d. The input record that is going to be transformed is stored in a local file named input.xml, and the XSLT that will be used is stored as a generic resource with UniqueID=ed358e00-23f2-11dc-a35f-9c01d805f283 in the DIS. The following code fragment reads the input record from the file, creates a set of parameters which are used in order to assign the input data and the XSLT ID to the respective transformation program variable inputs, and then invokes the transform operation of the metadata broker service. The result is written to the console. The URI of the remote service is given as a command-line argument.
public class Client { public static void main(String[] args) { try { // Get the broker service porttype EndpointReferenceType endpoint = new EndpointReferenceType(); endpoint.setAddress(new Address(args[0])); MetadataBrokerPortType broker = new MetadataBrokerServiceAddressingLocator().getMetadataBrokerPortTypePort(endpoint); // Read the input data file into a string String inputData = readTextFile("input.xml"); // Create a set of transformation parameters, assigning values to variables // defined in the transformation program TransformationParameters tparams = TransformationParameters.newInstance(); tparams.addParameter("//Input[@name='TPInput']/Schema", "Schema1=URI1"); tparams.addParameter("//Input[@name='TPInput']/Language", "en"); tparams.addParameter("//Input[@name='TPInput']/Reference", inputData); tparams.addParameter("//Output[@name='TPOutput']/Schema", "Schema2=URI2"); tparams.addParameter("//Output[@name='TPOutput']/Language", "en"); tparams.addParameter("//Variable[@name='XSLT']", "ed358e00-23f2-11dc-a35f-9c01d805f283"); // Prepare the invocation parameters TransformWithNewTP params = new TransformWithNewTP(); params.setTransformationProgramID("ce6b9860-ebfe-11db-8b69-dd428ed9686d"); params.setParameters(tparams.getAsString()); // Invoke the remote operation and write the result to the console System.out.println(broker.transform(params)); } catch (Exception e) { e.printStackTrace(); } } private static String readTextFile(String filename) throws IOException { BufferedReader br = new BufferedReader(new FileReader(filename)); StringBuffer buf = new StringBuffer(); String tmp; while ((tmp = br.readLine()) != null) { buf.append(tmp + "\n"); } br.close(); return buf.toString(); } }
Transforming an entire ResultSet using an XSLT
This is the definition of the GXSLT_RS2RS class (included in the metadata broker library), which performs the actual conversion:
package org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.GXSLT_RS2RS;

import java.rmi.RemoteException;

import org.apache.log4j.Logger;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.Program;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.ResultSetType;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.VariableType;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.GXSLT_RS2RS.GXSLT_RS2RS_Worker;
import org.diligentproject.searchservice.searchlibrary.rsclient.elements.RSResourceWSRFType;
import org.diligentproject.searchservice.searchlibrary.rswriter.RSXMLWriter;

/**
 * Program that transforms an entire ResultSet using an XSLT.
 *
 * <p>The transformation is non-blocking: a {@link GXSLT_RS2RS_Worker} thread
 * performs the actual work while this method returns immediately, exposing
 * the locator of the output ResultSet through {@link #getOutput()}.
 */
public class GXSLT_RS2RS implements Program {

    /** The Logger this class uses. */
    private static Logger log = Logger.getLogger(GXSLT_RS2RS.class);

    /** Locator of the output ResultSet; null until transform() succeeds. */
    private String output = null;

    /**
     * Starts the background transformation of the given ResultSet.
     *
     * @param RS    the input ResultSet descriptor
     * @param xslt  variable holding the ID of the XSLT to apply
     * @param outRS the output ResultSet descriptor
     * @throws RemoteException if the output ResultSet writer cannot be created
     */
    public void transform(ResultSetType RS, VariableType xslt, ResultSetType outRS) throws RemoteException {
        try {
            RSXMLWriter writer = RSXMLWriter.getRSXMLWriter();
            // The worker is started first; the output handle is obtained right
            // after so it can be returned to the caller while the worker fills
            // the ResultSet in the background (see the non-blocking contract).
            new GXSLT_RS2RS_Worker(RS, writer, xslt).start();
            output = writer.getRSLocator(new RSResourceWSRFType()).getLocator();
        } catch (Exception e) {
            log.error("GXSLT_RS2RS: Failed to create writer for output resultset.", e);
            throw new RemoteException("GXSLT_RS2RS: Failed to create writer for output resultset.", e);
        }
    }

    /**
     * @return the locator of the output ResultSet, or null if
     *         {@link #transform} has not completed successfully
     */
    public String getOutput() {
        return this.output;
    }
}
As stated before, bulk transformations are non-blocking. For this reason, the above code spawns a new thread to handle the transformation process. The definition of the GXSLT_RS2RS_Worker class (which extends the Thread class) follows.
package org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.GXSLT_RS2RS;

import javax.xml.transform.Templates;

import org.apache.log4j.Logger;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.ResultSetType;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.VariableType;
import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.GXSLT_Rec2Rec.GXSLT_Rec2Rec;
import org.diligentproject.searchservice.searchlibrary.resultset.elements.ResultElementGeneric;
import org.diligentproject.searchservice.searchlibrary.rsclient.elements.RSLocator;
import org.diligentproject.searchservice.searchlibrary.rsclient.elements.RSResourceLocalType;
import org.diligentproject.searchservice.searchlibrary.rsreader.RSXMLIterator;
import org.diligentproject.searchservice.searchlibrary.rsreader.RSXMLReader;
import org.diligentproject.searchservice.searchlibrary.rswriter.RSXMLWriter;

/**
 * Worker thread that performs the actual ResultSet-to-ResultSet XSLT
 * transformation for {@link GXSLT_RS2RS}: it compiles the XSLT once,
 * iterates over the input ResultSet, transforms each element via the
 * GXSLT_Rec2Rec program, and appends the result to the output writer.
 *
 * NOTE(review): this listing references GenericResourceRetriever without a
 * visible import — presumably it lives in the library's util package;
 * verify against the actual source.
 */
class GXSLT_RS2RS_Worker extends Thread {

    /** The Logger this class uses. */
    private static Logger log = Logger.getLogger(GXSLT_RS2RS_Worker.class);

    // Input ResultSet descriptor supplied by the calling program.
    private ResultSetType RS;
    // Writer for the output ResultSet; closed when the worker finishes or fails.
    private RSXMLWriter writer;
    // Variable holding the reference (ID) of the XSLT to apply.
    private VariableType xslt;

    public GXSLT_RS2RS_Worker(ResultSetType resultSet, RSXMLWriter RSWriter, VariableType xsltToUse) {
        RS = resultSet;
        writer = RSWriter;
        xslt = xsltToUse;
    }

    public void run() {
        // 'element' holds the payload currently being processed so the catch
        // block can report which record the transformation stopped at;
        // 'i' counts successfully processed elements.
        String element = null;
        int i = 0;
        try {
            /* Compile the XSLT so that the records in the resultset will be transformed faster */
            String xsltdef = GenericResourceRetriever.retrieveGenericResource(xslt.getReference());
            Templates compiledXSLT = GXSLT_Rec2Rec.compileXSLT(xsltdef);
            /* Read each record of the input ResultSet, use the GXSLT_Rec2Rec program in order to transform
             * it and add it to the output ResultSet. */
            GXSLT_Rec2Rec GXSLTRecProgram = new GXSLT_Rec2Rec();
            RSXMLReader reader = RSXMLReader.getRSXMLReader(new RSLocator(RS.toString()));
            // 1200000 ms (20 min) timeouts for localizing and iterating the ResultSet.
            RSXMLIterator iter = reader.makeLocalPatiently(new RSResourceLocalType(), 1200000).getRSIterator(1200000);
            while (iter.hasNext()) {
                // Keep the writer's inactivity timer from expiring during long runs.
                if (writer.isTimerAlive()) writer.resetTimer();
                ResultElementGeneric elem = (ResultElementGeneric) iter.next(ResultElementGeneric.class);
                if (elem == null) continue;
                element = elem.getPayload();
                GXSLTRecProgram.transform(element, compiledXSLT);
                // Preserve the original record ID and collection name on the output element.
                writer.addResults(new ResultElementGeneric(elem.getRecordAttributes(ResultElementGeneric.RECORD_ID_NAME)[0].getAttrValue(), elem.getRecordAttributes(ResultElementGeneric.RECORD_COLLECTION_NAME)[0].getAttrValue(), GXSLTRecProgram.getOutput()));
                i++;
                // Reset so a later failure outside this iteration is not blamed on this record.
                element = null;
            }
            writer.close();
        } catch (Exception e) {
            // If 'element' is non-null the failure happened mid-record, so the
            // reported index is advanced to point at the failing element.
            if (element != null) i++;
            log.error("GXSLT_RS2RS: Failed to transform the given resultset. Stopped at element " + String.valueOf(i) + ":\n" + element, e);
            e.printStackTrace();
            // Best-effort close so readers of the output ResultSet are not left hanging.
            try { writer.close(); } catch (Exception e1) { log.error("GXSLT_RS2RS: Failed to close resultset."); }
        }
    }
}
The above code uses the GXSLT_Rec2Rec program to compile the XSLT so that the transformation executes as fast as possible. Then it iterates over the whole set of elements contained in the ResultSet, transforming each one using the compiled XSLT. Each transformed element is then added to the output ResultSet.
The following is the XML definition of the transformation program used for this type of transformation.
<TransformationProgram> <Input name="TPInput"> <Schema isVariable="true" /> <Language isVariable="true" /> <Type>resultset</Type> <Reference isVariable="true" /> </Input> <Variable name="XSLT" /> <Output name="TPOutput"> <Schema isVariable="true" /> <Language isVariable="true" /> <Type>resultset</Type> </Output> <TransformationRule> <Definition> <Transformer>org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.GXSLT_RS2RS.GXSLT_RS2RS</Transformer> <Input name="Rule1Input1"> <Schema isVariable="true"> //Input[@name='TPInput']/Schema </Schema> <Language isVariable="true"> //Input[@name='TPInput']/Language </Language> <Type>resultset</Type> <Reference isVariable="true"> //Input[@name='TPInput']/Reference </Reference> </Input> <Input name="Rule1Input2"> <Schema /> <Language /> <Type>variable</Type> <Reference isVariable="true"> //Variable[@name='XSLT'] </Reference> </Input> <Output name="TPRule1Output"> <Reference>//Output[@name='TPOutput']</Reference> </Output> </Definition> </TransformationRule> </TransformationProgram>
In this example, the transformation program defined above is stored in the DIS as a profile with UniqueID=eb46fc40-ebfe-11db-8b6b-dd428ed9686d. The EPR of the input ResultSet that is going to be transformed is stored in a local file named input.xml, and the XSLT is the same used in the previous example. The URI of the remote service is given as a command-line argument. The client code that invokes the broker service and performs the transformation is the same as in the previous example. The only thing that changes is the ID of the transformation program that is called, which should be set to eb46fc40-ebfe-11db-8b6b-dd428ed9686d.
Using a transformation program within another transformation program
As stated before, whole transformation programs can be used as 'black-box' components inside another transformation program. This can be done by defining a transformation rule which describes the call to the second transformation program.
The transformation program that will be called from another transformation program in this example is defined below.
<TransformationProgram> <Input name="TPInput"> <Schema>SCH1=http://schema1.xsd</Schema> <Language>en</Language> <Type>resultset</Type> <Reference isVariable="true" /> </Input> <Output name="TPOutput"> <Schema>SCH3=http://schema3.xsd</Schema> <Language>en</Language> <Type>resultset</Type> </Output> <TransformationRule> <Definition> <Transformer>org.diligentproject.program2</Transformer> <Input name="Rule2Input"> <Schema isVariable="true">//Output[@name='TPRule1Output']/Definition/Schema</Schema> <Language isVariable="true">//Output[@name='TPRule1Output']/Definition/Language</Language> <Type>resultset</Type> <Reference isVariable="true">//Output[@name='TPRule1Output']/Definition/Reference</Reference> </Input> <Output name="Rule2Output"> <Reference>//Output[@name='TPOutput']</Reference> </Output> </Definition> </TransformationRule> </TransformationProgram>
The input and output schemas and languages are predefined inside this transformation program, so the only thing that should be specified at run-time is the actual input data reference. Let's say that this transformation program is stored in the DIS and its UniqueID is 910e0710-f251-11db-88f9-f971eaf0d653.
The transformation program that uses the above transformation program is defined below.
<TransformationProgram> <Input name="TPInput"> <Schema>SCH1=http://schema1.xsd</Schema> <Language>en</Language> <Type>resultset</Type> <Reference isVariable="true" /> </Input> <Variable name="var1"/> <Output name="TPOutput"> <Schema>SCH2=http://schema2.xsd</Schema> <Language>en</Language> <Type>resultset</Type> </Output> <TransformationRule> <Reference> <Program>910e0710-f251-11db-88f9-f971eaf0d653</Program> <Value isVariable="true" target="//Input[@name='TPInput']/Reference">//Input[@name='TPInput']/Reference</Value> <Output name="Rule1Output" /> </Reference> </TransformationRule> <TransformationRule> <Definition> <Transformer>org.diligentproject.program1</Transformer> <Input name="Rule2Input1"> <Schema isVariable="true">//Output[@name='TPRule1Output']/Definition/Schema</Schema> <Language isVariable="true">//Output[@name='TPRule1Output']/Definition/Language</Language> <Type>resultset</Type> <Reference isVariable="true">//Output[@name='TPRule1Output']/Definition/Reference</Reference> </Input> <Input name="Rule2Input2"> <Schema /> <Language /> <Type>variable</Type> <Reference isVariable="true"> //Variable[@name='var1'] </Reference> </Input> <Output name="Rule2Output"> <Reference>//Output[@name='TPOutput']</Reference> </Output> </Definition> </TransformationRule> </TransformationProgram>
The element that describes the call to the first transformation program is the first TransformationRule element. This element specifies the UniqueID of the transformation program to be called, as well as a mapping of values to the variable inputs of that transformation program. Since the first transformation program contains only one variable input (the input data reference), there is only one mapping in this example, described by a Value element. The target attribute of this element specifies the target element of the other transformation program whose value is to be set, and the element's content specifies the actual value to set. In this example, this is not a literal value but a reference to another element of the transformation program, where the value should be taken from. Specifically, we have specified that the first transformation program's input should be the input of the second transformation program. Since the value of the Value element is a XPath expression, the isVariable attribute is also set to true, meaning that the content should be interpreted as a reference to another element and not as a literal value. The output of the first transformation program becomes the output of the transformation rule that called it, and is named Rule1Output. This output is then used as the input of the next transformation rule.
Finding a set of transformation programs given a source and target metadata formats
This example demonstrates how one can get an array of transformation programs that could be used in order to transform metadata from a given source format to a given target format. The operation that can be used in order to accomplish this is 'findPossibleTransformationPrograms'. The caller must specify a source and target metadata format and the service searches for possible "chains" of existing transformation programs that could be used in order to carry out the transformation. There are three rules imposed by the Metadata Broker service:
- Only transformation programs with one data input are considered during the search
- Each transformation program can be used at most one time inside each chain of transformation programs (this is needed in order to avoid infinite loops)
- A transformation program that produces a collection as its output can only be the last one inside a chain of transformation programs
Each chain composed by the Metadata Broker service is converted to a transformation program, which "links" the individual transformation programs forming the chain. This transformation program contains a transformation rule for each transformation program in the chain. Each transformation rule describes a call to the corresponding transformation program. The result of the operation is an array of strings, where each string corresponds to a synthesized transformation program.
It is possible that some of the transformation programs included in a chain contain some input variables. For each found variable, the Metadata Broker service places a variable to the synthesized transformation program, and this variable is mapped to the original one. This way one can specify the values of the variables contained in every transformation program involved in the chain, by specifying the values of the corresponding variables of the synthesized transformation program. This mechanism is necessary because the individual transformation programs contained in the chain are not visible to the caller. The only entity that the caller sees is the synthesized transformation program that is responsible for calling the ones it is built from.
Consider the case where a transformation program whose output language is a variable is added to a chain. When the service searches for another transformation program to append to the chain after that one, it may find a transformation program whose input language is 'en' (English). Then, the value 'en' will be assigned to the variable field describing the previous transformation program's output language. The same happens if an output field (schema or language) of a transformation program contains a specific value and the corresponding input field of the next transformation program is a variable. But what happens if the two fields are both variables? In this case, an input variable is added to the synthesized transformation program. When the caller uses this transformation program, he/she will need to specify a value for this variable. That value will then be assigned automatically both to the output field of the first transformation program and to the input field of the second transformation program.
Now let's see how one can call the 'findPossibleTransformationPrograms' operation:
import org.apache.axis.message.addressing.Address; import org.apache.axis.message.addressing.EndpointReferenceType; import org.diligentproject.metadatamanagement.metadatabrokerlibrary.programs.TPIOType; import org.diligentproject.metadatamanagement.metadatabrokerservice.stubs.FindPossibleTransformationProgramsResponse; import org.diligentproject.metadatamanagement.metadatabrokerservice.stubs.MetadataBrokerPortType; import org.diligentproject.metadatamanagement.metadatabrokerservice.stubs.FindPossibleTransformationPrograms; import org.diligentproject.metadatamanagement.metadatabrokerservice.stubs.service.MetadataBrokerServiceAddressingLocator; public class TestFindPossibleTPs { public static void main(String[] args) { try { // Create endpoint reference to the service EndpointReferenceType endpoint = new EndpointReferenceType(); endpoint.setAddress(new Address(args[0])); MetadataBrokerPortType broker = new MetadataBrokerServiceAddressingLocator().getMetadataBrokerPortTypePort(endpoint); // Create the IO format descriptors TPIOType inFormat = TPIOType.fromParams(args[1], args[2], args[3], ""); TPIOType outFormat = TPIOType.fromParams(args[4], args[5], args[6], ""); // Prepare the invocation parameters FindPossibleTransformationPrograms params = new FindPossibleTransformationPrograms(); params.setInputFormat(inFormat.toXMLString()); params.setOutputFormat(outFormat.toXMLString()); // Invoke the remote operation FindPossibleTransformationProgramsResponse resp = broker.findPossibleTransformationPrograms(params); String[] TPs = resp.getTransformationProgram(); for (String TP : TPs) { System.out.println(TP); System.out.println(); } } catch (Exception e) { e.printStackTrace(); } } }
This code fragment assumes the following:
- args[0] = the Metadata Broker service URI
- args[1] = the source format type ('resultset', 'collection' or 'record')
- args[2] = the source format language
- args[3] = the source format schema (in 'schemaName=schemaURI' format)
- args[4] = the target format type ('resultset', 'collection' or 'record')
- args[5] = the target format language
- args[6] = the target format schema (in 'schemaName=schemaURI' format)
First, an endpoint reference to the metadata broker service is created. Then, we have to create the source and target format descriptors. The remote operation accepts two strings describing the two metadata formats. These strings are nothing more than the serialized form of two TPIOType objects. The TPIOType class is the base class of the CollectionType, ResultSetType and RecordType classes. This class defines the static method fromParams which creates and returns an object describing a metadata format based on given values for the format's schema, language, type and data reference. The returned object will be an instance of the correct class (derived from TPIOType), based on the given value for the 'type' attribute. Here, the 'reference' attribute is not used because we are interested in the metadata format itself and not in the data it describes. After constructing the two objects, we get their serialized form by calling the toXMLString() method on them. The returned strings are the ones that must be passed to the remote operation.
Next, we invoke the remote operation and then we just print the returned transformation programs.