Mercurial > hg > semantic-sia
changeset 70:fe08d8cdd150
new
author | stevenh |
---|---|
date | Fri, 02 Aug 2013 15:01:59 +0100 |
parents | 94e6592eb106 |
children | 39106212a3c6 |
files | src/org/qmul/eecs/c4dm/sia/N3ToRdf.java src/org/qmul/eecs/c4dm/sia/SiaSemWeb.java |
diffstat | 2 files changed, 293 insertions(+), 0 deletions(-) [+] |
line wrap: on
line diff
package org.qmul.eecs.c4dm.sia;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.ModelFactory;

/**
 * Utility class which converts an N3 file to RDF/XML.
 *
 * <p>Reads the configured N3 ontology into a plain (non-reasoning) Jena
 * ontology model and serialises the whole model, including any imported
 * sub-models, to {@code rdfFileName + ".rdf"} in RDF/XML syntax.
 *
 * @author stevenh
 */
public class N3ToRdf {

    // Ontology N3 file to convert
    private static final String n3ontology = "file:src/rdf/siaTestDatapointOntology.n3";
//    private static final String n3ontology = "file:src/rdf/midiModel.n3";

    // Base name of the RDF/XML output file (".rdf" is appended)
    private static final String rdfFileName =
            "/Volumes/USB_DISK/mercurial/SiaSesame/ontology/siaTestDatapointOntology";

    /**
     * Converts the N3 ontology to RDF/XML and reports the output path.
     * Exits with status 1 if the output file cannot be written.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Create a plain ontology model (no reasoner needed for conversion)
        OntModel ontModel = ModelFactory.createOntologyModel();

        // Read the ontology and data from the N3 file into the ontology model
        ontModel.read(n3ontology, "N3");

        // Write the model to a file in RDF/XML syntax.
        File outFileRdf = new File(rdfFileName + ".rdf");

        // try-with-resources guarantees the stream is closed; the original
        // code leaked the FileOutputStream on every path. IOException (rather
        // than just FileNotFoundException) also covers failures on close().
        try (FileOutputStream outFileOutputStreamRdf = new FileOutputStream(outFileRdf)) {
            // writeAll includes imported sub-models, not just the base model
            ontModel.writeAll(outFileOutputStreamRdf, "RDF/XML", null);
        } catch (IOException e) {
            System.out.println("Unable to write to file: "
                    + outFileRdf.getAbsolutePath());
            e.printStackTrace();
            System.exit(1);
        }

        System.out.println("Model written to file: "
                + outFileRdf.getAbsolutePath());
    }
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/qmul/eecs/c4dm/sia/SiaSemWeb.java Fri Aug 02 15:01:59 2013 +0100 @@ -0,0 +1,225 @@ +package org.qmul.eecs.c4dm.sia; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; + +import org.apache.jena.web.DatasetGraphAccessorHTTP; +import org.mindswap.pellet.jena.PelletReasonerFactory; +import org.qmul.eecs.c4dm.sparql.utilities.SparqlWrapperMethods; + +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.ontology.OntModelSpec; +import com.hp.hpl.jena.query.Dataset; +import com.hp.hpl.jena.query.DatasetAccessor; +import com.hp.hpl.jena.query.DatasetAccessorFactory; +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.tdb.TDBFactory; +import com.hp.hpl.jena.update.UpdateAction; + +public class SiaSemWeb { + + private static final String selectOrderedVteInfoQuery = "file:src/sparql/select_ordered_vte_info.sparql"; + private static final String selectAllQuery = "file:src/sparql/select_all.sparql"; + private static final String selectEqualVtesQuery = "file:src/sparql/select_equal_vtes.sparql"; + private static final String selectVectorTableDetailsQuery = "file:src/sparql/select_vector_table_details.sparql"; + private static final String selectPartialQuery = "file:src/sparql/partial.sparql"; + private static final String selectVteEquivalenceQuery = "file:src/sparql/select_vte_equivalence.sparql"; + private static final String selectMtpsQuery = "file:src/sparql/select_mtps.sparql"; + private static final String selectNewVectorsQuery = "file:src/sparql/select_new_vectors.sparql"; + private static final String selectExistingVectorsQuery = "file:src/sparql/select_existing_vectors.sparql"; + + // SPARQL updates + private static final String insertDatapointOrderBarOneSparql = 
"file:src/sparql/insert_datapoint_order_bar_one.sparql"; + private static final String insertDatapointOrderLastOneSparql = "file:src/sparql/insert_datapoint_order_last_one.sparql"; + private static final String insertSiatecVectorTableBNodesSparql = "file:src/sparql/insert_siatec_vector_table_bnodes.sparql"; + private static final String insertSetVClassificationSparql = "file:src/sparql/insert_set_v_classification.sparql"; + private static final String insertSetWClassificationSparql = "file:src/sparql/insert_set_w_classification.sparql"; + private static final String insertNewDimValsForVectorTableSparql = "file:src/sparql/insert_new_dimVals_for_vector_table.sparql"; + private static final String insertVectorTableDetailsSparql = "file:src/sparql/insert_vector_table_details.sparql"; + private static final String insertVteOrderBarOneSparql = "file:src/sparql/insert_vte_order_bar_one.sparql"; + private static final String insertVteOrderLastOneSparql = "file:src/sparql/insert_vte_order_last_one.sparql"; + private static final String insertDimensionEquivalenceSparql = "file:src/sparql/insert_dimension_equivalence.sparql"; + private static final String insertVteEquivalenceSparql = "file:src/sparql/insert_vte_equivalence.sparql"; + private static final String insertTecsSparql = "file:src/sparql/insert_tecs.sparql"; + private static final String insertTranslatedTecBnodesSparql = "file:src/sparql/insert_translated_tec_bnodes.sparql"; + private static final String insertCORRECTEquivalentTecDetailsSparql = "file:src/sparql/insert_correct_equivalent_tec_details.sparql"; + private static final String insertNewVectorsSparql = "file:src/sparql/insert_new_vectors.sparql"; + private static final String insertEqualVteSuperclassesSparql = "file:src/sparql/insert_equal_vte_superclasses.sparql"; + + // Ontology file + private static final String ontology = "file:src/rdf/siaTestDatapointOntology.n3"; +// private static final String ontology = "file:src/rdf/midiModel.n3"; + + // The final 
output file + private static final String finalModelFileName = "src/rdf/finalSiaModel"; + + // The HTTP sparql (sesame) services + private static final String sparqlQueryService = "http://localhost:8080/openrdf-workbench/repositories/sia4/query"; + private static final String sparqlUpdateService = "http://localhost:8080/openrdf-workbench/repositories/sia4/update"; + private static final String sparqlAddService = "http://localhost:8080/openrdf-workbench/repositories/sia4/add"; + private static final String sparqlRemoveService = "http://localhost:8080/openrdf-workbench/repositories/sia4/remove"; + private static final String sparqlClearService = "http://localhost:8080/openrdf-workbench/repositories/sia4/clear"; + + /** + * @param args + */ + public static void main(String[] args) { + + // Create a pellet ontology model + OntModel ontModel = ModelFactory + .createOntologyModel(PelletReasonerFactory.THE_SPEC); + +// // Create a jena ontology model +// OntModel ontModel = ModelFactory +// .createOntologyModel(); + + // Read the ontology and data from the file into the ontology model + ontModel.read(ontology, "N3"); + + //TODO Should probably check for and deal with duplicate vectors here + // (vectors must be unique - datapoints don't have to be) + +// DatasetAccessor sparqlAdd = DatasetAccessorFactory.createHTTP(sparqlAddService); + +// DatasetGraphAccessorHTTP datasetGraphAccessorHTTP = new DatasetGraphAccessorHTTP(sparqlAddService); + +// System.out.println("Load the model..."); +//// sparqlAdd.add(ontModel); +// datasetGraphAccessorHTTP.httpPut(ontModel.getGraph()); +// +// System.out.println("Final model..."); +// ResultSet rs = SparqlWrapperMethods.querySparqlService(selectAllQuery, sparqlQueryService); +// +// while (rs.hasNext()) +// { +// QuerySolution soln = rs.next(); +// System.out.println(soln.toString()); +// } +// +// System.exit(0); + + // Perform SPARQL inserts + System.out.println("Performing step 1..."); + 
UpdateAction.readExecute(insertDatapointOrderBarOneSparql, ontModel); + + System.out.println("Performing step 2..."); + UpdateAction.readExecute(insertDatapointOrderLastOneSparql, ontModel); + + System.out.println("Performing step 3..."); + UpdateAction.readExecute(insertSiatecVectorTableBNodesSparql, ontModel); + + System.out.println("Performing step 3a..."); + UpdateAction.readExecute(insertSetVClassificationSparql, ontModel); + + System.out.println("Performing step 3b..."); + UpdateAction.readExecute(insertSetWClassificationSparql, ontModel); + + System.out.println("Performing step (pre) 4..."); + UpdateAction.readExecute(insertNewDimValsForVectorTableSparql, ontModel); + +// System.out.println("Inserting new Vectors..."); +// UpdateAction.readExecute(insertNewVectorsSparql, ontModel); +// + System.out.println("Performing step 4..."); + UpdateAction.readExecute(insertVectorTableDetailsSparql, ontModel); + + System.out.println("Performing step 5..."); + long startTime = System.currentTimeMillis(); + UpdateAction.readExecute(insertVteOrderBarOneSparql, ontModel); + long endTime = System.currentTimeMillis(); + long elapsedTime = endTime - startTime; + System.out.println("Step 5 took " + elapsedTime + " ms"); + + System.out.println("Performing step 6..."); + UpdateAction.readExecute(insertVteOrderLastOneSparql, ontModel); + +// // Perform insertVectorTableDetailsSparql using the raw model, because +// // the pellet ontology model adds superfluous typed/untyped literals +// Model rawModel = ontModel.getRawModel(); +// +// System.out.println("Model after step 6..."); +//// SparqlWrapperMethods.queryOntModel(selectAllQuery, ontModel); +//// SparqlWrapperMethods.queryTheModel(selectAllQuery, rawModel); +// + System.out.println("Ordered vtes:"); + SparqlWrapperMethods.queryOntModel(selectOrderedVteInfoQuery, ontModel); +//// SparqlWrapperMethods.queryTheModel(selectOrderedVteInfoQuery, rawModel); +//// +//// +////// // Now recreate the pellet version of the model +////// 
ontModel = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC, rawModel); +//// +////// System.out.println("Performing step 7..."); +////// UpdateAction.readExecute(insertDimensionEquivalenceSparql, ontModel); +//// +////// System.out.println("Final model..."); +////// SparqlWrapperMethods.queryOntModel(selectAllQuery, ontModel); +//// +//// System.out.println("equivalent vtes..."); +//// SparqlWrapperMethods.queryOntModel(selectVteEquivalenceQuery, ontModel); + System.out.println("Performing step 7..."); + UpdateAction.readExecute(insertVteEquivalenceSparql, ontModel); + + System.out.println("Performing insertTecs..."); + UpdateAction.readExecute(insertTecsSparql, ontModel); + + System.out.println("Performing insertTranslatedTecBnodes..."); + UpdateAction.readExecute(insertTranslatedTecBnodesSparql, ontModel); + + System.out.println("Performing insertCORRECTEquivalentTecDetails..."); + UpdateAction.readExecute(insertCORRECTEquivalentTecDetailsSparql, ontModel); + +// +// System.out.println("Performing step 8..."); +// UpdateAction.readExecute(insertEqualVteSuperclassesSparql, ontModel); +// +// System.out.println("Final model..."); +// SparqlWrapperMethods.queryOntModel(selectAllQuery, ontModel); +//// System.out.println("ordered vtes..."); +//// SparqlWrapperMethods.queryOntModel(selectOrderedVteInfoQuery, ontModel); +// + System.out.println("Equal vtes..."); + SparqlWrapperMethods.queryOntModel(selectEqualVtesQuery, ontModel); +//// System.out.println("MTPs..."); +//// SparqlWrapperMethods.queryOntModel(selectMtpsQuery, ontModel); + System.out.println("Final model..."); + SparqlWrapperMethods.queryOntModel(selectAllQuery, ontModel); + +// // Write the model to a file +// File outFileRdf = new File(finalModelFileName + ".rdf"); +// File outFileN3 = new File(finalModelFileName + ".n3"); +// FileOutputStream outFileOutputStreamRdf; +// FileOutputStream outFileOutputStreamN3; +// +// // RDF/XML version +// try { +// outFileOutputStreamRdf = new 
FileOutputStream(outFileRdf); +// ontModel.writeAll(outFileOutputStreamRdf, "RDF/XML", null); +// } catch (FileNotFoundException e) { +// System.out.println("Unable to write to file: " +// + outFileRdf.getAbsolutePath()); +// e.printStackTrace(); +// System.exit(1); +// } +// +// // N3 version +// try { +// outFileOutputStreamN3 = new FileOutputStream(outFileN3); +// ontModel.writeAll(outFileOutputStreamN3, "N3", null); +// } catch (FileNotFoundException e) { +// System.out.println("Unable to write to file: " +// + outFileN3.getAbsolutePath()); +// e.printStackTrace(); +// System.exit(1); +// } +// +// System.out.println("Model written to files: " +// + outFileRdf.getAbsolutePath() + " and " + outFileN3.getAbsolutePath()); +// + } + +}