view src/org/qmul/eecs/c4dm/sia/SiaSemWeb.java @ 94:fedf516f6a78 tip

test file
author stevenh
date Mon, 29 Dec 2014 15:16:22 +0000
parents c544f8a666fa
children
line wrap: on
line source
package org.qmul.eecs.c4dm.sia;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;

import org.apache.jena.web.DatasetGraphAccessorHTTP;
import org.mindswap.pellet.jena.PelletReasonerFactory;
import org.qmul.eecs.c4dm.sparql.utilities.SparqlWrapperMethods;

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.tdb.TDBFactory;
import com.hp.hpl.jena.update.UpdateAction;

/**
 * Runs the SIA/SIATEC pattern-discovery pipeline as an ordered sequence of
 * SPARQL updates over a Pellet-backed Jena ontology model, then prints the
 * equal-VTE query results and the final model to standard out.
 *
 * <p>All SPARQL query/update files and the ontology are referenced via
 * {@code file:} URLs resolved against the working directory, so this must be
 * run from the project root.
 */
public class SiaSemWeb {

	// SPARQL SELECT queries used to report results at the end of the run.
	private static final String selectAllQuery = "file:src/sparql/select_all.sparql";
	private static final String selectEqualVtesQuery = "file:src/sparql/select_equal_vtes.sparql";

	// SPARQL updates implementing the successive steps of the SIA/SIATEC pipeline.
	private static final String insertDatapointOrderBarOneSparql = "file:src/sparql/insert_datapoint_order_bar_one.sparql";
	private static final String insertDatapointOrderLastOneSparql = "file:src/sparql/insert_datapoint_order_last_one.sparql";
	private static final String insertSiatecVectorTableBNodesSparql = "file:src/sparql/insert_siatec_vector_table_bnodes.sparql";
	private static final String insertSetVClassificationSparql = "file:src/sparql/insert_set_v_classification.sparql";
	private static final String insertSetWClassificationSparql = "file:src/sparql/insert_set_w_classification.sparql";
	private static final String insertNewDimValsForVectorTableSparql = "file:src/sparql/insert_new_dimVals_for_vector_table.sparql";
	private static final String insertVectorTableDetailsSparql = "file:src/sparql/insert_vector_table_details.sparql";
	private static final String insertVteOrderBarOneSparql = "file:src/sparql/insert_vte_order_bar_one.sparql";
	private static final String insertVteOrderLastOneSparql = "file:src/sparql/insert_vte_order_last_one.sparql";
	private static final String insertVteEquivalenceSparql = "file:src/sparql/insert_vte_equivalence.sparql";
	private static final String insertTecsSparql = "file:src/sparql/insert_tecs.sparql";
	private static final String insertTranslatedTecBnodesSparql = "file:src/sparql/insert_translated_tec_bnodes.sparql";
	private static final String insertCORRECTEquivalentTecDetailsSparql = "file:src/sparql/insert_correct_equivalent_tec_details.sparql";

	// Ontology plus test data, read into the model at start-up (N3 syntax).
	private static final String ontology = "file:src/rdf/siaTestDatapointOntology.n3";
//	private static final String ontology = "file:src/rdf/midiModel.n3";

	/**
	 * Entry point: loads the ontology and data, executes the pipeline's SPARQL
	 * updates in order, and prints the equal-VTE results and the final model.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {

		// Pellet-backed model so every update/query sees inferred triples.
		OntModel ontModel = ModelFactory
				.createOntologyModel(PelletReasonerFactory.THE_SPEC);

		// Read the ontology and data from the file into the ontology model.
		ontModel.read(ontology, "N3");

		// TODO Should probably check for and deal with duplicate vectors here
		// (vectors must be unique - datapoints don't have to be)

		// Perform the SPARQL inserts, one pipeline step at a time.
		update("Performing step 1...", insertDatapointOrderBarOneSparql, ontModel);
		update("Performing step 2...", insertDatapointOrderLastOneSparql, ontModel);
		update("Performing step 3...", insertSiatecVectorTableBNodesSparql, ontModel);
		update("Performing step 3a...", insertSetVClassificationSparql, ontModel);
		update("Performing step 3b...", insertSetWClassificationSparql, ontModel);
		update("Performing step (pre) 4...", insertNewDimValsForVectorTableSparql, ontModel);
		update("Performing step 4...", insertVectorTableDetailsSparql, ontModel);

		// Step 5 is the known hot spot, so report its wall-clock time.
		System.out.println("Performing step 5...");
		long startTime = System.currentTimeMillis();
		UpdateAction.readExecute(insertVteOrderBarOneSparql, ontModel);
		long elapsedTime = System.currentTimeMillis() - startTime;
		System.out.println("Step 5 took " + elapsedTime + " ms");

		update("Performing step 6...", insertVteOrderLastOneSparql, ontModel);
		update("Performing step 7...", insertVteEquivalenceSparql, ontModel);
		update("Performing insertTecs...", insertTecsSparql, ontModel);
		update("Performing insertTranslatedTecBnodes...", insertTranslatedTecBnodesSparql, ontModel);
		update("Performing insertCORRECTEquivalentTecDetails...", insertCORRECTEquivalentTecDetailsSparql, ontModel);

		// Report the results of the run.
		System.out.println("Equal vtes...");
		SparqlWrapperMethods.queryOntModel(selectEqualVtesQuery, ontModel);
		System.out.println("Final model...");
		SparqlWrapperMethods.queryOntModel(selectAllQuery, ontModel);
	}

	/**
	 * Prints a progress message, then reads the SPARQL update at
	 * {@code sparqlUrl} and executes it against {@code model}.
	 *
	 * @param message   progress line written to standard out before executing
	 * @param sparqlUrl {@code file:} URL of the SPARQL update to run
	 * @param model     ontology model the update is applied to
	 */
	private static void update(String message, String sparqlUrl, OntModel model) {
		System.out.println(message);
		UpdateAction.readExecute(sparqlUrl, model);
	}

}