# HG changeset patch # User Chris Cannam # Date 1337371605 -3600 # Node ID 1bfb405490038530c195227eb61d32f3fe29da27 # Parent c789deb83bd48ac6704192a2bf71f15418693d61 Convert RDFImporter to Dataquay diff -r c789deb83bd4 -r 1bfb40549003 rdf/RDFImporter.cpp --- a/rdf/RDFImporter.cpp Fri May 18 14:45:15 2012 +0100 +++ b/rdf/RDFImporter.cpp Fri May 18 21:06:45 2012 +0100 @@ -21,8 +21,6 @@ #include #include -#include "SimpleSPARQLQuery.h" - #include "base/ProgressReporter.h" #include "base/RealTime.h" @@ -38,6 +36,17 @@ #include "data/fileio/CachedFile.h" #include "data/fileio/FileFinder.h" +#include +#include + +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::PropertyObject; + using std::cerr; using std::endl; @@ -55,6 +64,8 @@ std::vector getDataModels(ProgressReporter *); protected: + BasicStore *m_store; + QString m_uristring; QString m_errorString; std::map m_audioModelMap; @@ -62,9 +73,6 @@ std::map > m_labelValueMap; - static bool m_prefixesLoaded; - static void loadPrefixes(ProgressReporter *reporter); - void getDataModelsAudio(std::vector &, ProgressReporter *); void getDataModelsSparse(std::vector &, ProgressReporter *); void getDataModelsDense(std::vector &, ProgressReporter *); @@ -78,8 +86,6 @@ void fillModel(Model *, long, long, bool, std::vector &, QString); }; -bool RDFImporterImpl::m_prefixesLoaded = false; - QString RDFImporter::getKnownExtensions() { @@ -121,14 +127,24 @@ } RDFImporterImpl::RDFImporterImpl(QString uri, int sampleRate) : + m_store(new BasicStore), m_uristring(uri), m_sampleRate(sampleRate) { + //!!! retrieve data if remote... then + + m_store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + m_store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + m_store->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); + m_store->addPrefix("tl", Uri("http://purl.org/NET/c4dm/timeline.owl#")); + m_store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + m_store->addPrefix("rdfs", Uri("http://www.w3.org/2000/01/rdf-schema#")); + m_store->import(QUrl::fromLocalFile(uri), BasicStore::ImportIgnoreDuplicates); } RDFImporterImpl::~RDFImporterImpl() { - SimpleSPARQLQuery::closeSingleSource(m_uristring); + delete m_store; } bool @@ -146,8 +162,6 @@ std::vector RDFImporterImpl::getDataModels(ProgressReporter *reporter) { - loadPrefixes(reporter); - std::vector models; getDataModelsAudio(models, reporter); @@ -185,46 +199,31 @@ RDFImporterImpl::getDataModelsAudio(std::vector &models, ProgressReporter *reporter) { - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?source a mo:AudioFile . " - " ?signal a mo:Signal . 
" - " ?source mo:encodes ?signal } " - ) - .arg(m_uristring)); + Nodes sigs = m_store->match + (Triple(Node(), "a", m_store->expand("mo:Signal"))).a(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sig, sigs) { + + Node file = m_store->matchFirst(Triple(Node(), "mo:encodes", sig)).a; + if (file == Node()) { + file = m_store->matchFirst(Triple(sig, "mo:available_as", Node())).c; + } + if (file == Node()) { + std::cerr << "RDFImporterImpl::getDataModelsAudio: ERROR: No source for signal " << sig << std::endl; + continue; + } - if (results.empty()) { + QString signal = sig.value; + QString source = file.value; - SimpleSPARQLQuery query2 - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?source } " - ) - .arg(m_uristring)); - - results = query.execute(); - } - - for (int i = 0; i < (int)results.size(); ++i) { - - QString signal = results[i]["signal"].value; - QString source = results[i]["source"].value; - - SVDEBUG << "NOTE: Seeking signal source \"" << source << "\"..." << endl; + SVDEBUG << "NOTE: Seeking signal source \"" << source + << "\"..." << endl; FileSource *fs = new FileSource(source, reporter); if (fs->isAvailable()) { SVDEBUG << "NOTE: Source is available: Local filename is \"" - << fs->getLocalFilename() << "\"..." << endl; + << fs->getLocalFilename() + << "\"..." << endl; } #ifdef NO_SV_GUI @@ -235,7 +234,8 @@ } #else if (!fs->isAvailable()) { - SVDEBUG << "NOTE: Signal source \"" << source << "\" is not available, using file finder..." << endl; + SVDEBUG << "NOTE: Signal source \"" << source + << "\" is not available, using file finder..." << endl; FileFinder *ff = FileFinder::getInstance(); if (ff) { QString path = ff->find(FileFinder::AudioFile, @@ -284,44 +284,21 @@ reporter->setMessage(RDFImporter::tr("Importing dense signal data from RDF...")); } - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?feature ?feature_signal_type ?value " - " FROM <%1> " - - " WHERE { " - - " ?signal af:signal_feature ?feature . " - - " ?feature a ?feature_signal_type ; " - " af:value ?value . " - - " } " - ) - .arg(m_uristring)); + Nodes sigFeatures = m_store->match + (Triple(Node(), "af:signal_feature", Node())).c(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sf, sigFeatures) { - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } + if (sf.type != Node::URI && sf.type != Node::Blank) continue; + + Node t = m_store->matchFirst(Triple(sf, "a", Node())).c; + Node v = m_store->matchFirst(Triple(sf, "af:value", Node())).c; - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - for (int i = 0; i < (int)results.size(); ++i) { - - QString feature = results[i]["feature"].value; - QString type = results[i]["feature_signal_type"].value; - QString value = results[i]["value"].value; + QString feature = sf.value; + QString type = t.value; + QString value = v.value; + + if (type == "" || value == "") continue; int sampleRate = 0; int windowLength = 0; @@ -407,37 +384,21 @@ QString featureUri, QString featureTypeUri) { - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . 
" - " } " - ).arg(m_uristring); - - SimpleSPARQLQuery::Value v; + Node n = m_store->matchFirst + (Triple(Uri(featureUri), "dc:title", Node())).c; - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureUri), - "title"); - - if (v.value != "") { - SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << v.value << "\"" << endl; - m->setObjectName(v.value); + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureTypeUri), - "title"); - - if (v.value != "") { - SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << v.value << "\"" << endl; - m->setObjectName(v.value); + n = m_store->matchFirst + (Triple(Uri(featureTypeUri), "dc:title", Node())).c; + + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } @@ -449,91 +410,61 @@ int &sampleRate, int &windowLength, int &hopSize, int &width, int &height) { - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + Node dim = m_store->matchFirst + (Triple(Uri(featureUri), "af:dimensions", Node())).c; - QString dimensionsQuery - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?dimensions " - " FROM <%1> " + cerr << "Dimensions = \"" << dim.value << "\"" << endl; - " WHERE { " - - " <%2> af:dimensions ?dimensions . " - - " } " - ); - - SimpleSPARQLQuery::Value dimensionsValue = - SimpleSPARQLQuery::singleResultQuery - (s, dimensionsQuery.arg(m_uristring).arg(featureUri), "dimensions"); - - cerr << "Dimensions = \"" << dimensionsValue.value << "\"" - << endl; - - if (dimensionsValue.value != "") { - QStringList dl = dimensionsValue.value.split(" "); - if (dl.empty()) dl.push_back(dimensionsValue.value); + if (dim.type == Node::Literal && dim.value != "") { + QStringList dl = dim.value.split(" "); + if (dl.empty()) dl.push_back(dim.value); if (dl.size() > 0) height = dl[0].toInt(); if (dl.size() > 1) width = dl[1].toInt(); } + + // Looking for rate, hop, window from: + // + // ?feature mo:time ?time . + // ?time a tl:Interval . + // ?time tl:onTimeLine ?timeline . + // ?map tl:rangeTimeLine ?timeline . + // ?map tl:sampleRate ?rate . + // ?map tl:hopSize ?hop . + // ?map tl:windowLength ?window . - QString queryTemplate - ( - " PREFIX mo: " - " PREFIX af: " - " PREFIX tl: " + Node interval = m_store->matchFirst(Triple(Uri(featureUri), "mo:time", Node())).c; - " SELECT ?%3 " - " FROM <%1> " - - " WHERE { " - - " <%2> mo:time ?time . " - - " ?time a tl:Interval ; " - " tl:onTimeLine ?timeline . " - - " ?map tl:rangeTimeLine ?timeline . " - - " ?map tl:%3 ?%3 . 
" - - " } " - ); - - // Another laborious workaround for rasqal's failure to handle - // multiple optionals properly - - SimpleSPARQLQuery::Value srValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("sampleRate"), - "sampleRate"); - if (srValue.value != "") { - sampleRate = srValue.value.toInt(); + if (!m_store->contains(Triple(interval, "a", m_store->expand("tl:Interval")))) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Feature time node " + << interval << " is not a tl:Interval" << endl; + return; } - SimpleSPARQLQuery::Value hopValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("hopSize"), - "hopSize"); - if (srValue.value != "") { - hopSize = hopValue.value.toInt(); + Node tl = m_store->matchFirst(Triple(interval, "tl:onTimeLine", Node())).c; + + if (tl == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Interval node " + << interval << " lacks tl:onTimeLine property" << endl; + return; } - SimpleSPARQLQuery::Value winValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("windowLength"), - "windowLength"); - if (winValue.value != "") { - windowLength = winValue.value.toInt(); + Node map = m_store->matchFirst(Triple(Node(), "tl:rangeTimeLine", tl)).a; + + if (map == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: No map for " + << "timeline node " << tl << endl; + } + + PropertyObject po(m_store, "tl:", map); + + if (po.hasProperty("sampleRate")) { + sampleRate = po.getProperty("sampleRate").toInt(); + } + if (po.hasProperty("hopSize")) { + hopSize = po.getProperty("hopSize").toInt(); + } + if (po.hasProperty("windowLength")) { + windowLength = po.getProperty("windowLength").toInt(); } cerr << "sr = " << sampleRate << ", hop = " << hopSize << ", win = " << windowLength << endl; @@ -547,227 +478,139 @@ reporter->setMessage(RDFImporter::tr("Importing event data from RDF...")); } - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + /* + This function is only used for sparse data (for dense data we + would be in getDataModelsDense instead). - // Our query is intended to retrieve every thing that has a time, - // and every feature type and value associated with a thing that - // has a time. + Our query is intended to retrieve every thing that has a time, + and every feature type and value associated with a thing that + has a time. - // We will then need to refine this big bag of results into a set - // of data models. + We will then need to refine this big bag of results into a set + of data models. - // Results that have different source signals should go into - // different models. + Results that have different source signals should go into + different models. - // Results that have different feature types should go into - // different models. + Results that have different feature types should go into + different models. + */ - // Results that are sparse should go into different models from - // those that are dense (we need to examine the timestamps to - // establish this -- if the timestamps are regular, the results - // are dense -- so we can't do it as we go along, only after - // collecting all results). 
- - // Timed things that have features associated with them should not - // appear directly in any model -- their features should appear - // instead -- and these should be different models from those used - // for timed things that do not have features. - - // As we load the results, we'll push them into a partially - // structured container that maps from source signal (URI as - // string) -> feature type (likewise) -> time -> list of values. - // If the source signal or feature type is unavailable, the empty - // string will do. - - QString prefixes = QString( - " PREFIX event: " - " PREFIX tl: " - " PREFIX mo: " - " PREFIX af: " - " PREFIX rdfs: " - ); - - QString queryString = prefixes + QString( - - " SELECT ?signal ?timed_thing ?timeline ?event_type ?value" - " FROM <%1>" - - " WHERE {" - - " ?signal a mo:Signal ." - - " ?signal mo:time ?interval ." - " ?interval tl:onTimeLine ?timeline ." - " ?time tl:onTimeLine ?timeline ." - " ?timed_thing event:time ?time ." - " ?timed_thing a ?event_type ." - - " OPTIONAL {" - " ?timed_thing af:feature ?value" - " }" - " }" - - ).arg(m_uristring); - - //!!! NB we're using rather old terminology for these things, apparently: - // beginsAt -> start - // onTimeLine -> timeline - - QString timeQueryString = prefixes + QString( - - " SELECT ?time FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:at ?time . " - " } " - - ).arg(m_uristring); - - QString rangeQueryString = prefixes + QString( - - " SELECT ?time ?duration FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:beginsAt ?time . " - " ?t tl:duration ?duration . " - " } " - - ).arg(m_uristring); - - QString labelQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> rdfs:label ?label . " - " } " - - ).arg(m_uristring); - - QString textQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> af:text ?label . " - " } " - - ).arg(m_uristring); - - SimpleSPARQLQuery query(s, queryString); - query.setProgressReporter(reporter); - -// cerr << "Query will be: " << queryString << endl; - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } - - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - /* - This function is now only used for sparse data (for dense data - we would be in getDataModelsDense instead). - - For sparse data, the determining factors in deciding what model - to use are: Do the features have values? and Do the features - have duration? - - We can run through the results and check off whether we find - values and duration for each of the source+type keys, and then - run through the source+type keys pushing each of the results - into a suitable model. - - Unfortunately, at this point we do not yet have any actual - timing data (time/duration) -- just the time URI. - - What we _could_ do is to create one of each type of model at the - start, for each of the source+type keys, and then push each - feature into the relevant model depending on what we find out - about it. Then return only non-empty models. - */ + Nodes sigs = m_store->match + (Triple(Node(), "a", m_store->expand("mo:Signal"))).a(); // Map from timeline uri to event type to dimensionality to // presence of duration to model ptr. Whee! 
std::map > > > modelMap; - for (int i = 0; i < (int)results.size(); ++i) { + foreach (Node sig, sigs) { + + Node interval = m_store->matchFirst(Triple(sig, "mo:time", Node())).c; + if (interval == Node()) continue; - if (i % 4 == 0) { - if (reporter) reporter->setProgress(i/4); - } + Node tl = m_store->matchFirst(Triple(interval, "tl:onTimeLine", Node())).c; + if (tl == Node()) continue; - QString source = results[i]["signal"].value; - QString timeline = results[i]["timeline"].value; - QString type = results[i]["event_type"].value; - QString thinguri = results[i]["timed_thing"].value; + Nodes times = m_store->match(Triple(Node(), "tl:onTimeLine", tl)).a(); - RealTime time; - RealTime duration; + foreach (Node tn, times) { + + Nodes timedThings = m_store->match(Triple(Node(), "event:time", tn)).a(); - bool haveTime = false; - bool haveDuration = false; + foreach (Node thing, timedThings) { + + Node typ = m_store->matchFirst(Triple(thing, "a", Node())).c; + if (typ == Node()) continue; - QString label = ""; - bool text = (type.contains("Text") || type.contains("text")); // Ha, ha - bool note = (type.contains("Note") || type.contains("note")); // Guffaw + Node valu = m_store->matchFirst(Triple(thing, "af:feature", Node())).c; - if (text) { - label = SimpleSPARQLQuery::singleResultQuery - (s, textQueryString.arg(thinguri), "label").value; - } + QString source = sig.value; + QString timeline = tl.value; + QString type = typ.value; + QString thinguri = thing.value; - if (label == "") { - label = SimpleSPARQLQuery::singleResultQuery - (s, labelQueryString.arg(thinguri), "label").value; - } + /* + For sparse data, the determining factors in deciding + what model to use are: Do the features have values? + and Do the features have duration? - SimpleSPARQLQuery rangeQuery(s, rangeQueryString.arg(thinguri)); - SimpleSPARQLQuery::ResultList rangeResults = rangeQuery.execute(); - if (!rangeResults.empty()) { -// std::cerr << rangeResults.size() << " range results" << std::endl; - time = RealTime::fromXsdDuration - (rangeResults[0]["time"].value.toStdString()); - duration = RealTime::fromXsdDuration - (rangeResults[0]["duration"].value.toStdString()); -// std::cerr << "duration string " << rangeResults[0]["duration"].value << std::endl; - haveTime = true; - haveDuration = true; - } else { - QString timestring = SimpleSPARQLQuery::singleResultQuery - (s, timeQueryString.arg(thinguri), "time").value; -// SVDEBUG << "timestring = " << timestring << endl; - if (timestring != "") { - time = RealTime::fromXsdDuration(timestring.toStdString()); - haveTime = true; - } - } + We can run through the results and check off whether + we find values and duration for each of the + source+type keys, and then run through the + source+type keys pushing each of the results into a + suitable model. - QString valuestring = results[i]["value"].value; - std::vector values; + Unfortunately, at this point we do not yet have any + actual timing data (time/duration) -- just the time + URI. - if (valuestring != "") { - QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); - for (int j = 0; j < vsl.size(); ++j) { - bool success = false; - float v = vsl[j].toFloat(&success); - if (success) values.push_back(v); - } - } + What we _could_ do is to create one of each type of + model at the start, for each of the source+type + keys, and then push each feature into the relevant + model depending on what we find out about it. Then + return only non-empty models. 
+ */ - int dimensions = 1; - if (values.size() == 1) dimensions = 2; - else if (values.size() > 1) dimensions = 3; + QString label = ""; + bool text = (type.contains("Text") || type.contains("text")); // Ha, ha + bool note = (type.contains("Note") || type.contains("note")); // Guffaw - Model *model = 0; + if (text) { + label = m_store->matchFirst(Triple(thing, "af:text", Node())).c.value; + } + + if (label == "") { + label = m_store->matchFirst(Triple(thing, "rdfs:label", Node())).c.value; + } - if (modelMap[timeline][type][dimensions].find(haveDuration) == - modelMap[timeline][type][dimensions].end()) { + RealTime time; + RealTime duration; + + bool haveTime = false; + bool haveDuration = false; + + Node at = m_store->matchFirst(Triple(tn, "tl:at", Node())).c; + + if (at != Node()) { + time = RealTime::fromXsdDuration(at.value.toStdString()); + haveTime = true; + } else { + //!!! NB we're using rather old terminology for these things, apparently: + // beginsAt -> start + // onTimeLine -> timeline + + Node start = m_store->matchFirst(Triple(tn, "tl:beginsAt", Node())).c; + Node dur = m_store->matchFirst(Triple(tn, "tl:duration", Node())).c; + if (start != Node() && dur != Node()) { + time = RealTime::fromXsdDuration + (start.value.toStdString()); + duration = RealTime::fromXsdDuration + (dur.value.toStdString()); + haveTime = haveDuration = true; + } + } + + QString valuestring = valu.value; + std::vector values; + + if (valuestring != "") { + QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); + for (int j = 0; j < vsl.size(); ++j) { + bool success = false; + float v = vsl[j].toFloat(&success); + if (success) values.push_back(v); + } + } + + int dimensions = 1; + if (values.size() == 1) dimensions = 2; + else if (values.size() > 1) dimensions = 3; + + Model *model = 0; + + if (modelMap[timeline][type][dimensions].find(haveDuration) == + modelMap[timeline][type][dimensions].end()) { /* SVDEBUG << "Creating new model: source = " << source << ", type = " << type << ", dimensions = " @@ -776,92 +619,69 @@ << endl; */ - if (!haveDuration) { + if (!haveDuration) { - if (dimensions == 1) { + if (dimensions == 1) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + } + } else if (dimensions == 2) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseTimeValueModel(m_sampleRate, 1, false); + } + } else { + // We don't have a three-dimensional sparse model, + // so use a note model. We do have some logic (in + // extractStructure below) for guessing whether + // this should after all have been a dense model, + // but it's hard to apply it because we don't have + // all the necessary timing data yet... hmm + model = new NoteModel(m_sampleRate, 1, false); + } - if (text) { - - model = new TextModel(m_sampleRate, 1, false); + } else { // haveDuration - } else { - - model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + if (note || (dimensions > 2)) { + model = new NoteModel(m_sampleRate, 1, false); + } else { + // If our units are frequency or midi pitch, we + // should be using a note model... 
hm + model = new RegionModel(m_sampleRate, 1, false); + } } - } else if (dimensions == 2) { + model->setRDFTypeURI(type); - if (text) { - - model = new TextModel(m_sampleRate, 1, false); - - } else { - - model = new SparseTimeValueModel(m_sampleRate, 1, false); + if (m_audioModelMap.find(source) != m_audioModelMap.end()) { + std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; + model->setSourceModel(m_audioModelMap[source]); } - } else { + QString title = m_store->matchFirst + (Triple(typ, "dc:title", Node())).a.value; + if (title == "") { + // take it from the end of the event type + title = type; + title.replace(QRegExp("^.*[/#]"), ""); + } + model->setObjectName(title); - // We don't have a three-dimensional sparse model, - // so use a note model. We do have some logic (in - // extractStructure below) for guessing whether - // this should after all have been a dense model, - // but it's hard to apply it because we don't have - // all the necessary timing data yet... hmm - - model = new NoteModel(m_sampleRate, 1, false); + modelMap[timeline][type][dimensions][haveDuration] = model; + models.push_back(model); } - } else { // haveDuration + model = modelMap[timeline][type][dimensions][haveDuration]; - if (note || (dimensions > 2)) { - - model = new NoteModel(m_sampleRate, 1, false); - - } else { - - // If our units are frequency or midi pitch, we - // should be using a note model... hm - - model = new RegionModel(m_sampleRate, 1, false); + if (model) { + long ftime = RealTime::realTime2Frame(time, m_sampleRate); + long fduration = RealTime::realTime2Frame(duration, m_sampleRate); + fillModel(model, ftime, fduration, haveDuration, values, label); } } - - model->setRDFTypeURI(type); - - if (m_audioModelMap.find(source) != m_audioModelMap.end()) { - std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; - model->setSourceModel(m_audioModelMap[source]); - } - - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . " - " } " - ).arg(m_uristring).arg(type); - QString title = SimpleSPARQLQuery::singleResultQuery - (s, titleQuery, "title").value; - if (title == "") { - // take it from the end of the event type - title = type; - title.replace(QRegExp("^.*[/#]"), ""); - } - model->setObjectName(title); - - modelMap[timeline][type][dimensions][haveDuration] = model; - models.push_back(model); - } - - model = modelMap[timeline][type][dimensions][haveDuration]; - - if (model) { - long ftime = RealTime::realTime2Frame(time, m_sampleRate); - long fduration = RealTime::realTime2Frame(duration, m_sampleRate); - fillModel(model, ftime, fduration, haveDuration, values, label); } } } @@ -975,33 +795,33 @@ { bool haveAudio = false; bool haveAnnotations = false; + bool haveRDF = false; - // This query is not expected to return any values, but if it - // executes successfully (leaving no error in the error string) - // then we know we have RDF - SimpleSPARQLQuery q(SimpleSPARQLQuery::QueryFromSingleSource, - QString(" SELECT ?x FROM <%1> WHERE { ?x } ") - .arg(url)); - - SimpleSPARQLQuery::ResultList r = q.execute(); - if (!q.isOK()) { - SimpleSPARQLQuery::closeSingleSource(url); + BasicStore *store = 0; + + // This is not expected to return anything useful, but if it does + // anything at all then we know we have RDF + try { + //!!! non-local document? 
+ store = BasicStore::load(QUrl(url)); + Triple t = store->matchFirst(Triple()); + if (t != Triple()) haveRDF = true; + } catch (...) { + } + + if (!haveRDF) { + delete store; return NotRDF; } + store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + // "MO-conformant" structure for audio files - SimpleSPARQLQuery::Value value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?url a mo:AudioFile } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + Node n = store->matchFirst(Triple(Node(), "a", store->expand("mo:AudioFile"))).a; + if (n != Node() && n.type == Node::URI) { haveAudio = true; @@ -1011,51 +831,27 @@ // (which is not properly in conformance with the Music // Ontology) - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?url } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { - haveAudio = true; + Nodes sigs = store->match(Triple(Node(), "a", store->expand("mo:Signal"))).a(); + foreach (Node sig, sigs) { + Node aa = store->matchFirst(Triple(sig, "mo:available_as", Node())).c; + if (aa != Node()) { + haveAudio = true; + break; + } } } SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " << haveAudio << endl; - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX event: " - " SELECT ?thing FROM <%1> " - " WHERE { ?thing event:time ?time } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + n = store->matchFirst(Triple(Node(), "event:time", Node())).a; + if (n != Node()) { haveAnnotations = true; } if (!haveAnnotations) { - - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX af: " - " SELECT ?thing FROM <%1> " - " WHERE { ?signal af:signal_feature ?thing } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + n = store->matchFirst(Triple(Node(), "af:signal_feature", Node())).a; + if (n != Node()) { haveAnnotations = true; } } @@ -1063,7 +859,7 @@ SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " << haveAnnotations << endl; - SimpleSPARQLQuery::closeSingleSource(url); + delete store; if (haveAudio) { if (haveAnnotations) { @@ -1082,25 +878,3 @@ return OtherRDFDocument; } -void -RDFImporterImpl::loadPrefixes(ProgressReporter *reporter) -{ - return; -//!!! - if (m_prefixesLoaded) return; - const char *prefixes[] = { - "http://purl.org/NET/c4dm/event.owl", - "http://purl.org/NET/c4dm/timeline.owl", - "http://purl.org/ontology/mo/", - "http://purl.org/ontology/af/", - "http://www.w3.org/2000/01/rdf-schema", - "http://purl.org/dc/elements/1.1/", - }; - for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { - CachedFile cf(prefixes[i], reporter, "application/rdf+xml"); - if (!cf.isOK()) continue; - SimpleSPARQLQuery::addSourceToModel - (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); - } - m_prefixesLoaded = true; -}
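
For reference, the idiom this changeset converts to is direct triple matching against an in-memory Dataquay store, in place of building SPARQL SELECT strings per lookup (and the removed "laborious workaround for rasqal's failure to handle multiple optionals"). Below is a minimal, self-contained sketch of that idiom. It is not part of the changeset: the main() wrapper, the "example.n3" file name, and the dc:title lookup on a signal are illustrative assumptions, and the header paths are assumed; it otherwise uses only the Dataquay calls that appear in the patch above (addPrefix, import, expand, match/matchFirst, the Triple a/c members, and PropertyObject).

    #include <dataquay/BasicStore.h>
    #include <dataquay/PropertyObject.h>

    #include <QUrl>
    #include <iostream>

    using Dataquay::Uri;
    using Dataquay::Node;
    using Dataquay::Nodes;
    using Dataquay::Triple;
    using Dataquay::BasicStore;
    using Dataquay::PropertyObject;

    int main()
    {
        BasicStore store;

        // Prefixes are registered once, so triples can then be written
        // with qualified names such as "mo:Signal" instead of full URIs.
        store.addPrefix("mo", Uri("http://purl.org/ontology/mo/"));
        store.addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/"));

        // "example.n3" is a placeholder document name.
        store.import(QUrl::fromLocalFile("example.n3"),
                     BasicStore::ImportIgnoreDuplicates);

        // A SPARQL "SELECT ?s WHERE { ?s a mo:Signal }" becomes a
        // wildcard match: unbound positions are empty Node()s, and .a()
        // collects the subject of each matching triple.
        Nodes signals = store.match
            (Triple(Node(), "a", store.expand("mo:Signal"))).a();

        foreach (Node sig, signals) {

            // A single optional lookup becomes matchFirst, which returns
            // a Triple of empty Nodes when nothing matches.
            Node title = store.matchFirst
                (Triple(sig, "dc:title", Node())).c;

            if (title.type == Node::Literal && title.value != "") {
                std::cerr << "signal " << sig.value.toStdString()
                          << " has title "
                          << title.value.toStdString() << std::endl;
            }

            // Equivalently, PropertyObject bundles per-node lookups under
            // one prefix: with "dc:" as the prefix, getProperty("title")
            // reads the dc:title property of the node.
            PropertyObject po(&store, "dc:", sig);
            if (po.hasProperty("title")) {
                QString t = po.getProperty("title").toString();
            }
        }

        return 0;
    }

The trade-off, visible throughout the patch, is that each former multi-variable SELECT is decomposed into a chain of match/matchFirst calls, with explicit empty-Node checks standing in for unbound results.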