svcore: comparison of rdf/RDFImporter.cpp @ 742:c10cb8782576 (branch coreaudio_tests)
Merge from branch "default"
| field | value |
|---|---|
| author | Chris Cannam |
| date | Sun, 01 Jul 2012 11:53:00 +0100 |
| parents | 547b03533375 |
| children | e802e550a1f2 |
comparison of the file at revision 666:4efa7429cd85 (left column) with revision 742:c10cb8782576 (right column):
| 666:4efa7429cd85 | 742:c10cb8782576 |
|---|---|
| 2 | 2 |
| 3 /* | 3 /* |
| 4 Sonic Visualiser | 4 Sonic Visualiser |
| 5 An audio file viewer and annotation editor. | 5 An audio file viewer and annotation editor. |
| 6 Centre for Digital Music, Queen Mary, University of London. | 6 Centre for Digital Music, Queen Mary, University of London. |
| 7 This file copyright 2008 QMUL. | 7 This file copyright 2008-2012 QMUL. |
| 8 | 8 |
| 9 This program is free software; you can redistribute it and/or | 9 This program is free software; you can redistribute it and/or |
| 10 modify it under the terms of the GNU General Public License as | 10 modify it under the terms of the GNU General Public License as |
| 11 published by the Free Software Foundation; either version 2 of the | 11 published by the Free Software Foundation; either version 2 of the |
| 12 License, or (at your option) any later version. See the file | 12 License, or (at your option) any later version. See the file |
| 19 #include <vector> | 19 #include <vector> |
| 20 | 20 |
| 21 #include <iostream> | 21 #include <iostream> |
| 22 #include <cmath> | 22 #include <cmath> |
| 23 | 23 |
| 24 #include "SimpleSPARQLQuery.h" | |
| 25 | |
| 26 #include "base/ProgressReporter.h" | 24 #include "base/ProgressReporter.h" |
| 27 #include "base/RealTime.h" | 25 #include "base/RealTime.h" |
| 28 | 26 |
| 29 #include "data/model/SparseOneDimensionalModel.h" | 27 #include "data/model/SparseOneDimensionalModel.h" |
| 30 #include "data/model/SparseTimeValueModel.h" | 28 #include "data/model/SparseTimeValueModel.h" |
| 36 | 34 |
| 37 #include "data/fileio/FileSource.h" | 35 #include "data/fileio/FileSource.h" |
| 38 #include "data/fileio/CachedFile.h" | 36 #include "data/fileio/CachedFile.h" |
| 39 #include "data/fileio/FileFinder.h" | 37 #include "data/fileio/FileFinder.h" |
| 40 | 38 |
| 39 #include <dataquay/BasicStore.h> | |
| 40 #include <dataquay/PropertyObject.h> | |
| 41 | |
| 42 using Dataquay::Uri; | |
| 43 using Dataquay::Node; | |
| 44 using Dataquay::Nodes; | |
| 45 using Dataquay::Triple; | |
| 46 using Dataquay::Triples; | |
| 47 using Dataquay::BasicStore; | |
| 48 using Dataquay::PropertyObject; | |
| 49 | |
| 41 using std::cerr; | 50 using std::cerr; |
| 42 using std::endl; | 51 using std::endl; |
| 43 | 52 |
| 44 class RDFImporterImpl | 53 class RDFImporterImpl |
| 45 { | 54 { |
| 53 QString getErrorString() const; | 62 QString getErrorString() const; |
| 54 | 63 |
| 55 std::vector<Model *> getDataModels(ProgressReporter *); | 64 std::vector<Model *> getDataModels(ProgressReporter *); |
| 56 | 65 |
| 57 protected: | 66 protected: |
| 67 BasicStore *m_store; | |
| 68 Uri expand(QString s) { return m_store->expand(s); } | |
| 69 | |
| 58 QString m_uristring; | 70 QString m_uristring; |
| 59 QString m_errorString; | 71 QString m_errorString; |
| 60 std::map<QString, Model *> m_audioModelMap; | 72 std::map<QString, Model *> m_audioModelMap; |
| 61 int m_sampleRate; | 73 int m_sampleRate; |
| 62 | 74 |
| 63 std::map<Model *, std::map<QString, float> > m_labelValueMap; | 75 std::map<Model *, std::map<QString, float> > m_labelValueMap; |
| 64 | 76 |
| 65 static bool m_prefixesLoaded; | |
| 66 static void loadPrefixes(ProgressReporter *reporter); | |
| 67 | |
| 68 void getDataModelsAudio(std::vector<Model *> &, ProgressReporter *); | 77 void getDataModelsAudio(std::vector<Model *> &, ProgressReporter *); |
| 69 void getDataModelsSparse(std::vector<Model *> &, ProgressReporter *); | 78 void getDataModelsSparse(std::vector<Model *> &, ProgressReporter *); |
| 70 void getDataModelsDense(std::vector<Model *> &, ProgressReporter *); | 79 void getDataModelsDense(std::vector<Model *> &, ProgressReporter *); |
| 71 | 80 |
| 72 void getDenseModelTitle(Model *, QString, QString); | 81 void getDenseModelTitle(Model *, QString, QString); |
| 76 int &hopSize, int &width, int &height); | 85 int &hopSize, int &width, int &height); |
| 77 | 86 |
| 78 void fillModel(Model *, long, long, bool, std::vector<float> &, QString); | 87 void fillModel(Model *, long, long, bool, std::vector<float> &, QString); |
| 79 }; | 88 }; |
| 80 | 89 |
| 81 bool RDFImporterImpl::m_prefixesLoaded = false; | |
| 82 | |
| 83 QString | 90 QString |
| 84 RDFImporter::getKnownExtensions() | 91 RDFImporter::getKnownExtensions() |
| 85 { | 92 { |
| 86 return "*.rdf *.n3 *.ttl"; | 93 return "*.rdf *.n3 *.ttl"; |
| 87 } | 94 } |
| 119 { | 126 { |
| 120 return m_d->getDataModels(r); | 127 return m_d->getDataModels(r); |
| 121 } | 128 } |
| 122 | 129 |
| 123 RDFImporterImpl::RDFImporterImpl(QString uri, int sampleRate) : | 130 RDFImporterImpl::RDFImporterImpl(QString uri, int sampleRate) : |
| 131 m_store(new BasicStore), | |
| 124 m_uristring(uri), | 132 m_uristring(uri), |
| 125 m_sampleRate(sampleRate) | 133 m_sampleRate(sampleRate) |
| 126 { | 134 { |
| 135 //!!! retrieve data if remote... then | |
| 136 | |
| 137 m_store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); | |
| 138 m_store->addPrefix("af", Uri("http://purl.org/ontology/af/")); | |
| 139 m_store->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); | |
| 140 m_store->addPrefix("tl", Uri("http://purl.org/NET/c4dm/timeline.owl#")); | |
| 141 m_store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); | |
| 142 m_store->addPrefix("rdfs", Uri("http://www.w3.org/2000/01/rdf-schema#")); | |
| 143 | |
| 144 try { | |
| 145 QUrl url; | |
| 146 if (uri.startsWith("file:")) { | |
| 147 url = QUrl(uri); | |
| 148 } else { | |
| 149 url = QUrl::fromLocalFile(uri); | |
| 150 } | |
| 151 m_store->import(url, BasicStore::ImportIgnoreDuplicates); | |
| 152 } catch (std::exception &e) { | |
| 153 m_errorString = e.what(); | |
| 154 } | |
| 127 } | 155 } |
| 128 | 156 |
| 129 RDFImporterImpl::~RDFImporterImpl() | 157 RDFImporterImpl::~RDFImporterImpl() |
| 130 { | 158 { |
| 131 SimpleSPARQLQuery::closeSingleSource(m_uristring); | 159 delete m_store; |
| 132 } | 160 } |
| 133 | 161 |
| 134 bool | 162 bool |
| 135 RDFImporterImpl::isOK() | 163 RDFImporterImpl::isOK() |
| 136 { | 164 { |
| 144 } | 172 } |
| 145 | 173 |
| 146 std::vector<Model *> | 174 std::vector<Model *> |
| 147 RDFImporterImpl::getDataModels(ProgressReporter *reporter) | 175 RDFImporterImpl::getDataModels(ProgressReporter *reporter) |
| 148 { | 176 { |
| 149 loadPrefixes(reporter); | |
| 150 | |
| 151 std::vector<Model *> models; | 177 std::vector<Model *> models; |
| 152 | 178 |
| 153 getDataModelsAudio(models, reporter); | 179 getDataModelsAudio(models, reporter); |
| 154 | 180 |
| 155 if (m_sampleRate == 0) { | 181 if (m_sampleRate == 0) { |
| 156 m_errorString = QString("Invalid audio data model (is audio file format supported?)"); | 182 m_errorString = QString("Invalid audio data model (is audio file format supported?)"); |
| 157 std::cerr << m_errorString.toStdString() << std::endl; | 183 std::cerr << m_errorString << std::endl; |
| 158 return models; | 184 return models; |
| 159 } | 185 } |
| 160 | 186 |
| 161 QString error; | 187 QString error; |
| 162 | 188 |
| 183 | 209 |
| 184 void | 210 void |
| 185 RDFImporterImpl::getDataModelsAudio(std::vector<Model *> &models, | 211 RDFImporterImpl::getDataModelsAudio(std::vector<Model *> &models, |
| 186 ProgressReporter *reporter) | 212 ProgressReporter *reporter) |
| 187 { | 213 { |
| 188 SimpleSPARQLQuery query | 214 Nodes sigs = m_store->match |
| 189 (SimpleSPARQLQuery::QueryFromSingleSource, | 215 (Triple(Node(), Uri("a"), expand("mo:Signal"))).subjects(); |
| 190 QString | 216 |
| 191 ( | 217 foreach (Node sig, sigs) { |
| 192 " PREFIX mo: <http://purl.org/ontology/mo/> " | |
| 193 " SELECT ?signal ?source FROM <%1> " | |
| 194 " WHERE { ?source a mo:AudioFile . " | |
| 195 " ?signal a mo:Signal . " | |
| 196 " ?source mo:encodes ?signal } " | |
| 197 ) | |
| 198 .arg(m_uristring)); | |
| 199 | |
| 200 SimpleSPARQLQuery::ResultList results = query.execute(); | |
| 201 | |
| 202 if (results.empty()) { | |
| 203 | |
| 204 SimpleSPARQLQuery query2 | |
| 205 (SimpleSPARQLQuery::QueryFromSingleSource, | |
| 206 QString | |
| 207 ( | |
| 208 " PREFIX mo: <http://purl.org/ontology/mo/> " | |
| 209 " SELECT ?signal ?source FROM <%1> " | |
| 210 " WHERE { ?signal a mo:Signal ; mo:available_as ?source } " | |
| 211 ) | |
| 212 .arg(m_uristring)); | |
| 213 | 218 |
| 214 results = query.execute(); | 219 Node file = m_store->complete(Triple(Node(), expand("mo:encodes"), sig)); |
| 215 } | 220 if (file == Node()) { |
| 216 | 221 file = m_store->complete(Triple(sig, expand("mo:available_as"), Node())); |
| 217 for (int i = 0; i < results.size(); ++i) { | 222 } |
| 218 | 223 if (file == Node()) { |
| 219 QString signal = results[i]["signal"].value; | 224 std::cerr << "RDFImporterImpl::getDataModelsAudio: ERROR: No source for signal " << sig << std::endl; |
| 220 QString source = results[i]["source"].value; | 225 continue; |
| 221 | 226 } |
| 222 std::cerr << "NOTE: Seeking signal source \"" << source.toStdString() | 227 |
| 223 << "\"..." << std::endl; | 228 QString signal = sig.value; |
| 229 QString source = file.value; | |
| 230 | |
| 231 SVDEBUG << "NOTE: Seeking signal source \"" << source | |
| 232 << "\"..." << endl; | |
| 224 | 233 |
| 225 FileSource *fs = new FileSource(source, reporter); | 234 FileSource *fs = new FileSource(source, reporter); |
| 226 if (fs->isAvailable()) { | 235 if (fs->isAvailable()) { |
| 227 std::cerr << "NOTE: Source is available: Local filename is \"" | 236 SVDEBUG << "NOTE: Source is available: Local filename is \"" |
| 228 << fs->getLocalFilename().toStdString() | 237 << fs->getLocalFilename() |
| 229 << "\"..." << std::endl; | 238 << "\"..." << endl; |
| 230 } | 239 } |
| 231 | 240 |
| 232 #ifdef NO_SV_GUI | 241 #ifdef NO_SV_GUI |
| 233 if (!fs->isAvailable()) { | 242 if (!fs->isAvailable()) { |
| 234 m_errorString = QString("Signal source \"%1\" is not available").arg(source); | 243 m_errorString = QString("Signal source \"%1\" is not available").arg(source); |
| 235 delete fs; | 244 delete fs; |
| 236 continue; | 245 continue; |
| 237 } | 246 } |
| 238 #else | 247 #else |
| 239 if (!fs->isAvailable()) { | 248 if (!fs->isAvailable()) { |
| 240 std::cerr << "NOTE: Signal source \"" << source.toStdString() | 249 SVDEBUG << "NOTE: Signal source \"" << source |
| 241 << "\" is not available, using file finder..." << std::endl; | 250 << "\" is not available, using file finder..." << endl; |
| 242 FileFinder *ff = FileFinder::getInstance(); | 251 FileFinder *ff = FileFinder::getInstance(); |
| 243 if (ff) { | 252 if (ff) { |
| 244 QString path = ff->find(FileFinder::AudioFile, | 253 QString path = ff->find(FileFinder::AudioFile, |
| 245 fs->getLocation(), | 254 fs->getLocation(), |
| 246 m_uristring); | 255 m_uristring); |
| 263 reporter->setMessage(RDFImporter::tr("Importing audio referenced in RDF...")); | 272 reporter->setMessage(RDFImporter::tr("Importing audio referenced in RDF...")); |
| 264 } | 273 } |
| 265 fs->waitForData(); | 274 fs->waitForData(); |
| 266 WaveFileModel *newModel = new WaveFileModel(*fs, m_sampleRate); | 275 WaveFileModel *newModel = new WaveFileModel(*fs, m_sampleRate); |
| 267 if (newModel->isOK()) { | 276 if (newModel->isOK()) { |
| 268 std::cerr << "Successfully created wave file model from source at \"" << source.toStdString() << "\"" << std::endl; | 277 std::cerr << "Successfully created wave file model from source at \"" << source << "\"" << std::endl; |
| 269 models.push_back(newModel); | 278 models.push_back(newModel); |
| 270 m_audioModelMap[signal] = newModel; | 279 m_audioModelMap[signal] = newModel; |
| 271 if (m_sampleRate == 0) { | 280 if (m_sampleRate == 0) { |
| 272 m_sampleRate = newModel->getSampleRate(); | 281 m_sampleRate = newModel->getSampleRate(); |
| 273 } | 282 } |
| 285 { | 294 { |
| 286 if (reporter) { | 295 if (reporter) { |
| 287 reporter->setMessage(RDFImporter::tr("Importing dense signal data from RDF...")); | 296 reporter->setMessage(RDFImporter::tr("Importing dense signal data from RDF...")); |
| 288 } | 297 } |
| 289 | 298 |
| 290 SimpleSPARQLQuery query | 299 Nodes sigFeatures = m_store->match |
| 291 (SimpleSPARQLQuery::QueryFromSingleSource, | 300 (Triple(Node(), expand("af:signal_feature"), Node())).objects(); |
| 292 QString | 301 |
| 293 ( | 302 foreach (Node sf, sigFeatures) { |
| 294 " PREFIX mo: <http://purl.org/ontology/mo/>" | 303 |
| 295 " PREFIX af: <http://purl.org/ontology/af/>" | 304 if (sf.type != Node::URI && sf.type != Node::Blank) continue; |
| 296 | 305 |
| 297 " SELECT ?feature ?feature_signal_type ?value " | 306 Node t = m_store->complete(Triple(sf, expand("a"), Node())); |
| 298 " FROM <%1> " | 307 Node v = m_store->complete(Triple(sf, expand("af:value"), Node())); |
| 299 | 308 |
| 300 " WHERE { " | 309 QString feature = sf.value; |
| 301 | 310 QString type = t.value; |
| 302 " ?signal af:signal_feature ?feature . " | 311 QString value = v.value; |
| 303 | 312 |
| 304 " ?feature a ?feature_signal_type ; " | 313 if (type == "" || value == "") continue; |
| 305 " af:value ?value . " | |
| 306 | |
| 307 " } " | |
| 308 ) | |
| 309 .arg(m_uristring)); | |
| 310 | |
| 311 SimpleSPARQLQuery::ResultList results = query.execute(); | |
| 312 | |
| 313 if (!query.isOK()) { | |
| 314 m_errorString = query.getErrorString(); | |
| 315 return; | |
| 316 } | |
| 317 | |
| 318 if (query.wasCancelled()) { | |
| 319 m_errorString = "Query cancelled"; | |
| 320 return; | |
| 321 } | |
| 322 | |
| 323 for (int i = 0; i < results.size(); ++i) { | |
| 324 | |
| 325 QString feature = results[i]["feature"].value; | |
| 326 QString type = results[i]["feature_signal_type"].value; | |
| 327 QString value = results[i]["value"].value; | |
| 328 | 314 |
| 329 int sampleRate = 0; | 315 int sampleRate = 0; |
| 330 int windowLength = 0; | 316 int windowLength = 0; |
| 331 int hopSize = 0; | 317 int hopSize = 0; |
| 332 int width = 0; | 318 int width = 0; |
| 408 void | 394 void |
| 409 RDFImporterImpl::getDenseModelTitle(Model *m, | 395 RDFImporterImpl::getDenseModelTitle(Model *m, |
| 410 QString featureUri, | 396 QString featureUri, |
| 411 QString featureTypeUri) | 397 QString featureTypeUri) |
| 412 { | 398 { |
| 413 QString titleQuery = QString | 399 Node n = m_store->complete |
| 414 ( | 400 (Triple(Uri(featureUri), expand("dc:title"), Node())); |
| 415 " PREFIX dc: <http://purl.org/dc/elements/1.1/> " | 401 |
| 416 " SELECT ?title " | 402 if (n.type == Node::Literal && n.value != "") { |
| 417 " FROM <%1> " | 403 SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << n.value << "\"" << endl; |
| 418 " WHERE { " | 404 m->setObjectName(n.value); |
| 419 " <%2> dc:title ?title . " | |
| 420 " } " | |
| 421 ).arg(m_uristring); | |
| 422 | |
| 423 SimpleSPARQLQuery::Value v; | |
| 424 | |
| 425 v = SimpleSPARQLQuery::singleResultQuery | |
| 426 (SimpleSPARQLQuery::QueryFromSingleSource, | |
| 427 titleQuery.arg(featureUri), | |
| 428 "title"); | |
| 429 | |
| 430 if (v.value != "") { | |
| 431 std::cerr << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << v.value.toStdString() << "\"" << std::endl; | |
| 432 m->setObjectName(v.value); | |
| 433 return; | 405 return; |
| 434 } | 406 } |
| 435 | 407 |
| 436 v = SimpleSPARQLQuery::singleResultQuery | 408 n = m_store->complete |
| 437 (SimpleSPARQLQuery::QueryFromSingleSource, | 409 (Triple(Uri(featureTypeUri), expand("dc:title"), Node())); |
| 438 titleQuery.arg(featureTypeUri), | 410 |
| 439 "title"); | 411 if (n.type == Node::Literal && n.value != "") { |
| 440 | 412 SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << n.value << "\"" << endl; |
| 441 if (v.value != "") { | 413 m->setObjectName(n.value); |
| 442 std::cerr << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << v.value.toStdString() << "\"" << std::endl; | |
| 443 m->setObjectName(v.value); | |
| 444 return; | 414 return; |
| 445 } | 415 } |
| 446 | 416 |
| 447 std::cerr << "RDFImporterImpl::getDenseModelTitle: No title available for feature <" << featureUri.toStdString() << ">" << std::endl; | 417 SVDEBUG << "RDFImporterImpl::getDenseModelTitle: No title available for feature <" << featureUri << ">" << endl; |
| 448 } | 418 } |
| 449 | 419 |
| 450 void | 420 void |
| 451 RDFImporterImpl::getDenseFeatureProperties(QString featureUri, | 421 RDFImporterImpl::getDenseFeatureProperties(QString featureUri, |
| 452 int &sampleRate, int &windowLength, | 422 int &sampleRate, int &windowLength, |
| 453 int &hopSize, int &width, int &height) | 423 int &hopSize, int &width, int &height) |
| 454 { | 424 { |
| 455 SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; | 425 Node dim = m_store->complete |
| 456 | 426 (Triple(Uri(featureUri), expand("af:dimensions"), Node())); |
| 457 QString dimensionsQuery | 427 |
| 458 ( | 428 cerr << "Dimensions = \"" << dim.value << "\"" << endl; |
| 459 " PREFIX mo: <http://purl.org/ontology/mo/>" | 429 |
| 460 " PREFIX af: <http://purl.org/ontology/af/>" | 430 if (dim.type == Node::Literal && dim.value != "") { |
| 461 | 431 QStringList dl = dim.value.split(" "); |
| 462 " SELECT ?dimensions " | 432 if (dl.empty()) dl.push_back(dim.value); |
| 463 " FROM <%1> " | |
| 464 | |
| 465 " WHERE { " | |
| 466 | |
| 467 " <%2> af:dimensions ?dimensions . " | |
| 468 | |
| 469 " } " | |
| 470 ); | |
| 471 | |
| 472 SimpleSPARQLQuery::Value dimensionsValue = | |
| 473 SimpleSPARQLQuery::singleResultQuery | |
| 474 (s, dimensionsQuery.arg(m_uristring).arg(featureUri), "dimensions"); | |
| 475 | |
| 476 cerr << "Dimensions = \"" << dimensionsValue.value.toStdString() << "\"" | |
| 477 << endl; | |
| 478 | |
| 479 if (dimensionsValue.value != "") { | |
| 480 QStringList dl = dimensionsValue.value.split(" "); | |
| 481 if (dl.empty()) dl.push_back(dimensionsValue.value); | |
| 482 if (dl.size() > 0) height = dl[0].toInt(); | 433 if (dl.size() > 0) height = dl[0].toInt(); |
| 483 if (dl.size() > 1) width = dl[1].toInt(); | 434 if (dl.size() > 1) width = dl[1].toInt(); |
| 484 } | 435 } |
| 485 | 436 |
| 486 QString queryTemplate | 437 // Looking for rate, hop, window from: |
| 487 ( | 438 // |
| 488 " PREFIX mo: <http://purl.org/ontology/mo/>" | 439 // ?feature mo:time ?time . |
| 489 " PREFIX af: <http://purl.org/ontology/af/>" | 440 // ?time a tl:Interval . |
| 490 " PREFIX tl: <http://purl.org/NET/c4dm/timeline.owl#>" | 441 // ?time tl:onTimeLine ?timeline . |
| 491 | 442 // ?map tl:rangeTimeLine ?timeline . |
| 492 " SELECT ?%3 " | 443 // ?map tl:sampleRate ?rate . |
| 493 " FROM <%1> " | 444 // ?map tl:hopSize ?hop . |
| 494 | 445 // ?map tl:windowLength ?window . |
| 495 " WHERE { " | 446 |
| 496 | 447 Node interval = m_store->complete(Triple(Uri(featureUri), expand("mo:time"), Node())); |
| 497 " <%2> mo:time ?time . " | 448 |
| 498 | 449 if (!m_store->contains(Triple(interval, expand("a"), expand("tl:Interval")))) { |
| 499 " ?time a tl:Interval ; " | 450 cerr << "RDFImporterImpl::getDenseFeatureProperties: Feature time node " |
| 500 " tl:onTimeLine ?timeline . " | 451 << interval << " is not a tl:Interval" << endl; |
| 501 | 452 return; |
| 502 " ?map tl:rangeTimeLine ?timeline . " | 453 } |
| 503 | 454 |
| 504 " ?map tl:%3 ?%3 . " | 455 Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); |
| 505 | 456 |
| 506 " } " | 457 if (tl == Node()) { |
| 507 ); | 458 cerr << "RDFImporterImpl::getDenseFeatureProperties: Interval node " |
| 508 | 459 << interval << " lacks tl:onTimeLine property" << endl; |
| 509 // Another laborious workaround for rasqal's failure to handle | 460 return; |
| 510 // multiple optionals properly | 461 } |
| 511 | 462 |
| 512 SimpleSPARQLQuery::Value srValue = | 463 Node map = m_store->complete(Triple(Node(), expand("tl:rangeTimeLine"), tl)); |
| 513 SimpleSPARQLQuery::singleResultQuery(s, | 464 |
| 514 queryTemplate | 465 if (map == Node()) { |
| 515 .arg(m_uristring).arg(featureUri) | 466 cerr << "RDFImporterImpl::getDenseFeatureProperties: No map for " |
| 516 .arg("sampleRate"), | 467 << "timeline node " << tl << endl; |
| 517 "sampleRate"); | 468 } |
| 518 if (srValue.value != "") { | 469 |
| 519 sampleRate = srValue.value.toInt(); | 470 PropertyObject po(m_store, "tl:", map); |
| 520 } | 471 |
| 521 | 472 if (po.hasProperty("sampleRate")) { |
| 522 SimpleSPARQLQuery::Value hopValue = | 473 sampleRate = po.getProperty("sampleRate").toInt(); |
| 523 SimpleSPARQLQuery::singleResultQuery(s, | 474 } |
| 524 queryTemplate | 475 if (po.hasProperty("hopSize")) { |
| 525 .arg(m_uristring).arg(featureUri) | 476 hopSize = po.getProperty("hopSize").toInt(); |
| 526 .arg("hopSize"), | 477 } |
| 527 "hopSize"); | 478 if (po.hasProperty("windowLength")) { |
| 528 if (srValue.value != "") { | 479 windowLength = po.getProperty("windowLength").toInt(); |
| 529 hopSize = hopValue.value.toInt(); | |
| 530 } | |
| 531 | |
| 532 SimpleSPARQLQuery::Value winValue = | |
| 533 SimpleSPARQLQuery::singleResultQuery(s, | |
| 534 queryTemplate | |
| 535 .arg(m_uristring).arg(featureUri) | |
| 536 .arg("windowLength"), | |
| 537 "windowLength"); | |
| 538 if (winValue.value != "") { | |
| 539 windowLength = winValue.value.toInt(); | |
| 540 } | 480 } |
| 541 | 481 |
| 542 cerr << "sr = " << sampleRate << ", hop = " << hopSize << ", win = " << windowLength << endl; | 482 cerr << "sr = " << sampleRate << ", hop = " << hopSize << ", win = " << windowLength << endl; |
| 543 } | 483 } |
| 544 | 484 |
| 548 { | 488 { |
| 549 if (reporter) { | 489 if (reporter) { |
| 550 reporter->setMessage(RDFImporter::tr("Importing event data from RDF...")); | 490 reporter->setMessage(RDFImporter::tr("Importing event data from RDF...")); |
| 551 } | 491 } |
| 552 | 492 |
| 553 SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; | |
| 554 | |
| 555 // Our query is intended to retrieve every thing that has a time, | |
| 556 // and every feature type and value associated with a thing that | |
| 557 // has a time. | |
| 558 | |
| 559 // We will then need to refine this big bag of results into a set | |
| 560 // of data models. | |
| 561 | |
| 562 // Results that have different source signals should go into | |
| 563 // different models. | |
| 564 | |
| 565 // Results that have different feature types should go into | |
| 566 // different models. | |
| 567 | |
| 568 // Results that are sparse should go into different models from | |
| 569 // those that are dense (we need to examine the timestamps to | |
| 570 // establish this -- if the timestamps are regular, the results | |
| 571 // are dense -- so we can't do it as we go along, only after | |
| 572 // collecting all results). | |
| 573 | |
| 574 // Timed things that have features associated with them should not | |
| 575 // appear directly in any model -- their features should appear | |
| 576 // instead -- and these should be different models from those used | |
| 577 // for timed things that do not have features. | |
| 578 | |
| 579 // As we load the results, we'll push them into a partially | |
| 580 // structured container that maps from source signal (URI as | |
| 581 // string) -> feature type (likewise) -> time -> list of values. | |
| 582 // If the source signal or feature type is unavailable, the empty | |
| 583 // string will do. | |
| 584 | |
| 585 QString prefixes = QString( | |
| 586 " PREFIX event: <http://purl.org/NET/c4dm/event.owl#>" | |
| 587 " PREFIX tl: <http://purl.org/NET/c4dm/timeline.owl#>" | |
| 588 " PREFIX mo: <http://purl.org/ontology/mo/>" | |
| 589 " PREFIX af: <http://purl.org/ontology/af/>" | |
| 590 " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" | |
| 591 ); | |
| 592 | |
| 593 QString queryString = prefixes + QString( | |
| 594 | |
| 595 " SELECT ?signal ?timed_thing ?timeline ?event_type ?value" | |
| 596 " FROM <%1>" | |
| 597 | |
| 598 " WHERE {" | |
| 599 | |
| 600 " ?signal a mo:Signal ." | |
| 601 | |
| 602 " ?signal mo:time ?interval ." | |
| 603 " ?interval tl:onTimeLine ?timeline ." | |
| 604 " ?time tl:onTimeLine ?timeline ." | |
| 605 " ?timed_thing event:time ?time ." | |
| 606 " ?timed_thing a ?event_type ." | |
| 607 | |
| 608 " OPTIONAL {" | |
| 609 " ?timed_thing af:feature ?value" | |
| 610 " }" | |
| 611 " }" | |
| 612 | |
| 613 ).arg(m_uristring); | |
| 614 | |
| 615 //!!! NB we're using rather old terminology for these things, apparently: | |
| 616 // beginsAt -> start | |
| 617 // onTimeLine -> timeline | |
| 618 | |
| 619 QString timeQueryString = prefixes + QString( | |
| 620 | |
| 621 " SELECT ?time FROM <%1> " | |
| 622 " WHERE { " | |
| 623 " <%2> event:time ?t . " | |
| 624 " ?t tl:at ?time . " | |
| 625 " } " | |
| 626 | |
| 627 ).arg(m_uristring); | |
| 628 | |
| 629 QString rangeQueryString = prefixes + QString( | |
| 630 | |
| 631 " SELECT ?time ?duration FROM <%1> " | |
| 632 " WHERE { " | |
| 633 " <%2> event:time ?t . " | |
| 634 " ?t tl:beginsAt ?time . " | |
| 635 " ?t tl:duration ?duration . " | |
| 636 " } " | |
| 637 | |
| 638 ).arg(m_uristring); | |
| 639 | |
| 640 QString labelQueryString = prefixes + QString( | |
| 641 | |
| 642 " SELECT ?label FROM <%1> " | |
| 643 " WHERE { " | |
| 644 " <%2> rdfs:label ?label . " | |
| 645 " } " | |
| 646 | |
| 647 ).arg(m_uristring); | |
| 648 | |
| 649 QString textQueryString = prefixes + QString( | |
| 650 | |
| 651 " SELECT ?label FROM <%1> " | |
| 652 " WHERE { " | |
| 653 " <%2> af:text ?label . " | |
| 654 " } " | |
| 655 | |
| 656 ).arg(m_uristring); | |
| 657 | |
| 658 SimpleSPARQLQuery query(s, queryString); | |
| 659 query.setProgressReporter(reporter); | |
| 660 | |
| 661 // cerr << "Query will be: " << queryString.toStdString() << endl; | |
| 662 | |
| 663 SimpleSPARQLQuery::ResultList results = query.execute(); | |
| 664 | |
| 665 if (!query.isOK()) { | |
| 666 m_errorString = query.getErrorString(); | |
| 667 return; | |
| 668 } | |
| 669 | |
| 670 if (query.wasCancelled()) { | |
| 671 m_errorString = "Query cancelled"; | |
| 672 return; | |
| 673 } | |
| 674 | |
| 675 /* | 493 /* |
| 676 This function is now only used for sparse data (for dense data | 494 This function is only used for sparse data (for dense data we |
| 677 we would be in getDataModelsDense instead). | 495 would be in getDataModelsDense instead). |
| 678 | 496 |
| 679 For sparse data, the determining factors in deciding what model | 497 Our query is intended to retrieve every thing that has a time, |
| 680 to use are: Do the features have values? and Do the features | 498 and every feature type and value associated with a thing that |
| 681 have duration? | 499 has a time. |
| 682 | 500 |
| 683 We can run through the results and check off whether we find | 501 We will then need to refine this big bag of results into a set |
| 684 values and duration for each of the source+type keys, and then | 502 of data models. |
| 685 run through the source+type keys pushing each of the results | 503 |
| 686 into a suitable model. | 504 Results that have different source signals should go into |
| 687 | 505 different models. |
| 688 Unfortunately, at this point we do not yet have any actual | 506 |
| 689 timing data (time/duration) -- just the time URI. | 507 Results that have different feature types should go into |
| 690 | 508 different models. |
| 691 What we _could_ do is to create one of each type of model at the | |
| 692 start, for each of the source+type keys, and then push each | |
| 693 feature into the relevant model depending on what we find out | |
| 694 about it. Then return only non-empty models. | |
| 695 */ | 509 */ |
| 510 | |
| 511 Nodes sigs = m_store->match | |
| 512 (Triple(Node(), expand("a"), expand("mo:Signal"))).subjects(); | |
| 696 | 513 |
| 697 // Map from timeline uri to event type to dimensionality to | 514 // Map from timeline uri to event type to dimensionality to |
| 698 // presence of duration to model ptr. Whee! | 515 // presence of duration to model ptr. Whee! |
| 699 std::map<QString, std::map<QString, std::map<int, std::map<bool, Model *> > > > | 516 std::map<QString, std::map<QString, std::map<int, std::map<bool, Model *> > > > |
| 700 modelMap; | 517 modelMap; |
| 701 | 518 |
| 702 for (int i = 0; i < results.size(); ++i) { | 519 foreach (Node sig, sigs) { |
| 703 | |
| 704 if (i % 4 == 0) { | |
| 705 if (reporter) reporter->setProgress(i/4); | |
| 706 } | |
| 707 | |
| 708 QString source = results[i]["signal"].value; | |
| 709 QString timeline = results[i]["timeline"].value; | |
| 710 QString type = results[i]["event_type"].value; | |
| 711 QString thinguri = results[i]["timed_thing"].value; | |
| 712 | 520 |
| 713 RealTime time; | 521 Node interval = m_store->complete(Triple(sig, expand("mo:time"), Node())); |
| 714 RealTime duration; | 522 if (interval == Node()) continue; |
| 715 | 523 |
| 716 bool haveTime = false; | 524 Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); |
| 717 bool haveDuration = false; | 525 if (tl == Node()) continue; |
| 718 | 526 |
| 719 QString label = ""; | 527 Nodes times = m_store->match(Triple(Node(), expand("tl:onTimeLine"), tl)).subjects(); |
| 720 bool text = (type.contains("Text") || type.contains("text")); // Ha, ha | 528 |
| 721 | 529 foreach (Node tn, times) { |
| 722 if (text) { | 530 |
| 723 label = SimpleSPARQLQuery::singleResultQuery | 531 Nodes timedThings = m_store->match(Triple(Node(), expand("event:time"), tn)).subjects(); |
| 724 (s, textQueryString.arg(thinguri), "label").value; | 532 |
| 725 } | 533 foreach (Node thing, timedThings) { |
| 726 | 534 |
| 727 if (label == "") { | 535 Node typ = m_store->complete(Triple(thing, expand("a"), Node())); |
| 728 label = SimpleSPARQLQuery::singleResultQuery | 536 if (typ == Node()) continue; |
| 729 (s, labelQueryString.arg(thinguri), "label").value; | 537 |
| 730 } | 538 Node valu = m_store->complete(Triple(thing, expand("af:feature"), Node())); |
| 731 | 539 |
| 732 SimpleSPARQLQuery rangeQuery(s, rangeQueryString.arg(thinguri)); | 540 QString source = sig.value; |
| 733 SimpleSPARQLQuery::ResultList rangeResults = rangeQuery.execute(); | 541 QString timeline = tl.value; |
| 734 if (!rangeResults.empty()) { | 542 QString type = typ.value; |
| 735 // std::cerr << rangeResults.size() << " range results" << std::endl; | 543 QString thinguri = thing.value; |
| 736 time = RealTime::fromXsdDuration | 544 |
| 737 (rangeResults[0]["time"].value.toStdString()); | 545 /* |
| 738 duration = RealTime::fromXsdDuration | 546 For sparse data, the determining factors in deciding |
| 739 (rangeResults[0]["duration"].value.toStdString()); | 547 what model to use are: Do the features have values? |
| 740 // std::cerr << "duration string " << rangeResults[0]["duration"].value.toStdString() << std::endl; | 548 and Do the features have duration? |
| 741 haveTime = true; | 549 |
| 742 haveDuration = true; | 550 We can run through the results and check off whether |
| 743 } else { | 551 we find values and duration for each of the |
| 744 QString timestring = SimpleSPARQLQuery::singleResultQuery | 552 source+type keys, and then run through the |
| 745 (s, timeQueryString.arg(thinguri), "time").value; | 553 source+type keys pushing each of the results into a |
| 746 // std::cerr << "timestring = " << timestring.toStdString() << std::endl; | 554 suitable model. |
| 747 if (timestring != "") { | 555 |
| 748 time = RealTime::fromXsdDuration(timestring.toStdString()); | 556 Unfortunately, at this point we do not yet have any |
| 749 haveTime = true; | 557 actual timing data (time/duration) -- just the time |
| 750 } | 558 URI. |
| 751 } | 559 |
| 752 | 560 What we _could_ do is to create one of each type of |
| 753 QString valuestring = results[i]["value"].value; | 561 model at the start, for each of the source+type |
| 754 std::vector<float> values; | 562 keys, and then push each feature into the relevant |
| 755 | 563 model depending on what we find out about it. Then |
| 756 if (valuestring != "") { | 564 return only non-empty models. |
| 757 QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); | 565 */ |
| 758 for (int j = 0; j < vsl.size(); ++j) { | 566 |
| 759 bool success = false; | 567 QString label = ""; |
| 760 float v = vsl[j].toFloat(&success); | 568 bool text = (type.contains("Text") || type.contains("text")); // Ha, ha |
| 761 if (success) values.push_back(v); | 569 bool note = (type.contains("Note") || type.contains("note")); // Guffaw |
| 762 } | 570 |
| 763 } | 571 if (text) { |
| 764 | 572 label = m_store->complete(Triple(thing, expand("af:text"), Node())).value; |
| 765 int dimensions = 1; | 573 } |
| 766 if (values.size() == 1) dimensions = 2; | 574 |
| 767 else if (values.size() > 1) dimensions = 3; | 575 if (label == "") { |
| 768 | 576 label = m_store->complete(Triple(thing, expand("rdfs:label"), Node())).value; |
| 769 Model *model = 0; | 577 } |
| 770 | 578 |
| 771 if (modelMap[timeline][type][dimensions].find(haveDuration) == | 579 RealTime time; |
| 772 modelMap[timeline][type][dimensions].end()) { | 580 RealTime duration; |
| 581 | |
| 582 bool haveTime = false; | |
| 583 bool haveDuration = false; | |
| 584 | |
| 585 Node at = m_store->complete(Triple(tn, expand("tl:at"), Node())); | |
| 586 | |
| 587 if (at != Node()) { | |
| 588 time = RealTime::fromXsdDuration(at.value.toStdString()); | |
| 589 haveTime = true; | |
| 590 } else { | |
| 591 //!!! NB we're using rather old terminology for these things, apparently: | |
| 592 // beginsAt -> start | |
| 593 // onTimeLine -> timeline | |
| 594 | |
| 595 Node start = m_store->complete(Triple(tn, expand("tl:beginsAt"), Node())); | |
| 596 Node dur = m_store->complete(Triple(tn, expand("tl:duration"), Node())); | |
| 597 if (start != Node() && dur != Node()) { | |
| 598 time = RealTime::fromXsdDuration | |
| 599 (start.value.toStdString()); | |
| 600 duration = RealTime::fromXsdDuration | |
| 601 (dur.value.toStdString()); | |
| 602 haveTime = haveDuration = true; | |
| 603 } | |
| 604 } | |
| 605 | |
| 606 QString valuestring = valu.value; | |
| 607 std::vector<float> values; | |
| 608 | |
| 609 if (valuestring != "") { | |
| 610 QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); | |
| 611 for (int j = 0; j < vsl.size(); ++j) { | |
| 612 bool success = false; | |
| 613 float v = vsl[j].toFloat(&success); | |
| 614 if (success) values.push_back(v); | |
| 615 } | |
| 616 } | |
| 617 | |
| 618 int dimensions = 1; | |
| 619 if (values.size() == 1) dimensions = 2; | |
| 620 else if (values.size() > 1) dimensions = 3; | |
| 621 | |
| 622 Model *model = 0; | |
| 623 | |
| 624 if (modelMap[timeline][type][dimensions].find(haveDuration) == | |
| 625 modelMap[timeline][type][dimensions].end()) { | |
| 773 | 626 |
| 774 /* | 627 /* |
| 775 std::cerr << "Creating new model: source = " << source.toStdString() | 628 SVDEBUG << "Creating new model: source = " << source << ", type = " << type << ", dimensions = " |
| 776 << ", type = " << type.toStdString() << ", dimensions = " | |
| 777 << dimensions << ", haveDuration = " << haveDuration | 629 << dimensions << ", haveDuration = " << haveDuration |
| 778 << ", time = " << time << ", duration = " << duration | 630 << ", time = " << time << ", duration = " << duration |
| 779 << std::endl; | 631 << endl; |
| 780 */ | 632 */ |
| 781 | 633 |
| 782 if (!haveDuration) { | 634 if (!haveDuration) { |
| 783 | 635 |
| 784 if (dimensions == 1) { | 636 if (dimensions == 1) { |
| 785 | 637 if (text) { |
| 786 if (text) { | 638 model = new TextModel(m_sampleRate, 1, false); |
| 787 | 639 } else { |
| 788 model = new TextModel(m_sampleRate, 1, false); | 640 model = new SparseOneDimensionalModel(m_sampleRate, 1, false); |
| 789 | 641 } |
| 790 } else { | 642 } else if (dimensions == 2) { |
| 791 | 643 if (text) { |
| 792 model = new SparseOneDimensionalModel(m_sampleRate, 1, false); | 644 model = new TextModel(m_sampleRate, 1, false); |
| 645 } else { | |
| 646 model = new SparseTimeValueModel(m_sampleRate, 1, false); | |
| 647 } | |
| 648 } else { | |
| 649 // We don't have a three-dimensional sparse model, | |
| 650 // so use a note model. We do have some logic (in | |
| 651 // extractStructure below) for guessing whether | |
| 652 // this should after all have been a dense model, | |
| 653 // but it's hard to apply it because we don't have | |
| 654 // all the necessary timing data yet... hmm | |
| 655 model = new NoteModel(m_sampleRate, 1, false); | |
| 656 } | |
| 657 | |
| 658 } else { // haveDuration | |
| 659 | |
| 660 if (note || (dimensions > 2)) { | |
| 661 model = new NoteModel(m_sampleRate, 1, false); | |
| 662 } else { | |
| 663 // If our units are frequency or midi pitch, we | |
| 664 // should be using a note model... hm | |
| 665 model = new RegionModel(m_sampleRate, 1, false); | |
| 666 } | |
| 793 } | 667 } |
| 794 | 668 |
| 795 } else if (dimensions == 2) { | 669 model->setRDFTypeURI(type); |
| 796 | 670 |
| 797 if (text) { | 671 if (m_audioModelMap.find(source) != m_audioModelMap.end()) { |
| 798 | 672 std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; |
| 799 model = new TextModel(m_sampleRate, 1, false); | 673 model->setSourceModel(m_audioModelMap[source]); |
| 800 | |
| 801 } else { | |
| 802 | |
| 803 model = new SparseTimeValueModel(m_sampleRate, 1, false); | |
| 804 } | 674 } |
| 805 | 675 |
| 806 } else { | 676 QString title = m_store->complete |
| 807 | 677 (Triple(typ, expand("dc:title"), Node())).value; |
| 808 // We don't have a three-dimensional sparse model, | 678 if (title == "") { |
| 809 // so use a note model. We do have some logic (in | 679 // take it from the end of the event type |
| 810 // extractStructure below) for guessing whether | 680 title = type; |
| 811 // this should after all have been a dense model, | 681 title.replace(QRegExp("^.*[/#]"), ""); |
| 812 // but it's hard to apply it because we don't have | 682 } |
| 813 // all the necessary timing data yet... hmm | 683 model->setObjectName(title); |
| 814 | 684 |
| 815 model = new NoteModel(m_sampleRate, 1, false); | 685 modelMap[timeline][type][dimensions][haveDuration] = model; |
| 816 } | 686 models.push_back(model); |
| 817 | 687 } |
| 818 } else { // haveDuration | 688 |
| 819 | 689 model = modelMap[timeline][type][dimensions][haveDuration]; |
| 820 if (dimensions == 1 || dimensions == 2) { | 690 |
| 821 | 691 if (model) { |
| 822 // If our units are frequency or midi pitch, we | 692 long ftime = RealTime::realTime2Frame(time, m_sampleRate); |
| 823 // should be using a note model... hm | 693 long fduration = RealTime::realTime2Frame(duration, m_sampleRate); |
| 824 | 694 fillModel(model, ftime, fduration, haveDuration, values, label); |
| 825 model = new RegionModel(m_sampleRate, 1, false); | 695 } |
| 826 | 696 } |
| 827 } else { | |
| 828 | |
| 829 // We don't have a three-dimensional sparse model, | |
| 830 // so use a note model. We do have some logic (in | |
| 831 // extractStructure below) for guessing whether | |
| 832 // this should after all have been a dense model, | |
| 833 // but it's hard to apply it because we don't have | |
| 834 // all the necessary timing data yet... hmm | |
| 835 | |
| 836 model = new NoteModel(m_sampleRate, 1, false); | |
| 837 } | |
| 838 } | |
| 839 | |
| 840 model->setRDFTypeURI(type); | |
| 841 | |
| 842 if (m_audioModelMap.find(source) != m_audioModelMap.end()) { | |
| 843 std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; | |
| 844 model->setSourceModel(m_audioModelMap[source]); | |
| 845 } | |
| 846 | |
| 847 QString titleQuery = QString | |
| 848 ( | |
| 849 " PREFIX dc: <http://purl.org/dc/elements/1.1/> " | |
| 850 " SELECT ?title " | |
| 851 " FROM <%1> " | |
| 852 " WHERE { " | |
| 853 " <%2> dc:title ?title . " | |
| 854 " } " | |
| 855 ).arg(m_uristring).arg(type); | |
| 856 QString title = SimpleSPARQLQuery::singleResultQuery | |
| 857 (s, titleQuery, "title").value; | |
| 858 if (title == "") { | |
| 859 // take it from the end of the event type | |
| 860 title = type; | |
| 861 title.replace(QRegExp("^.*[/#]"), ""); | |
| 862 } | |
| 863 model->setObjectName(title); | |
| 864 | |
| 865 modelMap[timeline][type][dimensions][haveDuration] = model; | |
| 866 models.push_back(model); | |
| 867 } | |
| 868 | |
| 869 model = modelMap[timeline][type][dimensions][haveDuration]; | |
| 870 | |
| 871 if (model) { | |
| 872 long ftime = RealTime::realTime2Frame(time, m_sampleRate); | |
| 873 long fduration = RealTime::realTime2Frame(duration, m_sampleRate); | |
| 874 fillModel(model, ftime, fduration, haveDuration, values, label); | |
| 875 } | 697 } |
| 876 } | 698 } |
| 877 } | 699 } |
| 878 | 700 |
| 879 void | 701 void |
| 882 long fduration, | 704 long fduration, |
| 883 bool haveDuration, | 705 bool haveDuration, |
| 884 std::vector<float> &values, | 706 std::vector<float> &values, |
| 885 QString label) | 707 QString label) |
| 886 { | 708 { |
| 887 // std::cerr << "RDFImporterImpl::fillModel: adding point at frame " << ftime << std::endl; | 709 // SVDEBUG << "RDFImporterImpl::fillModel: adding point at frame " << ftime << endl; |
| 888 | 710 |
| 889 SparseOneDimensionalModel *sodm = | 711 SparseOneDimensionalModel *sodm = |
| 890 dynamic_cast<SparseOneDimensionalModel *>(model); | 712 dynamic_cast<SparseOneDimensionalModel *>(model); |
| 891 if (sodm) { | 713 if (sodm) { |
| 892 SparseOneDimensionalModel::Point point(ftime, label); | 714 SparseOneDimensionalModel::Point point(ftime, label); |
| 983 RDFImporter::RDFDocumentType | 805 RDFImporter::RDFDocumentType |
| 984 RDFImporter::identifyDocumentType(QString url) | 806 RDFImporter::identifyDocumentType(QString url) |
| 985 { | 807 { |
| 986 bool haveAudio = false; | 808 bool haveAudio = false; |
| 987 bool haveAnnotations = false; | 809 bool haveAnnotations = false; |
| 988 | 810 bool haveRDF = false; |
| 989 // This query is not expected to return any values, but if it | 811 |
| 990 // executes successfully (leaving no error in the error string) | 812 BasicStore *store = 0; |
| 991 // then we know we have RDF | 813 |
| 992 SimpleSPARQLQuery q(SimpleSPARQLQuery::QueryFromSingleSource, | 814 // This is not expected to return anything useful, but if it does |
| 993 QString(" SELECT ?x FROM <%1> WHERE { ?x <y> <z> } ") | 815 // anything at all then we know we have RDF |
| 994 .arg(url)); | 816 try { |
| 995 | 817 //!!! non-local document? |
| 996 SimpleSPARQLQuery::ResultList r = q.execute(); | 818 store = BasicStore::load(QUrl(url)); |
| 997 if (!q.isOK()) { | 819 Triple t = store->matchOnce(Triple()); |
| 998 SimpleSPARQLQuery::closeSingleSource(url); | 820 if (t != Triple()) haveRDF = true; |
| 821 } catch (std::exception &e) { | |
| 822 // nothing; haveRDF will be false so the next bit catches it | |
| 823 } | |
| 824 | |
| 825 if (!haveRDF) { | |
| 826 delete store; | |
| 999 return NotRDF; | 827 return NotRDF; |
| 1000 } | 828 } |
| 1001 | 829 |
| 830 store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); | |
| 831 store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); | |
| 832 store->addPrefix("af", Uri("http://purl.org/ontology/af/")); | |
| 833 | |
| 1002 // "MO-conformant" structure for audio files | 834 // "MO-conformant" structure for audio files |
| 1003 | 835 |
| 1004 SimpleSPARQLQuery::Value value = | 836 Node n = store->complete(Triple(Node(), Uri("a"), store->expand("mo:AudioFile"))); |
| 1005 SimpleSPARQLQuery::singleResultQuery | 837 if (n != Node() && n.type == Node::URI) { |
| 1006 (SimpleSPARQLQuery::QueryFromSingleSource, | |
| 1007 QString | |
| 1008 (" PREFIX mo: <http://purl.org/ontology/mo/> " | |
| 1009 " SELECT ?url FROM <%1> " | |
| 1010 " WHERE { ?url a mo:AudioFile } " | |
| 1011 ).arg(url), | |
| 1012 "url"); | |
| 1013 | |
| 1014 if (value.type == SimpleSPARQLQuery::URIValue) { | |
| 1015 | 838 |
| 1016 haveAudio = true; | 839 haveAudio = true; |
| 1017 | 840 |
| 1018 } else { | 841 } else { |
| 1019 | 842 |
| 1020 // Sonic Annotator v0.2 and below used to write this structure | 843 // Sonic Annotator v0.2 and below used to write this structure |
| 1021 // (which is not properly in conformance with the Music | 844 // (which is not properly in conformance with the Music |
| 1022 // Ontology) | 845 // Ontology) |
| 1023 | 846 |
| 1024 value = | 847 Nodes sigs = store->match(Triple(Node(), Uri("a"), store->expand("mo:Signal"))).subjects(); |
| 1025 SimpleSPARQLQuery::singleResultQuery | 848 foreach (Node sig, sigs) { |
| 1026 (SimpleSPARQLQuery::QueryFromSingleSource, | 849 Node aa = store->complete(Triple(sig, store->expand("mo:available_as"), Node())); |
| 1027 QString | 850 if (aa != Node()) { |
| 1028 (" PREFIX mo: <http://purl.org/ontology/mo/> " | 851 haveAudio = true; |
| 1029 " SELECT ?url FROM <%1> " | 852 break; |
| 1030 " WHERE { ?signal a mo:Signal ; mo:available_as ?url } " | 853 } |
| 1031 ).arg(url), | 854 } |
| 1032 "url"); | 855 } |
| 1033 | 856 |
| 1034 if (value.type == SimpleSPARQLQuery::URIValue) { | 857 SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " |
| 1035 haveAudio = true; | 858 << haveAudio << endl; |
| 1036 } | 859 |
| 1037 } | 860 // can't call complete() with two Nothing nodes |
| 1038 | 861 n = store->matchOnce(Triple(Node(), store->expand("event:time"), Node())).c; |
| 1039 std::cerr << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " | 862 if (n != Node()) { |
| 1040 << haveAudio << std::endl; | |
| 1041 | |
| 1042 value = | |
| 1043 SimpleSPARQLQuery::singleResultQuery | |
| 1044 (SimpleSPARQLQuery::QueryFromSingleSource, | |
| 1045 QString | |
| 1046 (" PREFIX event: <http://purl.org/NET/c4dm/event.owl#> " | |
| 1047 " SELECT ?thing FROM <%1> " | |
| 1048 " WHERE { ?thing event:time ?time } " | |
| 1049 ).arg(url), | |
| 1050 "thing"); | |
| 1051 | |
| 1052 if (value.type == SimpleSPARQLQuery::URIValue) { | |
| 1053 haveAnnotations = true; | 863 haveAnnotations = true; |
| 1054 } | 864 } |
| 1055 | 865 |
| 1056 if (!haveAnnotations) { | 866 if (!haveAnnotations) { |
| 1057 | 867 // can't call complete() with two Nothing nodes |
| 1058 value = | 868 n = store->matchOnce(Triple(Node(), store->expand("af:signal_feature"), Node())).c; |
| 1059 SimpleSPARQLQuery::singleResultQuery | 869 if (n != Node()) { |
| 1060 (SimpleSPARQLQuery::QueryFromSingleSource, | |
| 1061 QString | |
| 1062 (" PREFIX af: <http://purl.org/ontology/af/> " | |
| 1063 " SELECT ?thing FROM <%1> " | |
| 1064 " WHERE { ?signal af:signal_feature ?thing } " | |
| 1065 ).arg(url), | |
| 1066 "thing"); | |
| 1067 | |
| 1068 if (value.type == SimpleSPARQLQuery::URIValue) { | |
| 1069 haveAnnotations = true; | 870 haveAnnotations = true; |
| 1070 } | 871 } |
| 1071 } | 872 } |
| 1072 | 873 |
| 1073 std::cerr << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " | 874 SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " |
| 1074 << haveAnnotations << std::endl; | 875 << haveAnnotations << endl; |
| 1075 | 876 |
| 1076 SimpleSPARQLQuery::closeSingleSource(url); | 877 delete store; |
| 1077 | 878 |
| 1078 if (haveAudio) { | 879 if (haveAudio) { |
| 1079 if (haveAnnotations) { | 880 if (haveAnnotations) { |
| 1080 return AudioRefAndAnnotations; | 881 return AudioRefAndAnnotations; |
| 1081 } else { | 882 } else { |
| 1090 } | 891 } |
| 1091 | 892 |
| 1092 return OtherRDFDocument; | 893 return OtherRDFDocument; |
| 1093 } | 894 } |
| 1094 | 895 |
| 1095 void | |
| 1096 RDFImporterImpl::loadPrefixes(ProgressReporter *reporter) | |
| 1097 { | |
| 1098 return; | |
| 1099 //!!! | |
| 1100 if (m_prefixesLoaded) return; | |
| 1101 const char *prefixes[] = { | |
| 1102 "http://purl.org/NET/c4dm/event.owl", | |
| 1103 "http://purl.org/NET/c4dm/timeline.owl", | |
| 1104 "http://purl.org/ontology/mo/", | |
| 1105 "http://purl.org/ontology/af/", | |
| 1106 "http://www.w3.org/2000/01/rdf-schema", | |
| 1107 "http://purl.org/dc/elements/1.1/", | |
| 1108 }; | |
| 1109 for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { | |
| 1110 CachedFile cf(prefixes[i], reporter, "application/rdf+xml"); | |
| 1111 if (!cf.isOK()) continue; | |
| 1112 SimpleSPARQLQuery::addSourceToModel | |
| 1113 (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); | |
| 1114 } | |
| 1115 m_prefixesLoaded = true; | |
| 1116 } |

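The central change in this revision replaces SimpleSPARQLQuery text queries (run through rasqal against a single source) with direct triple matching on a Dataquay BasicStore. Below is a minimal sketch of that pattern as used by the new getDataModelsAudio(), restricted to calls that appear in this changeset; the store is assumed to have been populated via import() and given the "mo" prefix as in the new constructor, and the function name is illustrative only.

```cpp
// Sketch only: mirrors the Dataquay calls used by the new getDataModelsAudio().

#include <dataquay/BasicStore.h>

#include <QtGlobal>   // for the foreach macro used throughout the new code
#include <iostream>

using Dataquay::BasicStore;
using Dataquay::Node;
using Dataquay::Nodes;
using Dataquay::Triple;
using Dataquay::Uri;

// Old query:  SELECT ?signal ?source WHERE { ?source a mo:AudioFile .
//                 ?signal a mo:Signal . ?source mo:encodes ?signal }
static void listSignalSources(BasicStore *store) // illustrative name
{
    // every subject typed as mo:Signal ("a" expands to rdf:type)
    Nodes sigs = store->match
        (Triple(Node(), Uri("a"), store->expand("mo:Signal"))).subjects();

    foreach (Node sig, sigs) {

        // complete() fills in the single free node of a pattern,
        // returning Node() when nothing matches
        Node file = store->complete
            (Triple(Node(), store->expand("mo:encodes"), sig));

        if (file == Node()) {
            // fall back to the older mo:available_as form, as the second
            // query in the removed code did
            file = store->complete
                (Triple(sig, store->expand("mo:available_as"), Node()));
        }
        if (file == Node()) continue;

        std::cerr << "signal " << sig.value.toStdString()
                  << " is available as " << file.value.toStdString()
                  << std::endl;
    }
}
```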
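
The dense-feature path drops the three per-property SPARQL queries (the old workaround for rasqal's handling of multiple OPTIONALs) in favour of a short walk over the timeline structure plus a Dataquay PropertyObject. A sketch of that walk under the same assumptions (store populated, "mo"/"tl" prefixes registered); the helper name is not part of the file.

```cpp
// Sketch of the timing lookup in the new getDenseFeatureProperties():
// feature --mo:time--> interval --tl:onTimeLine--> timeline
//                                 <--tl:rangeTimeLine-- map

#include <dataquay/BasicStore.h>
#include <dataquay/PropertyObject.h>

using Dataquay::BasicStore;
using Dataquay::Node;
using Dataquay::PropertyObject;
using Dataquay::Triple;
using Dataquay::Uri;

static void readDenseTiming(BasicStore *store, Uri feature,
                            int &sampleRate, int &hopSize, int &windowLength)
{
    Node interval = store->complete
        (Triple(feature, store->expand("mo:time"), Node()));
    if (interval == Node()) return;

    // the interval should itself be typed tl:Interval
    if (!store->contains(Triple(interval, store->expand("a"),
                                store->expand("tl:Interval")))) return;

    Node tl = store->complete
        (Triple(interval, store->expand("tl:onTimeLine"), Node()));
    if (tl == Node()) return;

    // the "map" node carries the uniform-sampling parameters for that timeline
    Node map = store->complete
        (Triple(Node(), store->expand("tl:rangeTimeLine"), tl));
    if (map == Node()) return;

    // PropertyObject resolves "sampleRate" etc. against the "tl:" prefix
    PropertyObject po(store, "tl:", map);
    if (po.hasProperty("sampleRate"))   sampleRate   = po.getProperty("sampleRate").toInt();
    if (po.hasProperty("hopSize"))      hopSize      = po.getProperty("hopSize").toInt();
    if (po.hasProperty("windowLength")) windowLength = po.getProperty("windowLength").toInt();
}
```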
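
The sparse import decides which model class to build from three facts per source+type key: how many values each event carries (dimensions), whether it has a duration, and whether the event type looks like text or a note. The same branches appear inline in the new getDataModelsSparse(); they are restated here as a hypothetical helper purely to make them easier to scan. The helper does not exist in the file, and the header paths are assumed from the usual svcore layout.

```cpp
// Illustrative helper only: restates the model-selection branches of the
// new getDataModelsSparse(). The model classes and their
// (sampleRate, resolution, notifyOnAdd) constructors are the ones used in
// RDFImporter.cpp; the helper itself does not exist there.

#include "data/model/Model.h"
#include "data/model/SparseOneDimensionalModel.h"
#include "data/model/SparseTimeValueModel.h"
#include "data/model/NoteModel.h"
#include "data/model/RegionModel.h"
#include "data/model/TextModel.h"

static Model *makeSparseModel(int sampleRate, int dimensions,
                              bool haveDuration, bool text, bool note)
{
    if (!haveDuration) {
        if (dimensions == 1) {            // no values: instants (or text)
            if (text) return new TextModel(sampleRate, 1, false);
            return new SparseOneDimensionalModel(sampleRate, 1, false);
        }
        if (dimensions == 2) {            // one value per event
            if (text) return new TextModel(sampleRate, 1, false);
            return new SparseTimeValueModel(sampleRate, 1, false);
        }
        // more than one value: there is no three-dimensional sparse model,
        // so fall back to a note model
        return new NoteModel(sampleRate, 1, false);
    }

    // with duration: note-like types (or >2 dimensions) become notes,
    // everything else becomes regions
    if (note || dimensions > 2) return new NoteModel(sampleRate, 1, false);
    return new RegionModel(sampleRate, 1, false);
}
```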
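
Event timing arrives as literals on tl:at, or on tl:beginsAt plus tl:duration, and both the old and new code turn them into frame positions through RealTime before calling fillModel(). A small sketch of that conversion; the example literal and sample rate in the comment are illustrative.

```cpp
// Sketch: turning the xsd:duration-style literals found on tl:at (or
// tl:beginsAt / tl:duration) into frame counts, as the sparse import does.

#include "base/RealTime.h"

#include <string>

static long xsdTimeToFrame(const std::string &literal, int sampleRate)
{
    // e.g. a tl:at value of "PT1.5S" at 44100 Hz -> frame 66150
    RealTime t = RealTime::fromXsdDuration(literal);
    return RealTime::realTime2Frame(t, sampleRate);
}
```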
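
identifyDocumentType() now loads the document into a throwaway BasicStore and probes it with matchOnce(): an all-wildcard pattern proves the document is RDF at all, and patterns on event:time and af:signal_feature detect annotations (complete() cannot be used there because two nodes of the pattern are unknown). A condensed sketch follows, returning a plain bool rather than the real function's document-type enum; the function name is illustrative.

```cpp
// Sketch of the RDF and annotation checks in the new identifyDocumentType().

#include <dataquay/BasicStore.h>

#include <QString>
#include <QUrl>
#include <exception>

using Dataquay::BasicStore;
using Dataquay::Node;
using Dataquay::Triple;
using Dataquay::Uri;

static bool looksLikeAnnotationRDF(QString url) // illustrative name
{
    BasicStore *store = 0;
    bool haveRDF = false;

    try {
        store = BasicStore::load(QUrl(url));
        // any triple at all means we have RDF
        if (store->matchOnce(Triple()) != Triple()) haveRDF = true;
    } catch (std::exception &) {
        // fall through with haveRDF == false
    }

    if (!haveRDF) {
        delete store;
        return false;
    }

    store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#"));
    store->addPrefix("af", Uri("http://purl.org/ontology/af/"));

    // complete() cannot take two wildcard nodes, so use matchOnce() and
    // inspect the object (.c) of whatever triple comes back
    bool haveAnnotations =
        store->matchOnce(Triple(Node(), store->expand("event:time"),
                                Node())).c != Node();
    if (!haveAnnotations) {
        haveAnnotations =
            store->matchOnce(Triple(Node(), store->expand("af:signal_feature"),
                                    Node())).c != Node();
    }

    delete store;
    return haveAnnotations;
}
```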