diff dml-cla/python/rdf_wrapper.py @ 0:718306e29690 tip

committing public release
author Daniel Wolff
date Tue, 09 Feb 2016 21:05:06 +0100
parents
children
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dml-cla/python/rdf_wrapper.py	Tue Feb 09 21:05:06 2016 +0100
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Part of DML (Digital Music Laboratory)
+# Copyright 2014-2015 Steven Hargreaves
+ 
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+# 
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+# 
+# You should have received a copy of the GNU General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+__author__ = "hargreavess"
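+
+# Usage sketch (assumed invocation; the file names are illustrative): the
+# wrapper reads N3 triples describing computations from stdin and writes
+# the resulting triples as N3 to stdout, so it is typically run in a
+# pipeline such as:
+#
+#   python rdf_wrapper.py < computation.n3 > result.n3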
+
+import ConfigParser
+import logging
+import sys
+import os
+import time
+import shutil
+import argparse
+from os import walk
+import rdflib
+from rdflib import Graph, Namespace, BNode, Literal, RDF, RDFS
+from RDFClosure import DeductiveClosure, OWLRL_Semantics
+import transforms.keyTonicHistogram
+import transforms.tuningFrequencyStatistics
+import transforms.semitoneHistogram
+import math
+
+cla = Namespace("http://dml.org/cla#")
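+
+# A minimal sketch of the kind of N3 input the main loop below expects on
+# stdin. The function expression and input value are illustrative
+# assumptions, not values defined by the DML ontology; any Python
+# expression that eval's to a callable accepting the input literal's value
+# would fit:
+#
+#   @prefix cla: <http://dml.org/cla#> .
+#   [] cla:function "transforms.keyTonicHistogram.run" ;
+#      cla:input    "some input literal" .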
+
+def print_status(msg):
+    sys.stderr.write(msg+'\n')
+
+def main():
+
+    # get config
+    # print_status("Reading configuration...")
+    # config = ConfigParser.ConfigParser()
+    # config.read('dml-analyser.cfg')
+
+    # parse dmlcla ontology and input graph
+    input_graph = Graph()
+    # print_status("Reading DML CLA ontology...")
+    # input_graph.parse(config.get('Ontology', 'dmlclaOntology_URI'), format="n3")
+    # print_status("Reading input triples...")
+    input_graph.parse(sys.stdin, format="n3")
+    # print_status("------")
+    # input_graph.serialize(destination=sys.stderr,format='n3')
+    # print_status("------")
+    # print_status("Forward chaining OWL entailments...")
+    # DeductiveClosure(OWLRL_Semantics).expand(input_graph)
+
+    # initialise output rdf graph
+    # bnode = BNode()
+    # print_status("Building output graph...")
+    output_graph = Graph()
+    output_graph.bind("dmlcla",cla)
+
+    # for every computation node, look up its function expression and input,
+    # evaluate the expression to obtain a callable, apply it to the input
+    # literal's value, and record the result as cla:output
+    for comp in input_graph.subjects(cla.function, None):
+        func = input_graph.value(comp, cla.function)
+        inp = input_graph.value(comp, cla.input)
+        print_status("Got computation %s: %s(%s)" % (comp, func, inp))
+        fn = eval(func)
+        output = fn(inp.value)
+        print_status("Result is %s" % output)
+        output_graph.set((comp, cla.output, Literal(output)))
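+
+    # each pass through the loop above asserts one triple of the form
+    #   <comp> cla:output "<result>"
+    # in the output graph (a sketch; the actual literal depends on what
+    # the eval'd transform function returns)
+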
+    # comps = input_graph.query(
+    #     """prefix cla:     <http://dml.org/cla#>
+    #         SELECT ?comp ?function ?input
+    #         WHERE {
+    #             ?comp cla:function ?function .
+    #             ?comp cla:input ?input
+    #         }""")
+
+    # for row in comps:
+    #     print_status("Got computation %s: %s(%s)" % (row.comp,row.function,row.input))
+    #     fn = eval(row.function)
+    #     output = fn(row.input)
+#     print_status("Result is %s" % output)
+
+
+    # # Determine which transforms are to be applied, and
+    # # the associated input files
+    # transforms = find_transforms_in_n3(input_graph)
+
+    # # Apply the transform(s) to each file and create 
+    # # rdf results graph
+    # execute_transforms(transforms, output_graph)
+
+    # # Write output rdf to stdout
+    # print_status("Writing output triples...")
+    output_graph.serialize(destination=sys.stdout, format='n3')
+
+# # Loop through all transforms, process the corresponding
+# # input files appropriately and add the (RDF) result to output_graph
+# def execute_transforms(transforms, output_graph):
+
+#     transform_iter = transforms.iterkeys()
+#     key_histogram = []
+
+#     for (transform, transform_type) in transforms:
+
+#         input_f_files = transforms.get((transform, transform_type))
+
+#         # Add additional clauses to this if statement
+#         # for each transform type
+#         if transform_type == rdflib.term.URIRef(u'http://dml.org/dml/cla#CollectionLevelKeyTonic'):
+
+#             transforms.keyTonicHistogram.run(transform,input_f_files, output_graph)
+            
+#         elif transform_type == rdflib.term.URIRef(u'http://dml.org/dml/cla#CollectionLevelTuningFrequencyStatistics'):
+
+#             transforms.tuningFrequencyStatistics.run(transform,input_f_files, output_graph)
+
+#         elif transform_type == rdflib.term.URIRef(u'http://dml.org/dml/cla#CollectionLevelSemitone'):
+#             transforms.semitoneHistogram.run(transform, input_f_files, output_graph)
+
+
+# # Find all transforms, and their associated input files,
+# # from rdf_graph
+# def find_transforms_in_n3(rdf_graph):
+
+#     q1 = rdf_graph.query(
+#         """prefix dml:     <http://dml.org/dml/cla#>
+#             SELECT ?comp ?function
+#             WHERE {
+#                 ?comp a dml:Computation .
+#                 ?comp dml:function ?function .
+#             }""")
+
+#     for row in q1:
+
+#         inputs = rdf_graph.query(
+#             """prefix dml:     <http://dml.org/dml/cla#>
+#                 SELECT ?input 
+#                 WHERE {
+#                     ?comp dml:input ?input .
+#                 }""")
+
+#     transforms = dict()
+
+#     for row in qres:
+
+#         comp = row.comp
+#         input = row.input
+#         transform_type = row.transform_type
+
+#         if (comp, transform_type) in transforms:
+
+#             transforms[(comp, transform_type)].append(input)
+
+#         else:
+
+#             transforms[(comp, transform_type)] = [input]
+
+#     return transforms
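+
+# # Sketch of the mapping the code above appears to build (values are
+# # illustrative): a dict keyed by (computation, transform type) whose
+# # values are lists of input file URIs, e.g.
+# #   {(comp, URIRef('http://dml.org/dml/cla#CollectionLevelKeyTonic')):
+# #       [URIRef('file:///path/to/feature.n3')]}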
+
+# # Determine the mapping between feature file URIs and
+# # their source audio file URIs
+# def map_audio_to_feature_files():    
+
+#     # Loop through audio files
+#     lines = [line.strip() for line in args.audio_files]
+
+#     for audio_file in lines:
+
+#         print "sonic-annotator -T " + args.transforms + " --rdf-basedir " + args.basedir + " <" + audio_file + ">"
+
+#     audio_to_feature_file_dict = dict()
+
+#     for (dirpath, dirnames, filenames) in walk(args.basedir):
+#         for file in filenames:
+
+#             print "found file: " + file
+
+#             if file.endswith(".n3"):
+
+#                 print "found n3 file: " + file
+
+#                 # open and parse n3 file
+#                 rdf_graph = Graph()
+#                 rdf_graph.parse(os.path.join(dirpath, file), format="n3")
+
+#                 # find subject in ?subject a mo:AudioFile
+#                 qres = rdf_graph.query(
+#                     """prefix mo: <http://purl.org/ontology/mo/>
+#                        SELECT ?audio_file
+#                        WHERE {
+#                           ?audio_file a mo:AudioFile .
+#                        }""")
+
+#                 print len(qres)
+
+#                 for row in qres:
+
+#                     print("audio file URI is %s" % row.audio_file.n3())
+#                     print("feature file URI is %s" % os.path.join(os.getcwd(), dirpath, file))
+#                     audio_to_feature_file_dict[row.audio_file.n3()] = os.path.join(os.getcwd(), dirpath, file)
+
+#                 # add full file URI, subject to dict
+
+#     print audio_to_feature_file_dict
+
+if __name__ == "__main__":
+
+    # parser = argparse.ArgumentParser()
+
+    # # parser.add_argument("-T", "--transforms", help="the URI of an n3 (RDF) file describing one or more transforms, and the files to which they should be applied")
+    # parser.add_argument("-b", "--basedir", help="the URI of the base output directory")
+
+    # args = parser.parse_args()
+
+    main()
+