# HG changeset patch
# User Chris Cannam
# Date 1337954301 -3600
# Node ID 51d3b8e816b82ba7247466400859d34615cafef5
# Parent 66c3f4e060e965fee1bfd6f14910617c6c23c8f9
# Parent 547b0353337576bc07ffc65649e8aa8b3dd360c8
Merge from branch "dataquay". Next release will require Dataquay v0.9

diff -r 66c3f4e060e9 -r 51d3b8e816b8 configure
--- a/configure Wed May 23 17:17:17 2012 +0100
+++ b/configure Fri May 25 14:58:21 2012 +0100
@@ -1,13 +1,11 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for SVcore 1.8.
+# Generated by GNU Autoconf 2.69 for SVcore 1.8.
 #
 # Report bugs to .
 #
 #
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
+# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.
 #
 #
 # This configure script is free software; the Free Software Foundation
@@ -136,6 +134,31 @@
 # CDPATH.
 (unset CDPATH) >/dev/null 2>&1 && unset CDPATH
 
+# Use a proper internal environment variable to ensure we don't fall
+ # into an infinite loop, continuously re-executing ourselves.
+ if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then
+ _as_can_reexec=no; export _as_can_reexec;
+ # We cannot yet assume a decent shell, so we have to provide a
+# neutralization value for shells without unset; and this also
+# works around shells that cannot unset nonexistent variables.
+# Preserve -v and -x to the replacement shell.
+BASH_ENV=/dev/null
+ENV=/dev/null
+(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+case $- in # ((((
+ *v*x* | *x*v* ) as_opts=-vx ;;
+ *v* ) as_opts=-v ;;
+ *x* ) as_opts=-x ;;
+ * ) as_opts= ;;
+esac
+exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
+# Admittedly, this is quite paranoid, since all the known shells bail
+# out after a failed `exec'.
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
+as_fn_exit 255
+ fi
+ # We don't want this to propagate to other subprocesses.
+ { _as_can_reexec=; unset _as_can_reexec;}
 if test "x$CONFIG_SHELL" = x; then
 as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
 emulate sh
@@ -169,7 +192,8 @@
 else
 exitcode=1; echo positional parameters were not saved.
 fi
-test x\$exitcode = x0 || exit 1"
+test x\$exitcode = x0 || exit 1
+test -x / || exit 1"
 as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
 as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
 eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
@@ -214,21 +238,25 @@
 
 
 if test "x$CONFIG_SHELL" != x; then :
- # We cannot yet assume a decent shell, so we have to provide a
- # neutralization value for shells without unset; and this also
- # works around shells that cannot unset nonexistent variables.
- # Preserve -v and -x to the replacement shell.
- BASH_ENV=/dev/null
- ENV=/dev/null
- (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
- export CONFIG_SHELL
- case $- in # ((((
- *v*x* | *x*v* ) as_opts=-vx ;;
- *v* ) as_opts=-v ;;
- *x* ) as_opts=-x ;;
- * ) as_opts= ;;
- esac
- exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+ export CONFIG_SHELL
+ # We cannot yet assume a decent shell, so we have to provide a
+# neutralization value for shells without unset; and this also
+# works around shells that cannot unset nonexistent variables.
+# Preserve -v and -x to the replacement shell.
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 fi if test x$as_have_required = xno; then : @@ -331,6 +359,14 @@ } # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take @@ -452,6 +488,10 @@ chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). @@ -486,16 +526,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -507,28 +547,8 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -631,10 +651,8 @@ portaudio_2_0_CFLAGS liblo_LIBS liblo_CFLAGS -redland_LIBS -redland_CFLAGS -rasqal_LIBS -rasqal_CFLAGS +dataquay_LIBS +dataquay_CFLAGS rubberband_LIBS rubberband_CFLAGS vamphostsdk_LIBS @@ -746,10 +764,8 @@ vamphostsdk_LIBS rubberband_CFLAGS rubberband_LIBS -rasqal_CFLAGS -rasqal_LIBS -redland_CFLAGS -redland_LIBS +dataquay_CFLAGS +dataquay_LIBS liblo_CFLAGS liblo_LIBS portaudio_2_0_CFLAGS @@ -1223,8 +1239,6 @@ if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe - $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. 
- If a cross compiler is detected then cross compile mode will be used" >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi @@ -1423,13 +1437,10 @@ C compiler flags for rubberband, overriding pkg-config rubberband_LIBS linker flags for rubberband, overriding pkg-config - rasqal_CFLAGS - C compiler flags for rasqal, overriding pkg-config - rasqal_LIBS linker flags for rasqal, overriding pkg-config - redland_CFLAGS - C compiler flags for redland, overriding pkg-config - redland_LIBS - linker flags for redland, overriding pkg-config + dataquay_CFLAGS + C compiler flags for dataquay, overriding pkg-config + dataquay_LIBS + linker flags for dataquay, overriding pkg-config liblo_CFLAGS C compiler flags for liblo, overriding pkg-config liblo_LIBS linker flags for liblo, overriding pkg-config @@ -1524,9 +1535,9 @@ if $ac_init_version; then cat <<\_ACEOF SVcore configure 1.8 -generated by GNU Autoconf 2.68 - -Copyright (C) 2010 Free Software Foundation, Inc. +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF @@ -1841,7 +1852,7 @@ test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext + test -x conftest$ac_exeext }; then : ac_retval=0 else @@ -1864,7 +1875,7 @@ running configure, to aid debugging if configure makes a mistake. It was created by SVcore $as_me 1.8, which was -generated by GNU Autoconf 2.68. Invocation command line was +generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2258,7 +2269,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2298,7 +2309,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2351,7 +2362,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2392,7 +2403,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue @@ -2450,7 +2461,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2494,7 +2505,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2940,8 +2951,7 @@ /* end confdefs.h. */ #include #include -#include -#include +struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); @@ -3054,7 +3064,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3098,7 +3108,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3349,7 +3359,7 @@ # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. @@ -3418,7 +3428,7 @@ test -z "$as_dir" && as_dir=. for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ @@ -3599,7 +3609,7 @@ for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue + as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. # Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in @@ -3665,7 +3675,7 @@ for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue + as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in @@ -3858,7 +3868,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3901,7 +3911,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3972,7 +3982,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_QMAKE="$QTDIR/bin/qmake-qt4" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4011,7 +4021,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_QMAKE="$QTDIR/bin/qmake" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4050,7 +4060,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_QMAKE="$QTDIR/bin/qmake.exe" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4089,7 +4099,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_QMAKE="qmake-qt4" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4128,7 +4138,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_QMAKE="qmake" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5157,10 +5167,10 @@ SV_MODULE_MODULE=vamphostsdk -SV_MODULE_VERSION_TEST="vamp-hostsdk >= 2.0" +SV_MODULE_VERSION_TEST="vamp-hostsdk >= 2.3.1" SV_MODULE_HEADER=vamp-hostsdk/PluginLoader.h -SV_MODULE_LIB= -SV_MODULE_FUNC= +SV_MODULE_LIB=vamp-hostsdk +SV_MODULE_FUNC=libvamphostsdk_v_2_3_1_present SV_MODULE_HAVE=HAVE_$(echo vamphostsdk | tr 'a-z' 'A-Z') SV_MODULE_FAILED=1 if test -n "$vamphostsdk_LIBS" ; then @@ -5458,18 +5468,18 @@ fi -SV_MODULE_MODULE=rasqal -SV_MODULE_VERSION_TEST="rasqal >= 0.9.19" -SV_MODULE_HEADER=rasqal/rasqal.h -SV_MODULE_LIB=rasqal -SV_MODULE_FUNC=rasqal_new_world -SV_MODULE_HAVE=HAVE_$(echo rasqal | tr 'a-z' 'A-Z') +SV_MODULE_MODULE=dataquay +SV_MODULE_VERSION_TEST="dataquay >= 0.9" +SV_MODULE_HEADER=dataquay/Uri.h +SV_MODULE_LIB=dataquay +SV_MODULE_FUNC= +SV_MODULE_HAVE=HAVE_$(echo dataquay | tr 'a-z' 'A-Z') SV_MODULE_FAILED=1 -if test -n "$rasqal_LIBS" ; then +if test -n "$dataquay_LIBS" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 $as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} - CXXFLAGS="$CXXFLAGS $rasqal_CFLAGS" - LIBS="$LIBS $rasqal_LIBS" + CXXFLAGS="$CXXFLAGS $dataquay_CFLAGS" + LIBS="$LIBS $dataquay_LIBS" SV_MODULE_FAILED="" fi if test -z "$SV_MODULE_VERSION_TEST" ; then @@ -5478,11 +5488,11 @@ if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then pkg_failed=no -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for rasqal" >&5 -$as_echo_n "checking for rasqal... " >&6; } - -if test -n "$rasqal_CFLAGS"; then - pkg_cv_rasqal_CFLAGS="$rasqal_CFLAGS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for dataquay" >&5 +$as_echo_n "checking for dataquay... " >&6; } + +if test -n "$dataquay_CFLAGS"; then + pkg_cv_dataquay_CFLAGS="$dataquay_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 @@ -5490,7 +5500,7 @@ ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_rasqal_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + pkg_cv_dataquay_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes @@ -5498,8 +5508,8 @@ else pkg_failed=untried fi -if test -n "$rasqal_LIBS"; then - pkg_cv_rasqal_LIBS="$rasqal_LIBS" +if test -n "$dataquay_LIBS"; then + pkg_cv_dataquay_LIBS="$dataquay_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 @@ -5507,7 +5517,7 @@ ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_rasqal_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + pkg_cv_dataquay_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes @@ -5528,12 +5538,12 @@ _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - rasqal_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + dataquay_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` else - rasqal_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + dataquay_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` fi # Put the nasty error message in config.log where it belongs - echo "$rasqal_PKG_ERRORS" >&5 + echo "$dataquay_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 $as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} @@ -5543,162 +5553,11 @@ { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 $as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} else - rasqal_CFLAGS=$pkg_cv_rasqal_CFLAGS - rasqal_LIBS=$pkg_cv_rasqal_LIBS + dataquay_CFLAGS=$pkg_cv_dataquay_CFLAGS + dataquay_LIBS=$pkg_cv_dataquay_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } - HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $rasqal_CFLAGS";LIBS="$LIBS $rasqal_LIBS";SV_MODULE_FAILED="" -fi -fi -if test -n "$SV_MODULE_FAILED"; then - as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` -ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - HAVES="$HAVES $SV_MODULE_HAVE" -else - as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 -fi - - - if test -n "$SV_MODULE_LIB"; then - as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 -$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } -if eval \${$as_ac_Lib+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-l$SV_MODULE_LIB $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char $SV_MODULE_FUNC (); -int -main () -{ -return $SV_MODULE_FUNC (); - ; - return 0; -} -_ACEOF -if ac_fn_cxx_try_link "$LINENO"; then : - eval "$as_ac_Lib=yes" -else - eval "$as_ac_Lib=no" -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -eval ac_res=\$$as_ac_Lib - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : - LIBS="$LIBS -l$SV_MODULE_LIB" -else - as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 -fi - - fi -fi - - -SV_MODULE_MODULE=redland -SV_MODULE_VERSION_TEST="redland >= 1.0.10" -SV_MODULE_HEADER=librdf.h -SV_MODULE_LIB=rdf -SV_MODULE_FUNC=librdf_new_world -SV_MODULE_HAVE=HAVE_$(echo redland | tr 'a-z' 'A-Z') -SV_MODULE_FAILED=1 -if test -n "$redland_LIBS" ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 -$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} - CXXFLAGS="$CXXFLAGS $redland_CFLAGS" - LIBS="$LIBS $redland_LIBS" - SV_MODULE_FAILED="" -fi -if test -z "$SV_MODULE_VERSION_TEST" ; then - SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE -fi -if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then - -pkg_failed=no -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for redland" >&5 -$as_echo_n "checking for redland... " >&6; } - -if test -n "$redland_CFLAGS"; then - pkg_cv_redland_CFLAGS="$redland_CFLAGS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 - ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - pkg_cv_redland_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` - test "x$?" != "x0" && pkg_failed=yes -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi -if test -n "$redland_LIBS"; then - pkg_cv_redland_LIBS="$redland_LIBS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 - ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - pkg_cv_redland_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` - test "x$?" 
!= "x0" && pkg_failed=yes -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi - - - -if test $pkg_failed = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no -fi - if test $_pkg_short_errors_supported = yes; then - redland_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` - else - redland_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$redland_PKG_ERRORS" >&5 - - { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 -$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} -elif test $pkg_failed = untried; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 -$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} -else - redland_CFLAGS=$pkg_cv_redland_CFLAGS - redland_LIBS=$pkg_cv_redland_LIBS - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $redland_CFLAGS";LIBS="$LIBS $redland_LIBS";SV_MODULE_FAILED="" + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $dataquay_CFLAGS";LIBS="$LIBS $dataquay_LIBS";SV_MODULE_FAILED="" fi fi if test -n "$SV_MODULE_FAILED"; then @@ -7620,16 +7479,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -7689,28 +7548,16 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -7732,7 +7579,7 @@ # values after options handling. ac_log=" This file was extended by SVcore $as_me 1.8, which was -generated by GNU Autoconf 2.68. Invocation command line was +generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS @@ -7785,10 +7632,10 @@ ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ SVcore config.status 1.8 -configured by $0, generated by GNU Autoconf 2.68, +configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" -Copyright (C) 2010 Free Software Foundation, Inc. +Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." @@ -7867,7 +7714,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then - set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' diff -r 66c3f4e060e9 -r 51d3b8e816b8 configure.ac --- a/configure.ac Wed May 23 17:17:17 2012 +0100 +++ b/configure.ac Fri May 25 14:58:21 2012 +0100 @@ -80,10 +80,9 @@ SV_MODULE_REQUIRED([sndfile],[sndfile >= 1.0.16],[sndfile.h],[sndfile],[sf_open]) SV_MODULE_REQUIRED([samplerate],[samplerate >= 0.1.2],[samplerate.h],[samplerate],[src_new]) SV_MODULE_REQUIRED([vamp],[vamp >= 2.1],[vamp/vamp.h],[],[]) -SV_MODULE_REQUIRED([vamphostsdk],[vamp-hostsdk >= 2.0],[vamp-hostsdk/PluginLoader.h],[],[]) +SV_MODULE_REQUIRED([vamphostsdk],[vamp-hostsdk >= 2.3.1],[vamp-hostsdk/PluginLoader.h],[vamp-hostsdk],[libvamphostsdk_v_2_3_1_present]) SV_MODULE_REQUIRED([rubberband],[rubberband],[rubberband/RubberBandStretcher.h],[rubberband],[rubberband_new]) -SV_MODULE_REQUIRED([rasqal],[rasqal >= 0.9.19],[rasqal/rasqal.h],[rasqal],[rasqal_new_world]) -SV_MODULE_REQUIRED([redland],[redland >= 1.0.10],[librdf.h],[rdf],[librdf_new_world]) +SV_MODULE_REQUIRED([dataquay],[dataquay >= 0.9],[dataquay/Uri.h],[dataquay],[]) SV_MODULE_OPTIONAL([liblo],[],[lo/lo.h],[lo],[lo_address_new]) SV_MODULE_OPTIONAL([portaudio_2_0],[portaudio-2.0 >= 19],[portaudio.h],[portaudio],[Pa_IsFormatSupported]) diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/PluginRDFDescription.cpp --- a/rdf/PluginRDFDescription.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/PluginRDFDescription.cpp Fri May 25 14:58:21 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. 
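The rdf/ source changes that follow replace SimpleSPARQLQuery-based SPARQL SELECTs with Dataquay's triple-matching API. A minimal sketch of the recurring pattern, assuming a Dataquay::BasicStore already populated with the plugin's RDF (illustrative only; the hunks below show the real call sites, and the dataquay header name is assumed from the 0.9 layout):

#include <dataquay/BasicStore.h>   // header layout assumed for Dataquay 0.9
#include <QString>

// Old shape: SELECT ?name WHERE { <plugin> vamp:name ?name }
// New shape: ask the store to complete a partially bound triple.
QString lookupPluginName(const Dataquay::BasicStore *index, Dataquay::Uri plugin)
{
    Dataquay::Node n = index->complete(
        Dataquay::Triple(plugin, index->expand("vamp:name"), Dataquay::Node()));
    if (n.type == Dataquay::Node::Literal && n.value != "") return n.value;
    return QString();
}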
This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -16,16 +16,24 @@ #include "PluginRDFDescription.h" #include "PluginRDFIndexer.h" -#include "SimpleSPARQLQuery.h" #include "base/Profiler.h" #include "plugin/PluginIdentifier.h" +#include + #include using std::cerr; using std::endl; +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; + PluginRDFDescription::PluginRDFDescription(QString pluginId) : m_pluginId(pluginId), m_haveDescription(false) @@ -172,93 +180,52 @@ { Profiler profiler("PluginRDFDescription::index"); - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + Uri plugin(m_pluginUri); - QString queryTemplate = - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " PREFIX dc: " - " SELECT ?%3 " - " WHERE { " - " <%1> %2 ?%3 . " - " }") - .arg(m_pluginUri); + Node n = index->complete + (Triple(plugin, index->expand("vamp:name"), Node())); - SimpleSPARQLQuery::Value v; - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("vamp:name").arg("name"), "name"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginName = v.value; + if (n.type == Node::Literal && n.value != "") { + m_pluginName = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("dc:description").arg("description"), "description"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginDescription = v.value; + n = index->complete + (Triple(plugin, index->expand("dc:description"), Node())); + + if (n.type == Node::Literal && n.value != "") { + m_pluginDescription = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?name " - " WHERE { " - " <%1> foaf:maker ?maker . " - " ?maker foaf:name ?name . " - " }") - .arg(m_pluginUri), - "name"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginMaker = v.value; + n = index->complete + (Triple(plugin, index->expand("foaf:maker"), Node())); + + if (n.type == Node::URI || n.type == Node::Blank) { + n = index->complete(Triple(n, index->expand("foaf:name"), Node())); + if (n.type == Node::Literal && n.value != "") { + m_pluginMaker = n.value; + } } // If we have a more-information URL for this plugin, then we take - // that. Otherwise, a more-information URL for the plugin - // library would do nicely. Failing that, we could perhaps use - // any foaf:page URL at all that appears in the file -- but - // perhaps that would be unwise + // that. Otherwise, a more-information URL for the plugin library + // would do nicely. - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?page " - " WHERE { " - " <%1> foaf:page ?page . 
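The "vamp:name", "dc:description" and "foaf:maker" names used below are prefixed names; expand() resolves them against prefixes registered on the store with addPrefix() (see the PluginRDFIndexer hunk further down). A small sketch of that registration, with the prefix URIs taken from that hunk:

#include <dataquay/BasicStore.h>   // header name assumed

void registerPluginPrefixes(Dataquay::BasicStore &store)
{
    store.addPrefix("vamp", Dataquay::Uri("http://purl.org/ontology/vamp/"));
    store.addPrefix("foaf", Dataquay::Uri("http://xmlns.com/foaf/0.1/"));
    store.addPrefix("dc",   Dataquay::Uri("http://purl.org/dc/elements/1.1/"));
    // After this, store.expand("vamp:name") denotes the full URI
    // http://purl.org/ontology/vamp/name
}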
" - " }") - .arg(m_pluginUri), - "page"); + n = index->complete + (Triple(plugin, index->expand("foaf:page"), Node())); - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { + if (n.type == Node::URI && n.value != "") { + m_pluginInfoURL = n.value; + } - m_pluginInfoURL = v.value; + n = index->complete + (Triple(Node(), index->expand("vamp:available_plugin"), plugin)); - } else { - - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?page " - " WHERE { " - " ?library vamp:available_plugin <%1> ; " - " a vamp:PluginLibrary ; " - " foaf:page ?page . " - " }") - .arg(m_pluginUri), - "page"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - - m_pluginInfoURL = v.value; + if (n.value != "") { + n = index->complete(Triple(n, index->expand("foaf:page"), Node())); + if (n.type == Node::URI && n.value != "") { + m_pluginInfoURL = n.value; } } @@ -270,87 +237,43 @@ { Profiler profiler("PluginRDFDescription::indexOutputs"); - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + Uri plugin(m_pluginUri); - SimpleSPARQLQuery query - (m, - QString - ( - " PREFIX vamp: " + Nodes outputs = index->match + (Triple(plugin, index->expand("vamp:output"), Node())).objects(); - " SELECT ?output " - - " WHERE { " - " <%1> vamp:output ?output . " - " } " - ) - .arg(m_pluginUri)); - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - cerr << "ERROR: PluginRDFDescription::index: ERROR: Failed to query outputs for <" - << m_pluginUri << ">: " - << query.getErrorString() << endl; - return false; - } - - if (results.empty()) { + if (outputs.empty()) { cerr << "ERROR: PluginRDFDescription::indexURL: NOTE: No outputs defined for <" << m_pluginUri << ">" << endl; return false; } - // Note that an output may appear more than once, if it inherits - // more than one type (e.g. DenseOutput and QuantizedOutput). 
So - // these results must accumulate + foreach (Node output, outputs) { - for (int i = 0; i < results.size(); ++i) { - - if (results[i]["output"].type != SimpleSPARQLQuery::URIValue || - results[i]["output"].value == "") { - cerr << "ERROR: PluginRDFDescription::indexURL: No valid URI for output " << i << " of plugin <" << m_pluginUri << ">" << endl; + if ((output.type != Node::URI && output.type != Node::Blank) || + output.value == "") { + cerr << "ERROR: PluginRDFDescription::indexURL: No valid URI for output " << output << " of plugin <" << m_pluginUri << ">" << endl; return false; } - - QString outputUri = results[i]["output"].value; - - SimpleSPARQLQuery::Value v; - - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString(" PREFIX vamp: " - " SELECT ?output_id " - " WHERE { <%1> vamp:identifier ?output_id } ") - .arg(outputUri), "output_id"); - - if (v.type != SimpleSPARQLQuery::LiteralValue || v.value == "") { - cerr << "ERROR: PluginRDFDescription::indexURL: No identifier for output <" << outputUri << ">" << endl; + + Node n = index->complete(Triple(output, index->expand("vamp:identifier"), Node())); + if (n.type != Node::Literal || n.value == "") { + cerr << "ERROR: PluginRDFDescription::indexURL: No vamp:identifier for output <" << output << ">" << endl; return false; } - QString outputId = v.value; + QString outputId = n.value; - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString(" PREFIX vamp: " - " SELECT ?output_type " - " WHERE { <%1> a ?output_type } ") - .arg(outputUri), "output_type"); + m_outputUriMap[outputId] = output.value; + n = index->complete(Triple(output, Uri("a"), Node())); QString outputType; - if (v.type == SimpleSPARQLQuery::URIValue) outputType = v.value; + if (n.type == Node::URI) outputType = n.value; - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString(" PREFIX vamp: " - " SELECT ?unit " - " WHERE { <%1> vamp:unit ?unit } ") - .arg(outputUri), "unit"); - + n = index->complete(Triple(output, index->expand("vamp:unit"), Node())); QString outputUnit; - if (v.type == SimpleSPARQLQuery::LiteralValue) outputUnit = v.value; - - m_outputUriMap[outputId] = outputUri; + if (n.type == Node::Literal) outputUnit = n.value; if (outputType.contains("DenseOutput")) { m_outputDispositions[outputId] = OutputDense; @@ -361,48 +284,32 @@ } else { m_outputDispositions[outputId] = OutputDispositionUnknown; } +// cerr << "output " << output << " -> id " << outputId << ", type " << outputType << ", unit " +// << outputUnit << ", disposition " << m_outputDispositions[outputId] << endl; if (outputUnit != "") { m_outputUnitMap[outputId] = outputUnit; } - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString(" PREFIX vamp: " - " PREFIX dc: " - " SELECT ?title " - " WHERE { <%1> dc:title ?title } ") - .arg(outputUri), "title"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_outputNames[outputId] = v.value; + n = index->complete(Triple(output, index->expand("dc:title"), Node())); + if (n.type == Node::Literal && n.value != "") { + m_outputNames[outputId] = n.value; } - QString queryTemplate = - QString(" PREFIX vamp: " - " SELECT ?%2 " - " WHERE { <%1> vamp:computes_%2 ?%2 } ") - .arg(outputUri); - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("event_type"), "event_type"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputEventTypeURIMap[outputId] = v.value; + n = index->complete(Triple(output, index->expand("vamp:computes_event_type"), Node())); +// cerr << output << " -> computes_event_type " 
<< n << endl; + if (n.type == Node::URI && n.value != "") { + m_outputEventTypeURIMap[outputId] = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("feature"), "feature"); + n = index->complete(Triple(output, index->expand("vamp:computes_feature"), Node())); + if (n.type == Node::URI && n.value != "") { + m_outputFeatureAttributeURIMap[outputId] = n.value; + } - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputFeatureAttributeURIMap[outputId] = v.value; - } - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("signal_type"), "signal_type"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputSignalTypeURIMap[outputId] = v.value; + n = index->complete(Triple(output, index->expand("vamp:computes_signal_type"), Node())); + if (n.type == Node::URI && n.value != "") { + m_outputSignalTypeURIMap[outputId] = n.value; } } diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/PluginRDFIndexer.cpp --- a/rdf/PluginRDFIndexer.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/PluginRDFIndexer.cpp Fri May 25 14:58:21 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -15,8 +15,6 @@ #include "PluginRDFIndexer.h" -#include "SimpleSPARQLQuery.h" - #include "data/fileio/CachedFile.h" #include "data/fileio/FileSource.h" #include "data/fileio/PlaylistFileReader.h" @@ -26,6 +24,9 @@ #include +#include +#include + #include #include #include @@ -40,12 +41,18 @@ using std::string; using Vamp::PluginHostAdapter; +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::RDFException; +using Dataquay::RDFDuplicateImportException; + PluginRDFIndexer * PluginRDFIndexer::m_instance = 0; -bool -PluginRDFIndexer::m_prefixesLoaded = false; - PluginRDFIndexer * PluginRDFIndexer::getInstance() { @@ -53,11 +60,21 @@ return m_instance; } -PluginRDFIndexer::PluginRDFIndexer() +PluginRDFIndexer::PluginRDFIndexer() : + m_index(new Dataquay::BasicStore) { + m_index->addPrefix("vamp", Uri("http://purl.org/ontology/vamp/")); + m_index->addPrefix("foaf", Uri("http://xmlns.com/foaf/0.1/")); + m_index->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); indexInstalledURLs(); } +const BasicStore * +PluginRDFIndexer::getIndex() +{ + return m_index; +} + PluginRDFIndexer::~PluginRDFIndexer() { QMutexLocker locker(&m_mutex); @@ -68,16 +85,20 @@ { vector paths = PluginHostAdapter::getPluginPath(); +// std::cerr << "\nPluginRDFIndexer::indexInstalledURLs: pid is " << getpid() << std::endl; + QStringList filters; + filters << "*.ttl"; + filters << "*.TTL"; filters << "*.n3"; filters << "*.N3"; filters << "*.rdf"; filters << "*.RDF"; - // Search each Vamp plugin path for a .rdf file that either has + // Search each Vamp plugin path for an RDF file that either has // name "soname", "soname:label" or "soname/label" plus RDF - // extension. Use that order of preference, and prefer n3 over - // rdf extension. + // extension. Use that order of preference, and prefer ttl over + // n3 over rdf extension. 
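getIndex() above gives other components read access to the single BasicStore into which every discovered plugin RDF document is imported; PluginRDFDescription uses exactly this route. A brief usage sketch (the helper name is hypothetical, and the dataquay header name is assumed):

#include "PluginRDFIndexer.h"
#include <dataquay/BasicStore.h>
#include <QString>

bool pluginLooksIndexed(QString pluginUri)
{
    const Dataquay::BasicStore *index =
        PluginRDFIndexer::getInstance()->getIndex();

    // Is anything recorded as a vamp:Plugin under this URI?
    Dataquay::Nodes hits = index->match(
        Dataquay::Triple(Dataquay::Uri(pluginUri),
                         Dataquay::Uri("a"),              // rdf:type shorthand
                         index->expand("vamp:Plugin"))).subjects();
    return !hits.empty();
}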
for (vector::const_iterator i = paths.begin(); i != paths.end(); ++i) { @@ -241,13 +262,11 @@ { Profiler profiler("PluginRDFIndexer::indexURL"); - loadPrefixes(); - -// SVDEBUG << "PluginRDFIndexer::indexURL(" << urlString << ")" << endl; +// std::cerr << "PluginRDFIndexer::indexURL(" << urlString.toStdString() << ")" << std::endl; QMutexLocker locker(&m_mutex); - QString localString = urlString; + QUrl local = urlString; if (FileSource::isRemote(urlString) && FileSource::canHandleScheme(urlString)) { @@ -257,91 +276,82 @@ return false; } - localString = QUrl::fromLocalFile(cf.getLocalFilename()).toString(); + local = QUrl::fromLocalFile(cf.getLocalFilename()); + + } else if (urlString.startsWith("file:")) { + + local = QUrl(urlString); + + } else { + + local = QUrl::fromLocalFile(urlString); } - return SimpleSPARQLQuery::addSourceToModel(localString); + try { + m_index->import(local, BasicStore::ImportFailOnDuplicates); + } catch (RDFDuplicateImportException &e) { + cerr << e.what() << endl; + cerr << "PluginRDFIndexer::pullURL: Document at " << urlString + << " duplicates triples found in earlier loaded document -- skipping it" << endl; + return false; + } catch (RDFException &e) { + cerr << e.what() << endl; + cerr << "PluginRDFIndexer::pullURL: Failed to import document from " + << urlString << ": " << e.what() << endl; + return false; + } + return true; } bool PluginRDFIndexer::reindex() { - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; - - SimpleSPARQLQuery query - (m, - QString - ( - " PREFIX vamp: " - - " SELECT ?plugin ?library ?plugin_id " - - " WHERE { " - " ?plugin a vamp:Plugin . " - " ?plugin vamp:identifier ?plugin_id . " - - " OPTIONAL { " - " ?library vamp:available_plugin ?plugin " - " } " - " } " - )); - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - cerr << "ERROR: PluginRDFIndexer::reindex: ERROR: Failed to query plugins from model: " - << query.getErrorString() << endl; - return false; - } - - if (results.empty()) { - cerr << "PluginRDFIndexer::reindex: NOTE: no vamp:Plugin resources found in indexed documents" << endl; - return false; - } + Triples tt = m_index->match + (Triple(Node(), Uri("a"), m_index->expand("vamp:Plugin"))); + Nodes plugins = tt.subjects(); bool foundSomething = false; bool addedSomething = false; - for (SimpleSPARQLQuery::ResultList::iterator i = results.begin(); - i != results.end(); ++i) { - - QString pluginUri = (*i)["plugin"].value; - QString soUri = (*i)["library"].value; - QString identifier = (*i)["plugin_id"].value; - - if (identifier == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No vamp:identifier for plugin <" - << pluginUri << ">" - << endl; + foreach (Node plugin, plugins) { + + if (plugin.type != Node::URI) { + cerr << "PluginRDFIndexer::reindex: Plugin has no URI: node is " + << plugin << endl; continue; } - if (soUri == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No implementation library for plugin <" - << pluginUri << ">" - << endl; + + Node idn = m_index->complete + (Triple(plugin, m_index->expand("vamp:identifier"), Node())); + + if (idn.type != Node::Literal) { + cerr << "PluginRDFIndexer::reindex: Plugin " << plugin + << " lacks vamp:identifier literal" << endl; continue; } - QString sonameQuery = - QString( - " PREFIX vamp: " - " SELECT ?library_id " - " WHERE { " - " <%1> vamp:identifier ?library_id " - " } " - ) - .arg(soUri); + Node libn = m_index->complete + (Triple(Node(), m_index->expand("vamp:available_plugin"), plugin)); - 
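indexURL() above now normalises plain paths, file: URLs and cached remote documents to a QUrl and hands them to BasicStore::import(), turning Dataquay's exceptions into a boolean result. A trimmed sketch of that load step (illustrative; the dataquay header names are assumed):

#include <dataquay/BasicStore.h>
#include <dataquay/RDFException.h>
#include <QUrl>
#include <QString>
#include <iostream>

bool importOneDocument(Dataquay::BasicStore &store, QString urlString)
{
    QUrl url = urlString.startsWith("file:") ? QUrl(urlString)
                                             : QUrl::fromLocalFile(urlString);
    try {
        store.import(url, Dataquay::BasicStore::ImportFailOnDuplicates);
    } catch (const Dataquay::RDFDuplicateImportException &) {
        // same triples already present from an earlier document: skip it
        return false;
    } catch (const Dataquay::RDFException &e) {
        std::cerr << "import failed: " << e.what() << std::endl;
        return false;
    }
    return true;
}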
SimpleSPARQLQuery::Value sonameValue = - SimpleSPARQLQuery::singleResultQuery(m, sonameQuery, "library_id"); - QString soname = sonameValue.value; - if (soname == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No identifier for library <" - << soUri << ">" - << endl; + if (libn.type != Node::URI) { + cerr << "PluginRDFIndexer::reindex: Plugin " << plugin + << " is not vamp:available_plugin in any library" << endl; continue; } + Node son = m_index->complete + (Triple(libn, m_index->expand("vamp:identifier"), Node())); + + if (son.type != Node::Literal) { + cerr << "PluginRDFIndexer::reindex: Library " << libn + << " lacks vamp:identifier for soname" << endl; + continue; + } + + QString pluginUri = plugin.value; + QString identifier = idn.value; + QString soname = son.value; + QString pluginId = PluginIdentifier::createIdentifier ("vamp", soname, identifier); @@ -373,23 +383,3 @@ return addedSomething; } - -void -PluginRDFIndexer::loadPrefixes() -{ - return; -//!!! - if (m_prefixesLoaded) return; - const char *prefixes[] = { - "http://purl.org/ontology/vamp/" - }; - for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { - CachedFile cf(prefixes[i], 0, "application/rdf+xml"); - if (!cf.isOK()) continue; - SimpleSPARQLQuery::addSourceToModel - (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); - } - m_prefixesLoaded = true; -} - - diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/PluginRDFIndexer.h --- a/rdf/PluginRDFIndexer.h Wed May 23 17:17:17 2012 +0100 +++ b/rdf/PluginRDFIndexer.h Fri May 25 14:58:21 2012 +0100 @@ -22,6 +22,10 @@ #include #include +namespace Dataquay { + class BasicStore; +} + class PluginRDFIndexer { public: @@ -48,6 +52,8 @@ QStringList getIndexedPluginIds(); + const Dataquay::BasicStore *getIndex(); + ~PluginRDFIndexer(); protected: @@ -64,8 +70,8 @@ bool pullURL(QString urlString); bool reindex(); - static void loadPrefixes(); - static bool m_prefixesLoaded; + Dataquay::BasicStore *m_index; + static PluginRDFIndexer *m_instance; }; diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/RDFExporter.cpp --- a/rdf/RDFExporter.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/RDFExporter.cpp Fri May 25 14:58:21 2012 +0100 @@ -172,6 +172,6 @@ QString RDFExporter::getSupportedExtensions() { - return "*.n3 *.ttl"; + return "*.ttl *.n3"; } diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/RDFFeatureWriter.cpp --- a/rdf/RDFFeatureWriter.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/RDFFeatureWriter.cpp Fri May 25 14:58:21 2012 +0100 @@ -174,7 +174,8 @@ m_startedStreamTransforms[stream].end()) { m_startedStreamTransforms[stream].insert(transform); writeLocalFeatureTypes - (stream, transform, output, m_rdfDescriptions[pluginId]); + (stream, transform, output, m_rdfDescriptions[pluginId], + summaryType); } if (m_singleFileName != "" || m_stdout) { @@ -390,14 +391,15 @@ << " tl:onTimeLine " << timelineURI << "\n ] .\n\n"; - stream << timelineURI << " a tl:Timeline .\n"; + stream << timelineURI << " a tl:Timeline .\n\n"; } void RDFFeatureWriter::writeLocalFeatureTypes(QTextStream *sptr, const Transform &transform, const Plugin::OutputDescriptor &od, - PluginRDFDescription &desc) + PluginRDFDescription &desc, + std::string summaryType) { QString outputId = od.identifier.c_str(); QTextStream &stream = *sptr; @@ -412,7 +414,8 @@ //!!! bin names, extents and so on can be written out using e.g. 
vamp:bin_names ( "a" "b" "c" ) - if (desc.getOutputDisposition(outputId) == + if (summaryType == "" && + desc.getOutputDisposition(outputId) == PluginRDFDescription::OutputDense) { // no feature events, so may need signal type but won't need diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/RDFFeatureWriter.h --- a/rdf/RDFFeatureWriter.h Wed May 23 17:17:17 2012 +0100 +++ b/rdf/RDFFeatureWriter.h Fri May 25 14:58:21 2012 +0100 @@ -77,7 +77,8 @@ void writeLocalFeatureTypes(QTextStream *, const Transform &, const Vamp::Plugin::OutputDescriptor &, - PluginRDFDescription &); + PluginRDFDescription &, + std::string summaryType); void writeSparseRDF(QTextStream *stream, const Transform &transform, diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/RDFImporter.cpp --- a/rdf/RDFImporter.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/RDFImporter.cpp Fri May 25 14:58:21 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -21,8 +21,6 @@ #include #include -#include "SimpleSPARQLQuery.h" - #include "base/ProgressReporter.h" #include "base/RealTime.h" @@ -38,6 +36,17 @@ #include "data/fileio/CachedFile.h" #include "data/fileio/FileFinder.h" +#include +#include + +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::PropertyObject; + using std::cerr; using std::endl; @@ -55,6 +64,9 @@ std::vector getDataModels(ProgressReporter *); protected: + BasicStore *m_store; + Uri expand(QString s) { return m_store->expand(s); } + QString m_uristring; QString m_errorString; std::map m_audioModelMap; @@ -62,9 +74,6 @@ std::map > m_labelValueMap; - static bool m_prefixesLoaded; - static void loadPrefixes(ProgressReporter *reporter); - void getDataModelsAudio(std::vector &, ProgressReporter *); void getDataModelsSparse(std::vector &, ProgressReporter *); void getDataModelsDense(std::vector &, ProgressReporter *); @@ -78,8 +87,6 @@ void fillModel(Model *, long, long, bool, std::vector &, QString); }; -bool RDFImporterImpl::m_prefixesLoaded = false; - QString RDFImporter::getKnownExtensions() { @@ -121,14 +128,35 @@ } RDFImporterImpl::RDFImporterImpl(QString uri, int sampleRate) : + m_store(new BasicStore), m_uristring(uri), m_sampleRate(sampleRate) { + //!!! retrieve data if remote... 
then + + m_store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + m_store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + m_store->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); + m_store->addPrefix("tl", Uri("http://purl.org/NET/c4dm/timeline.owl#")); + m_store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + m_store->addPrefix("rdfs", Uri("http://www.w3.org/2000/01/rdf-schema#")); + + try { + QUrl url; + if (uri.startsWith("file:")) { + url = QUrl(uri); + } else { + url = QUrl::fromLocalFile(uri); + } + m_store->import(url, BasicStore::ImportIgnoreDuplicates); + } catch (std::exception &e) { + m_errorString = e.what(); + } } RDFImporterImpl::~RDFImporterImpl() { - SimpleSPARQLQuery::closeSingleSource(m_uristring); + delete m_store; } bool @@ -146,8 +174,6 @@ std::vector RDFImporterImpl::getDataModels(ProgressReporter *reporter) { - loadPrefixes(reporter); - std::vector models; getDataModelsAudio(models, reporter); @@ -185,46 +211,31 @@ RDFImporterImpl::getDataModelsAudio(std::vector &models, ProgressReporter *reporter) { - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?source a mo:AudioFile . " - " ?signal a mo:Signal . " - " ?source mo:encodes ?signal } " - ) - .arg(m_uristring)); + Nodes sigs = m_store->match + (Triple(Node(), Uri("a"), expand("mo:Signal"))).subjects(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sig, sigs) { + + Node file = m_store->complete(Triple(Node(), expand("mo:encodes"), sig)); + if (file == Node()) { + file = m_store->complete(Triple(sig, expand("mo:available_as"), Node())); + } + if (file == Node()) { + std::cerr << "RDFImporterImpl::getDataModelsAudio: ERROR: No source for signal " << sig << std::endl; + continue; + } - if (results.empty()) { + QString signal = sig.value; + QString source = file.value; - SimpleSPARQLQuery query2 - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?source } " - ) - .arg(m_uristring)); - - results = query.execute(); - } - - for (int i = 0; i < (int)results.size(); ++i) { - - QString signal = results[i]["signal"].value; - QString source = results[i]["source"].value; - - SVDEBUG << "NOTE: Seeking signal source \"" << source << "\"..." << endl; + SVDEBUG << "NOTE: Seeking signal source \"" << source + << "\"..." << endl; FileSource *fs = new FileSource(source, reporter); if (fs->isAvailable()) { SVDEBUG << "NOTE: Source is available: Local filename is \"" - << fs->getLocalFilename() << "\"..." << endl; + << fs->getLocalFilename() + << "\"..." << endl; } #ifdef NO_SV_GUI @@ -235,7 +246,8 @@ } #else if (!fs->isAvailable()) { - SVDEBUG << "NOTE: Signal source \"" << source << "\" is not available, using file finder..." << endl; + SVDEBUG << "NOTE: Signal source \"" << source + << "\" is not available, using file finder..." << endl; FileFinder *ff = FileFinder::getInstance(); if (ff) { QString path = ff->find(FileFinder::AudioFile, @@ -284,44 +296,21 @@ reporter->setMessage(RDFImporter::tr("Importing dense signal data from RDF...")); } - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?feature ?feature_signal_type ?value " - " FROM <%1> " - - " WHERE { " - - " ?signal af:signal_feature ?feature . " - - " ?feature a ?feature_signal_type ; " - " af:value ?value . 
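getDataModelsAudio() below walks the store directly: every mo:Signal subject is found with match(), and the audio file node is completed from mo:encodes, falling back to the signal's mo:available_as. A condensed sketch of that lookup, assuming the surrounding file's includes:

QString findSourceForSignal(Dataquay::BasicStore *store, Dataquay::Node signal)
{
    using namespace Dataquay;

    // Prefer the file that mo:encodes this signal...
    Node file = store->complete(Triple(Node(), store->expand("mo:encodes"), signal));

    // ...otherwise fall back to whatever the signal is mo:available_as.
    if (file == Node()) {
        file = store->complete(Triple(signal, store->expand("mo:available_as"), Node()));
    }
    return file == Node() ? QString() : file.value;
}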
" - - " } " - ) - .arg(m_uristring)); + Nodes sigFeatures = m_store->match + (Triple(Node(), expand("af:signal_feature"), Node())).objects(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sf, sigFeatures) { - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } + if (sf.type != Node::URI && sf.type != Node::Blank) continue; + + Node t = m_store->complete(Triple(sf, expand("a"), Node())); + Node v = m_store->complete(Triple(sf, expand("af:value"), Node())); - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - for (int i = 0; i < (int)results.size(); ++i) { - - QString feature = results[i]["feature"].value; - QString type = results[i]["feature_signal_type"].value; - QString value = results[i]["value"].value; + QString feature = sf.value; + QString type = t.value; + QString value = v.value; + + if (type == "" || value == "") continue; int sampleRate = 0; int windowLength = 0; @@ -407,37 +396,21 @@ QString featureUri, QString featureTypeUri) { - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . " - " } " - ).arg(m_uristring); - - SimpleSPARQLQuery::Value v; + Node n = m_store->complete + (Triple(Uri(featureUri), expand("dc:title"), Node())); - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureUri), - "title"); - - if (v.value != "") { - SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << v.value << "\"" << endl; - m->setObjectName(v.value); + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureTypeUri), - "title"); - - if (v.value != "") { - SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << v.value << "\"" << endl; - m->setObjectName(v.value); + n = m_store->complete + (Triple(Uri(featureTypeUri), expand("dc:title"), Node())); + + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } @@ -449,91 +422,61 @@ int &sampleRate, int &windowLength, int &hopSize, int &width, int &height) { - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + Node dim = m_store->complete + (Triple(Uri(featureUri), expand("af:dimensions"), Node())); - QString dimensionsQuery - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?dimensions " - " FROM <%1> " + cerr << "Dimensions = \"" << dim.value << "\"" << endl; - " WHERE { " - - " <%2> af:dimensions ?dimensions . 
" - - " } " - ); - - SimpleSPARQLQuery::Value dimensionsValue = - SimpleSPARQLQuery::singleResultQuery - (s, dimensionsQuery.arg(m_uristring).arg(featureUri), "dimensions"); - - cerr << "Dimensions = \"" << dimensionsValue.value << "\"" - << endl; - - if (dimensionsValue.value != "") { - QStringList dl = dimensionsValue.value.split(" "); - if (dl.empty()) dl.push_back(dimensionsValue.value); + if (dim.type == Node::Literal && dim.value != "") { + QStringList dl = dim.value.split(" "); + if (dl.empty()) dl.push_back(dim.value); if (dl.size() > 0) height = dl[0].toInt(); if (dl.size() > 1) width = dl[1].toInt(); } + + // Looking for rate, hop, window from: + // + // ?feature mo:time ?time . + // ?time a tl:Interval . + // ?time tl:onTimeLine ?timeline . + // ?map tl:rangeTimeLine ?timeline . + // ?map tl:sampleRate ?rate . + // ?map tl:hopSize ?hop . + // ?map tl:windowLength ?window . - QString queryTemplate - ( - " PREFIX mo: " - " PREFIX af: " - " PREFIX tl: " + Node interval = m_store->complete(Triple(Uri(featureUri), expand("mo:time"), Node())); - " SELECT ?%3 " - " FROM <%1> " - - " WHERE { " - - " <%2> mo:time ?time . " - - " ?time a tl:Interval ; " - " tl:onTimeLine ?timeline . " - - " ?map tl:rangeTimeLine ?timeline . " - - " ?map tl:%3 ?%3 . " - - " } " - ); - - // Another laborious workaround for rasqal's failure to handle - // multiple optionals properly - - SimpleSPARQLQuery::Value srValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("sampleRate"), - "sampleRate"); - if (srValue.value != "") { - sampleRate = srValue.value.toInt(); + if (!m_store->contains(Triple(interval, expand("a"), expand("tl:Interval")))) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Feature time node " + << interval << " is not a tl:Interval" << endl; + return; } - SimpleSPARQLQuery::Value hopValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("hopSize"), - "hopSize"); - if (srValue.value != "") { - hopSize = hopValue.value.toInt(); + Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); + + if (tl == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Interval node " + << interval << " lacks tl:onTimeLine property" << endl; + return; } - SimpleSPARQLQuery::Value winValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("windowLength"), - "windowLength"); - if (winValue.value != "") { - windowLength = winValue.value.toInt(); + Node map = m_store->complete(Triple(Node(), expand("tl:rangeTimeLine"), tl)); + + if (map == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: No map for " + << "timeline node " << tl << endl; + } + + PropertyObject po(m_store, "tl:", map); + + if (po.hasProperty("sampleRate")) { + sampleRate = po.getProperty("sampleRate").toInt(); + } + if (po.hasProperty("hopSize")) { + hopSize = po.getProperty("hopSize").toInt(); + } + if (po.hasProperty("windowLength")) { + windowLength = po.getProperty("windowLength").toInt(); } cerr << "sr = " << sampleRate << ", hop = " << hopSize << ", win = " << windowLength << endl; @@ -547,227 +490,139 @@ reporter->setMessage(RDFImporter::tr("Importing event data from RDF...")); } - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + /* + This function is only used for sparse data (for dense data we + would be in getDataModelsDense instead). 
- // Our query is intended to retrieve every thing that has a time, - // and every feature type and value associated with a thing that - // has a time. + Our query is intended to retrieve every thing that has a time, + and every feature type and value associated with a thing that + has a time. - // We will then need to refine this big bag of results into a set - // of data models. + We will then need to refine this big bag of results into a set + of data models. - // Results that have different source signals should go into - // different models. + Results that have different source signals should go into + different models. - // Results that have different feature types should go into - // different models. + Results that have different feature types should go into + different models. + */ - // Results that are sparse should go into different models from - // those that are dense (we need to examine the timestamps to - // establish this -- if the timestamps are regular, the results - // are dense -- so we can't do it as we go along, only after - // collecting all results). - - // Timed things that have features associated with them should not - // appear directly in any model -- their features should appear - // instead -- and these should be different models from those used - // for timed things that do not have features. - - // As we load the results, we'll push them into a partially - // structured container that maps from source signal (URI as - // string) -> feature type (likewise) -> time -> list of values. - // If the source signal or feature type is unavailable, the empty - // string will do. - - QString prefixes = QString( - " PREFIX event: " - " PREFIX tl: " - " PREFIX mo: " - " PREFIX af: " - " PREFIX rdfs: " - ); - - QString queryString = prefixes + QString( - - " SELECT ?signal ?timed_thing ?timeline ?event_type ?value" - " FROM <%1>" - - " WHERE {" - - " ?signal a mo:Signal ." - - " ?signal mo:time ?interval ." - " ?interval tl:onTimeLine ?timeline ." - " ?time tl:onTimeLine ?timeline ." - " ?timed_thing event:time ?time ." - " ?timed_thing a ?event_type ." - - " OPTIONAL {" - " ?timed_thing af:feature ?value" - " }" - " }" - - ).arg(m_uristring); - - //!!! NB we're using rather old terminology for these things, apparently: - // beginsAt -> start - // onTimeLine -> timeline - - QString timeQueryString = prefixes + QString( - - " SELECT ?time FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:at ?time . " - " } " - - ).arg(m_uristring); - - QString rangeQueryString = prefixes + QString( - - " SELECT ?time ?duration FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:beginsAt ?time . " - " ?t tl:duration ?duration . " - " } " - - ).arg(m_uristring); - - QString labelQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> rdfs:label ?label . " - " } " - - ).arg(m_uristring); - - QString textQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> af:text ?label . " - " } " - - ).arg(m_uristring); - - SimpleSPARQLQuery query(s, queryString); - query.setProgressReporter(reporter); - -// cerr << "Query will be: " << queryString << endl; - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } - - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - /* - This function is now only used for sparse data (for dense data - we would be in getDataModelsDense instead). 
- - For sparse data, the determining factors in deciding what model - to use are: Do the features have values? and Do the features - have duration? - - We can run through the results and check off whether we find - values and duration for each of the source+type keys, and then - run through the source+type keys pushing each of the results - into a suitable model. - - Unfortunately, at this point we do not yet have any actual - timing data (time/duration) -- just the time URI. - - What we _could_ do is to create one of each type of model at the - start, for each of the source+type keys, and then push each - feature into the relevant model depending on what we find out - about it. Then return only non-empty models. - */ + Nodes sigs = m_store->match + (Triple(Node(), expand("a"), expand("mo:Signal"))).subjects(); // Map from timeline uri to event type to dimensionality to // presence of duration to model ptr. Whee! std::map > > > modelMap; - for (int i = 0; i < (int)results.size(); ++i) { + foreach (Node sig, sigs) { + + Node interval = m_store->complete(Triple(sig, expand("mo:time"), Node())); + if (interval == Node()) continue; - if (i % 4 == 0) { - if (reporter) reporter->setProgress(i/4); - } + Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); + if (tl == Node()) continue; - QString source = results[i]["signal"].value; - QString timeline = results[i]["timeline"].value; - QString type = results[i]["event_type"].value; - QString thinguri = results[i]["timed_thing"].value; + Nodes times = m_store->match(Triple(Node(), expand("tl:onTimeLine"), tl)).subjects(); - RealTime time; - RealTime duration; + foreach (Node tn, times) { + + Nodes timedThings = m_store->match(Triple(Node(), expand("event:time"), tn)).subjects(); - bool haveTime = false; - bool haveDuration = false; + foreach (Node thing, timedThings) { + + Node typ = m_store->complete(Triple(thing, expand("a"), Node())); + if (typ == Node()) continue; - QString label = ""; - bool text = (type.contains("Text") || type.contains("text")); // Ha, ha - bool note = (type.contains("Note") || type.contains("note")); // Guffaw + Node valu = m_store->complete(Triple(thing, expand("af:feature"), Node())); - if (text) { - label = SimpleSPARQLQuery::singleResultQuery - (s, textQueryString.arg(thinguri), "label").value; - } + QString source = sig.value; + QString timeline = tl.value; + QString type = typ.value; + QString thinguri = thing.value; - if (label == "") { - label = SimpleSPARQLQuery::singleResultQuery - (s, labelQueryString.arg(thinguri), "label").value; - } + /* + For sparse data, the determining factors in deciding + what model to use are: Do the features have values? + and Do the features have duration? 
- SimpleSPARQLQuery rangeQuery(s, rangeQueryString.arg(thinguri)); - SimpleSPARQLQuery::ResultList rangeResults = rangeQuery.execute(); - if (!rangeResults.empty()) { -// std::cerr << rangeResults.size() << " range results" << std::endl; - time = RealTime::fromXsdDuration - (rangeResults[0]["time"].value.toStdString()); - duration = RealTime::fromXsdDuration - (rangeResults[0]["duration"].value.toStdString()); -// std::cerr << "duration string " << rangeResults[0]["duration"].value << std::endl; - haveTime = true; - haveDuration = true; - } else { - QString timestring = SimpleSPARQLQuery::singleResultQuery - (s, timeQueryString.arg(thinguri), "time").value; -// SVDEBUG << "timestring = " << timestring << endl; - if (timestring != "") { - time = RealTime::fromXsdDuration(timestring.toStdString()); - haveTime = true; - } - } + We can run through the results and check off whether + we find values and duration for each of the + source+type keys, and then run through the + source+type keys pushing each of the results into a + suitable model. - QString valuestring = results[i]["value"].value; - std::vector values; + Unfortunately, at this point we do not yet have any + actual timing data (time/duration) -- just the time + URI. - if (valuestring != "") { - QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); - for (int j = 0; j < vsl.size(); ++j) { - bool success = false; - float v = vsl[j].toFloat(&success); - if (success) values.push_back(v); - } - } + What we _could_ do is to create one of each type of + model at the start, for each of the source+type + keys, and then push each feature into the relevant + model depending on what we find out about it. Then + return only non-empty models. + */ - int dimensions = 1; - if (values.size() == 1) dimensions = 2; - else if (values.size() > 1) dimensions = 3; + QString label = ""; + bool text = (type.contains("Text") || type.contains("text")); // Ha, ha + bool note = (type.contains("Note") || type.contains("note")); // Guffaw - Model *model = 0; + if (text) { + label = m_store->complete(Triple(thing, expand("af:text"), Node())).value; + } + + if (label == "") { + label = m_store->complete(Triple(thing, expand("rdfs:label"), Node())).value; + } - if (modelMap[timeline][type][dimensions].find(haveDuration) == - modelMap[timeline][type][dimensions].end()) { + RealTime time; + RealTime duration; + + bool haveTime = false; + bool haveDuration = false; + + Node at = m_store->complete(Triple(tn, expand("tl:at"), Node())); + + if (at != Node()) { + time = RealTime::fromXsdDuration(at.value.toStdString()); + haveTime = true; + } else { + //!!! 
NB we're using rather old terminology for these things, apparently: + // beginsAt -> start + // onTimeLine -> timeline + + Node start = m_store->complete(Triple(tn, expand("tl:beginsAt"), Node())); + Node dur = m_store->complete(Triple(tn, expand("tl:duration"), Node())); + if (start != Node() && dur != Node()) { + time = RealTime::fromXsdDuration + (start.value.toStdString()); + duration = RealTime::fromXsdDuration + (dur.value.toStdString()); + haveTime = haveDuration = true; + } + } + + QString valuestring = valu.value; + std::vector values; + + if (valuestring != "") { + QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); + for (int j = 0; j < vsl.size(); ++j) { + bool success = false; + float v = vsl[j].toFloat(&success); + if (success) values.push_back(v); + } + } + + int dimensions = 1; + if (values.size() == 1) dimensions = 2; + else if (values.size() > 1) dimensions = 3; + + Model *model = 0; + + if (modelMap[timeline][type][dimensions].find(haveDuration) == + modelMap[timeline][type][dimensions].end()) { /* SVDEBUG << "Creating new model: source = " << source << ", type = " << type << ", dimensions = " @@ -776,92 +631,69 @@ << endl; */ - if (!haveDuration) { + if (!haveDuration) { - if (dimensions == 1) { + if (dimensions == 1) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + } + } else if (dimensions == 2) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseTimeValueModel(m_sampleRate, 1, false); + } + } else { + // We don't have a three-dimensional sparse model, + // so use a note model. We do have some logic (in + // extractStructure below) for guessing whether + // this should after all have been a dense model, + // but it's hard to apply it because we don't have + // all the necessary timing data yet... hmm + model = new NoteModel(m_sampleRate, 1, false); + } - if (text) { - - model = new TextModel(m_sampleRate, 1, false); + } else { // haveDuration - } else { - - model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + if (note || (dimensions > 2)) { + model = new NoteModel(m_sampleRate, 1, false); + } else { + // If our units are frequency or midi pitch, we + // should be using a note model... hm + model = new RegionModel(m_sampleRate, 1, false); + } } - } else if (dimensions == 2) { + model->setRDFTypeURI(type); - if (text) { - - model = new TextModel(m_sampleRate, 1, false); - - } else { - - model = new SparseTimeValueModel(m_sampleRate, 1, false); + if (m_audioModelMap.find(source) != m_audioModelMap.end()) { + std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; + model->setSourceModel(m_audioModelMap[source]); } - } else { + QString title = m_store->complete + (Triple(typ, expand("dc:title"), Node())).value; + if (title == "") { + // take it from the end of the event type + title = type; + title.replace(QRegExp("^.*[/#]"), ""); + } + model->setObjectName(title); - // We don't have a three-dimensional sparse model, - // so use a note model. We do have some logic (in - // extractStructure below) for guessing whether - // this should after all have been a dense model, - // but it's hard to apply it because we don't have - // all the necessary timing data yet... 
hmm - - model = new NoteModel(m_sampleRate, 1, false); + modelMap[timeline][type][dimensions][haveDuration] = model; + models.push_back(model); } - } else { // haveDuration + model = modelMap[timeline][type][dimensions][haveDuration]; - if (note || (dimensions > 2)) { - - model = new NoteModel(m_sampleRate, 1, false); - - } else { - - // If our units are frequency or midi pitch, we - // should be using a note model... hm - - model = new RegionModel(m_sampleRate, 1, false); + if (model) { + long ftime = RealTime::realTime2Frame(time, m_sampleRate); + long fduration = RealTime::realTime2Frame(duration, m_sampleRate); + fillModel(model, ftime, fduration, haveDuration, values, label); } } - - model->setRDFTypeURI(type); - - if (m_audioModelMap.find(source) != m_audioModelMap.end()) { - std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; - model->setSourceModel(m_audioModelMap[source]); - } - - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . " - " } " - ).arg(m_uristring).arg(type); - QString title = SimpleSPARQLQuery::singleResultQuery - (s, titleQuery, "title").value; - if (title == "") { - // take it from the end of the event type - title = type; - title.replace(QRegExp("^.*[/#]"), ""); - } - model->setObjectName(title); - - modelMap[timeline][type][dimensions][haveDuration] = model; - models.push_back(model); - } - - model = modelMap[timeline][type][dimensions][haveDuration]; - - if (model) { - long ftime = RealTime::realTime2Frame(time, m_sampleRate); - long fduration = RealTime::realTime2Frame(duration, m_sampleRate); - fillModel(model, ftime, fduration, haveDuration, values, label); } } } @@ -975,33 +807,34 @@ { bool haveAudio = false; bool haveAnnotations = false; + bool haveRDF = false; - // This query is not expected to return any values, but if it - // executes successfully (leaving no error in the error string) - // then we know we have RDF - SimpleSPARQLQuery q(SimpleSPARQLQuery::QueryFromSingleSource, - QString(" SELECT ?x FROM <%1> WHERE { ?x } ") - .arg(url)); - - SimpleSPARQLQuery::ResultList r = q.execute(); - if (!q.isOK()) { - SimpleSPARQLQuery::closeSingleSource(url); + BasicStore *store = 0; + + // This is not expected to return anything useful, but if it does + // anything at all then we know we have RDF + try { + //!!! non-local document? 
+ store = BasicStore::load(QUrl(url)); + Triple t = store->matchOnce(Triple()); + if (t != Triple()) haveRDF = true; + } catch (std::exception &e) { + // nothing; haveRDF will be false so the next bit catches it + } + + if (!haveRDF) { + delete store; return NotRDF; } + store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + // "MO-conformant" structure for audio files - SimpleSPARQLQuery::Value value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?url a mo:AudioFile } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + Node n = store->complete(Triple(Node(), Uri("a"), store->expand("mo:AudioFile"))); + if (n != Node() && n.type == Node::URI) { haveAudio = true; @@ -1011,51 +844,29 @@ // (which is not properly in conformance with the Music // Ontology) - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?url } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { - haveAudio = true; + Nodes sigs = store->match(Triple(Node(), Uri("a"), store->expand("mo:Signal"))).subjects(); + foreach (Node sig, sigs) { + Node aa = store->complete(Triple(sig, store->expand("mo:available_as"), Node())); + if (aa != Node()) { + haveAudio = true; + break; + } } } SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " << haveAudio << endl; - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX event: " - " SELECT ?thing FROM <%1> " - " WHERE { ?thing event:time ?time } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + // can't call complete() with two Nothing nodes + n = store->matchOnce(Triple(Node(), store->expand("event:time"), Node())).c; + if (n != Node()) { haveAnnotations = true; } if (!haveAnnotations) { - - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX af: " - " SELECT ?thing FROM <%1> " - " WHERE { ?signal af:signal_feature ?thing } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + // can't call complete() with two Nothing nodes + n = store->matchOnce(Triple(Node(), store->expand("af:signal_feature"), Node())).c; + if (n != Node()) { haveAnnotations = true; } } @@ -1063,7 +874,7 @@ SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " << haveAnnotations << endl; - SimpleSPARQLQuery::closeSingleSource(url); + delete store; if (haveAudio) { if (haveAnnotations) { @@ -1082,25 +893,3 @@ return OtherRDFDocument; } -void -RDFImporterImpl::loadPrefixes(ProgressReporter *reporter) -{ - return; -//!!! 
- if (m_prefixesLoaded) return; - const char *prefixes[] = { - "http://purl.org/NET/c4dm/event.owl", - "http://purl.org/NET/c4dm/timeline.owl", - "http://purl.org/ontology/mo/", - "http://purl.org/ontology/af/", - "http://www.w3.org/2000/01/rdf-schema", - "http://purl.org/dc/elements/1.1/", - }; - for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { - CachedFile cf(prefixes[i], reporter, "application/rdf+xml"); - if (!cf.isOK()) continue; - SimpleSPARQLQuery::addSourceToModel - (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); - } - m_prefixesLoaded = true; -} diff -r 66c3f4e060e9 -r 51d3b8e816b8 rdf/RDFTransformFactory.cpp --- a/rdf/RDFTransformFactory.cpp Wed May 23 17:17:17 2012 +0100 +++ b/rdf/RDFTransformFactory.cpp Fri May 25 14:58:21 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -24,7 +24,6 @@ #include #include -#include "SimpleSPARQLQuery.h" #include "PluginRDFIndexer.h" #include "PluginRDFDescription.h" #include "base/ProgressReporter.h" @@ -32,10 +31,19 @@ #include "transform/TransformFactory.h" +#include +#include + using std::cerr; using std::endl; -typedef const unsigned char *STR; // redland's expected string type +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::PropertyObject; class RDFTransformFactoryImpl @@ -53,6 +61,7 @@ static QString writeTransformToRDF(const Transform &, QString); protected: + BasicStore *m_store; QString m_urlString; QString m_errorString; bool m_isRDF; @@ -108,14 +117,27 @@ } RDFTransformFactoryImpl::RDFTransformFactoryImpl(QString url) : + m_store(new BasicStore), m_urlString(url), m_isRDF(false) { + //!!! retrieve data if remote... then + m_store->addPrefix("vamp", Uri("http://purl.org/ontology/vamp/")); + try { + QUrl qurl; + if (url.startsWith("file:")) { + qurl = QUrl(url); + } else { + qurl = QUrl::fromLocalFile(url); + } + m_store->import(qurl, BasicStore::ImportIgnoreDuplicates); + m_isRDF = true; + } catch (...) { } } RDFTransformFactoryImpl::~RDFTransformFactoryImpl() { - SimpleSPARQLQuery::closeSingleSource(m_urlString); + delete m_store; } bool @@ -143,54 +165,25 @@ std::map uriTransformMap; - QString query = - " PREFIX vamp: " - - " SELECT ?transform ?plugin " - - " FROM <%2> " - - " WHERE { " - " ?transform a vamp:Transform ; " - " vamp:plugin ?plugin . " - " } "; - - SimpleSPARQLQuery transformsQuery - (SimpleSPARQLQuery::QueryFromSingleSource, query.arg(m_urlString)); - - SimpleSPARQLQuery::ResultList transformResults = transformsQuery.execute(); - - if (!transformsQuery.isOK()) { - m_errorString = transformsQuery.getErrorString(); - return transforms; - } - - m_isRDF = true; - - if (transformResults.empty()) { - SVDEBUG << "RDFTransformFactory: NOTE: No RDF/TTL transform descriptions found in document at <" << m_urlString << ">" << endl; - return transforms; - } - - // There are various queries we need to make that might include - // data from either the transform RDF or the model accumulated - // from plugin descriptions. 
For example, the transform RDF may - // specify the output's true URI, or it might have a blank node or - // some other URI with the appropriate vamp:identifier included in - // the file. To cover both cases, we need to add the file itself - // into the model and always query the model using the transform - // URI rather than querying the file itself subsequently. - - SimpleSPARQLQuery::addSourceToModel(m_urlString); + Nodes tnodes = m_store->match + (Triple(Node(), Uri("a"), m_store->expand("vamp:Transform"))).subjects(); PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); - for (int i = 0; i < transformResults.size(); ++i) { + foreach (Node tnode, tnodes) { - SimpleSPARQLQuery::KeyValueMap &result = transformResults[i]; + Node pnode = m_store->complete + (Triple(tnode, m_store->expand("vamp:plugin"), Node())); - QString transformUri = result["transform"].value; - QString pluginUri = result["plugin"].value; + if (pnode == Node()) { + cerr << "RDFTransformFactory: WARNING: No vamp:plugin for " + << "vamp:Transform node " << tnode + << ", skipping this transform" << endl; + continue; + } + + QString transformUri = tnode.value; + QString pluginUri = pnode.value; QString pluginId = indexer->getIdForPluginURI(pluginUri); if (pluginId == "") { @@ -214,12 +207,7 @@ uriTransformMap[transformUri] = transform; - // We have to do this a very long way round, to work around - // rasqal's current inability to handle correctly more than one - // OPTIONAL graph in a query - static const char *optionals[] = { - "output", "program", "summary_type", "step_size", @@ -234,63 +222,39 @@ QString optional = optionals[j]; - QString queryTemplate = - " PREFIX vamp: " - - " SELECT ?%1 " - - " WHERE { " - " <%2> vamp:%1 ?%1 " - " } "; - - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromModel, - queryTemplate.arg(optional).arg(transformUri)); - - SimpleSPARQLQuery::ResultList results = query.execute(); + Node onode = m_store->complete + (Triple(Uri(transformUri), + m_store->expand(QString("vamp:") + optional), Node())); - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return transforms; - } + if (onode.type != Node::Literal) continue; - if (results.empty()) continue; - - for (int k = 0; k < results.size(); ++k) { - - const SimpleSPARQLQuery::Value &v = results[k][optional]; - - if (v.type == SimpleSPARQLQuery::LiteralValue) { - - if (optional == "program") { - transform.setProgram(v.value); - } else if (optional == "summary_type") { - transform.setSummaryType - (transform.stringToSummaryType(v.value)); - } else if (optional == "step_size") { - transform.setStepSize(v.value.toUInt()); - } else if (optional == "block_size") { - transform.setBlockSize(v.value.toUInt()); - } else if (optional == "window_type") { - transform.setWindowType - (Window::getTypeForName - (v.value.toLower().toStdString())); - } else if (optional == "sample_rate") { - transform.setSampleRate(v.value.toFloat()); - } else if (optional == "start") { - transform.setStartTime - (RealTime::fromXsdDuration(v.value.toStdString())); - } else if (optional == "duration") { - transform.setDuration - (RealTime::fromXsdDuration(v.value.toStdString())); - } else { - cerr << "RDFTransformFactory: ERROR: Inconsistent optionals lists (unexpected optional \"" << optional << "\"" << endl; - } - } + if (optional == "program") { + transform.setProgram(onode.value); + } else if (optional == "summary_type") { + transform.setSummaryType + (transform.stringToSummaryType(onode.value)); + } else if (optional == "step_size") { + 
transform.setStepSize(onode.value.toUInt()); + } else if (optional == "block_size") { + transform.setBlockSize(onode.value.toUInt()); + } else if (optional == "window_type") { + transform.setWindowType + (Window::getTypeForName + (onode.value.toLower().toStdString())); + } else if (optional == "sample_rate") { + transform.setSampleRate(onode.value.toFloat()); + } else if (optional == "start") { + transform.setStartTime + (RealTime::fromXsdDuration(onode.value.toStdString())); + } else if (optional == "duration") { + transform.setDuration + (RealTime::fromXsdDuration(onode.value.toStdString())); + } else { + cerr << "RDFTransformFactory: ERROR: Inconsistent optionals lists (unexpected optional \"" << optional << "\"" << endl; } } - SVDEBUG << "RDFTransformFactory: NOTE: Transform is: " << endl; + cerr << "RDFTransformFactory: NOTE: Transform is: " << endl; cerr << transform.toXmlString() << endl; transforms.push_back(transform); @@ -303,33 +267,36 @@ RDFTransformFactoryImpl::setOutput(Transform &transform, QString transformUri) { - SimpleSPARQLQuery::Value outputValue = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromModel, - QString - ( - " PREFIX vamp: " - - " SELECT ?output_id " + Node outputNode = m_store->complete + (Triple(Uri(transformUri), m_store->expand("vamp:output"), Node())); + + if (outputNode == Node()) return true; - " WHERE { " - " <%1> vamp:output ?output . " - " ?output vamp:identifier ?output_id " - " } " - ) - .arg(transformUri), - "output_id"); - - if (outputValue.type == SimpleSPARQLQuery::NoValue) { - return true; + if (outputNode.type != Node::URI && outputNode.type != Node::Blank) { + m_errorString = QString("vamp:output for output of transform <%1> is not a URI or blank node").arg(transformUri); + return false; } - - if (outputValue.type != SimpleSPARQLQuery::LiteralValue) { + + // Now, outputNode might be the subject of a triple within m_store + // that tells us the vamp:identifier, or it might be the subject + // of a triple within the indexer that tells us it + + Node identNode = m_store->complete + (Triple(outputNode, m_store->expand("vamp:identifier"), Node())); + + if (identNode == Node()) { + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + identNode = index->complete + (Triple(outputNode, index->expand("vamp:identifier"), Node())); + } + + if (identNode == Node() || identNode.type != Node::Literal) { m_errorString = QString("No vamp:identifier found for output of transform <%1>, or vamp:identifier is not a literal").arg(transformUri); return false; } - transform.setOutput(outputValue.value); + transform.setOutput(identNode.value); return true; } @@ -339,43 +306,48 @@ RDFTransformFactoryImpl::setParameters(Transform &transform, QString transformUri) { - SimpleSPARQLQuery paramQuery - (SimpleSPARQLQuery::QueryFromModel, - QString - ( - " PREFIX vamp: " - - " SELECT ?param_id ?param_value " - - " WHERE { " - " <%1> vamp:parameter_binding ?binding . " - " ?binding vamp:parameter ?param ; " - " vamp:value ?param_value . 
" - " ?param vamp:identifier ?param_id " - " } " - ) - .arg(transformUri)); + Nodes bindings = m_store->match + (Triple(Uri(transformUri), m_store->expand("vamp:parameter_binding"), Node())).objects(); - SimpleSPARQLQuery::ResultList paramResults = paramQuery.execute(); - - if (!paramQuery.isOK()) { - m_errorString = paramQuery.getErrorString(); - return false; - } - - if (paramQuery.wasCancelled()) { - m_errorString = "Query cancelled"; - return false; - } - - for (int j = 0; j < paramResults.size(); ++j) { + foreach (Node binding, bindings) { + + Node paramNode = m_store->complete + (Triple(binding, m_store->expand("vamp:parameter"), Node())); + + if (paramNode == Node()) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:parameter for binding " << binding << endl; + continue; + } + + Node valueNode = m_store->complete + (Triple(binding, m_store->expand("vamp:value"), Node())); + + if (paramNode == Node()) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:value for binding " << binding << endl; + continue; + } - QString paramId = paramResults[j]["param_id"].value; - QString paramValue = paramResults[j]["param_value"].value; + // As with output above, paramNode might be the subject of a + // triple within m_store that tells us the vamp:identifier, or + // it might be the subject of a triple within the indexer that + // tells us it + + Node idNode = m_store->complete + (Triple(paramNode, m_store->expand("vamp:identifier"), Node())); + + if (idNode == Node()) { + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + idNode = index->complete + (Triple(paramNode, index->expand("vamp:identifier"), Node())); + } + + if (idNode == Node() || idNode.type != Node::Literal) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:identifier for parameter " << paramNode << endl; + continue; + } - if (paramId == "" || paramValue == "") continue; - - transform.setParameter(paramId, paramValue.toFloat()); + transform.setParameter(idNode.value, valueNode.value.toFloat()); } return true; diff -r 66c3f4e060e9 -r 51d3b8e816b8 svcore.pro --- a/svcore.pro Wed May 23 17:17:17 2012 +0100 +++ b/svcore.pro Fri May 25 14:58:21 2012 +0100 @@ -230,20 +230,18 @@ plugin/api/dssi_alsa_compat.c \ plugin/plugins/SamplePlayer.cpp -HEADERS += rdf/PluginRDFDescription.h \ - rdf/PluginRDFIndexer.h \ +HEADERS += rdf/PluginRDFIndexer.h \ + rdf/PluginRDFDescription.h \ rdf/RDFExporter.h \ rdf/RDFFeatureWriter.h \ rdf/RDFImporter.h \ - rdf/RDFTransformFactory.h \ - rdf/SimpleSPARQLQuery.h -SOURCES += rdf/PluginRDFDescription.cpp \ - rdf/PluginRDFIndexer.cpp \ + rdf/RDFTransformFactory.h +SOURCES += rdf/PluginRDFIndexer.cpp \ + rdf/PluginRDFDescription.cpp \ rdf/RDFExporter.cpp \ rdf/RDFFeatureWriter.cpp \ rdf/RDFImporter.cpp \ - rdf/RDFTransformFactory.cpp \ - rdf/SimpleSPARQLQuery.cpp + rdf/RDFTransformFactory.cpp HEADERS += system/Init.h \ system/System.h