# HG changeset patch
# User Chris Cannam
# Date 1341139980 -3600
# Node ID c10cb8782576c775397c58332bb1d16c5bb21bbd
# Parent 4efa7429cd85b454045587aa924657bca48870dc
# Parent aca6e61eaea35a9c3f865a68521820b17cce7d3d
Merge from branch "default"

diff -r 4efa7429cd85 -r c10cb8782576 .hgignore
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgignore Sun Jul 01 11:53:00 2012 +0100
@@ -0,0 +1,16 @@
+syntax: glob
+Makefile
+*/Makefile
+o/*
+*/o/*
+*/tmp_obj/*
+*/tmp_moc/*
+doc/html/
+*.o
+*.so
+*.so.*
+*.a
+*.wav
+*~
+*.orig
+*.rej
diff -r 4efa7429cd85 -r c10cb8782576 .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags Sun Jul 01 11:53:00 2012 +0100
@@ -0,0 +1,30 @@
+21792a550ec982d3b5977197dec26aa3d628ead6 last-cc-copyright
+21f86744d38e829c18637dffbdb351df1efee2cc sv-v1.4
+2fc6f3829f04c5ef0ff171d9b97d0a0a5343f185 sv1-1.0rc1
+40db5491bcf82c7c3294eddc2a83d3efee69e15c sv1-1.0pre2
+40db5491bcf82c7c3294eddc2a83d3efee69e15c sv1-1.0pre3
+4cd620bd4c619a2ddaa5dc1b6c4495c8ab2847c7 sv1-1.0pre1
+4fa2b135acbcdd43d024f32cb4e240f8363c1815 sv-v1.6
+524bcd89743b72caa6c1e78233df00659f9d2ec3 sv1-v1.0
+62789d79b98f21aca19745c134671382c616a1cd sv1-v1.2pre4
+7033e188b2b2fe6dba6887965739e94a2c1a1e08 sv1-1.0pre4
+7207e3eba44f15bb62dc2f4d1c412ce08e3f61d7 sv-v1.4rc1
+7cc6b7b0d8193b256ab9d8d85c792cb58dc0abda sv1-v1.2pre3
+9867f99e0bb7b2cf540e22c483a783304332c67e sv1-v1.2
+b1dc68507e463e65a9dcd6024a83880eb9130279 sv-v1.7.1
+bc4712c7d269c4718d37cf4747a15c1fbcdc768b sv1-v1.2pre5
+c30728d5625c58dae94f65c2a6f54e4512d2ee10 sv1-v0.9rc1
+cff476cfce772d06f660ef73ce9642b1285518e7 sv1-v1.3
+cff476cfce772d06f660ef73ce9642b1285518e7 sv1-v1.3rc1
+dfc4dd561bb6bc3df4126b800b686a0940271a74 sv-v1.5pre1
+eb1b517f5eeb2847374394683dbd835088e84b18 sv-v1.7
+f19437971e17e5acd1aa0b103fb62192c9c287fa sv-v1.7.2
+f9cf4b49b08bea3021486ee5f1179ccab21efd38 sv-v1.5
+fda016f64f7cec6818fe821a010997e7b65d6838 sv1-v0.9rc2
+611a4fa14dde377cc8d9778d10bd2b39f197a835 sv_v1.8
+97fbb6b3e7496c55a99afd3920f86446363dc8fa sv_v1.9
+b1b40fa0cf9cc84486fb4c42433e903ec1fbecd7 sonic-annotator-0.6
+97fbb6b3e7496c55a99afd3920f86446363dc8fa sv_v1.9
+579b2da21e7ad4e661e865ba7104c286fac3d510 sv_v1.9
+579b2da21e7ad4e661e865ba7104c286fac3d510 sv_v1.9
+9a0272c2d596b144ab145466cd8752c196a2016d sv_v1.9
diff -r 4efa7429cd85 -r c10cb8782576 COPYING
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING Sun Jul 01 11:53:00 2012 +0100
@@ -0,0 +1,280 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+ 675 Mass Ave, Cambridge, MA 02139, USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price.
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. 
(This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. 
+ +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS diff -r 4efa7429cd85 -r c10cb8782576 Doxyfile --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/Doxyfile Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,1234 @@ +# Doxyfile 1.4.4 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = svcore + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = 1.9 + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = doc + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Brazilian, Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, +# Dutch, Finnish, French, German, Greek, Hungarian, Italian, Japanese, +# Japanese-en (Japanese with English messages), Korean, Korean-en, Norwegian, +# Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, +# Swedish, and Ukrainian. + +OUTPUT_LANGUAGE = English + +# This tag can be used to specify the encoding used in the generated output. +# The encoding is not always determined by the language that is chosen, +# but also whether or not the output is meant for Windows or non-Windows users. 
+# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES +# forces the Windows encoding (this is the default for the Windows binary), +# whereas setting the tag to NO uses a Unix-style encoding (the default for +# all platforms other than Windows). + +USE_WINDOWS_ENCODING = NO + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = YES + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used + +FULL_PATH_NAMES = NO + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. 
If set to NO, the JavaDoc +# comments will behave just like the Qt-style comments (thus requiring an +# explicit @brief command for a brief description. + +JAVADOC_AUTOBRIEF = YES + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the DETAILS_AT_TOP tag is set to YES then Doxygen +# will output the detailed description near the top, like JavaDoc. +# If set to NO, the detailed description appears after the member +# documentation. + +DETAILS_AT_TOP = YES + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java sources +# only. Doxygen will then generate output that is more tailored for Java. +# For instance, namespaces will be presented as packages, qualified scopes +# will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. 
+# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = NO + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is YES. + +SHOW_DIRECTORIES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from the +# version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the progam writes to standard output +# is used as the file version. See the manual for examples. 
+ +FILE_VERSION_FILTER = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = YES + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = docs/doxygen/warning.log + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = base data/fft data/fileio data/midi data/model data/osc plugin plugin/plugins transform rdf system + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm + +FILE_PATTERNS = *.h \ + *.C \ + *.cpp \ + *.cc + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. 
This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = qrc_*.cpp \ + moc_*.cpp \ + *.moc.cpp \ + *_skel.cpp + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. 
+ +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES (the default) +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES (the default) +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +#USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 3 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. 
+ +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compressed HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be +# generated containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. + +GENERATE_TREEVIEW = YES + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. 
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. 
+# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. This is useful +# if you want to understand what is going on. On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_PREDEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = Q* *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = HAVE_FFTW3F HAVE_FISHSOUND HAVE_JACK HAVE_LIBLO HAVE_LRDF HAVE_MAD HAVE_OGGZ HAVE_PORTAUDIO HAVE_SAMPLERATE HAVE_SNDFILE HAVE_VAMP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = YES + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. 
This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = NO + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will +# generate a call dependency graph for every global function or class method. +# Note that enabling this option will significantly increase the time of a run. +# So in most cases it will be better to enable call graphs for selected +# functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width +# (in pixels) of the graphs generated by dot. 
If a graph becomes larger than +# this value, doxygen will try to truncate the graph, so that it fits within +# the specified constraint. Beware that most browsers cannot cope with very +# large images. + +MAX_DOT_GRAPH_WIDTH = 1024 + +# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height +# (in pixels) of the graphs generated by dot. If a graph becomes larger than +# this value, doxygen will try to truncate the graph, so that it fits within +# the specified constraint. Beware that most browsers cannot cope with very +# large images. + +MAX_DOT_GRAPH_HEIGHT = 1024 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that a graph may be further truncated if the graph's +# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH +# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default), +# the graph is not depth-constrained. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, which results in a white background. +# Warning: Depending on the platform used, enabling this option may lead to +# badly anti-aliased labels on the edges of a graph (i.e. they become hard to +# read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff -r 4efa7429cd85 -r c10cb8782576 INSTALL.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/INSTALL.txt Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,53 @@ + +To compile from source +---------------------- + +This file provides various instructions useful when compiling the SV +libraries from source, but it doesn't have a complete recipe for any +one platform -- there are too many variables. However, you can find a +recipe for one platform (Ubuntu Linux) in the file INSTALL.ubuntu. 
+ +To build, run + +$ ./configure && make + +The following additional libraries are required or optional when +building the SV core libraries: + +REQUIRED Qt v4.4 or newer http://qt.nokia.com/ +REQUIRED Vamp Plugin SDK v2.x http://www.vamp-plugins.org/ +REQUIRED Rubber Band Library http://www.breakfastquay.com/rubberband/ +REQUIRED libsndfile http://www.mega-nerd.com/libsndfile/ +REQUIRED libsamplerate http://www.mega-nerd.com/SRC/ +REQUIRED FFTW3 http://www.fftw.org/ +REQUIRED bzip2 library http://www.bzip.org/ +REQUIRED Redland RDF libraries http://librdf.org/ + +Optional MAD mp3 decoder http://www.underbit.com/products/mad/ +Optional Oggz and fishsound http://www.annodex.net/software/libraries.html +Optional liblo OSC library http://www.plugin.org.uk/liblo/ + +The Redland RDF libraries include the Raptor RDF parser library, +Rasqal RDF query library, and librdf, the Redland RDF datastore (which +depends on both of those). The SV libraries require all of these. + +If you are going to build the rest of the SV libraries, you will also +need one or more of: + +Optional JACK http://www.jackaudio.org/ +Optional PortAudio v19 http://www.portaudio.com/ +Optional PulseAudio http://www.pulseaudio.org/ + +Although JACK, PortAudio, and PulseAudio are individually optional, +you will need to have one or the other of them in order to get any +audio playback. Usually JACK is preferred on Linux, with PulseAudio +as a backup, and PortAudio is used elsewhere. + +On Linux, you will need the ALSA libraries (used for MIDI). + +If you happen to be using a Debian-based Linux, you probably want to +apt-get install the following packages: libqt4-dev libsndfile1-dev +libsamplerate0-dev fftw3-dev libbz2-dev libjack-dev libmad0-dev +liboggz1-dev libfishsound1-dev libasound2-dev liblo0-dev liblrdf0-dev +librdf0-dev . 
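For example, on a Debian-based system a complete build from a source checkout might look like the following. This is a minimal sketch only: it assumes the package names listed above are still current for your release, and that the configure script has not yet been generated, in which case the bundled bootstrap.sh (which runs aclocal and autoconf) will produce it.

$ sudo apt-get install libqt4-dev libsndfile1-dev libsamplerate0-dev \
      fftw3-dev libbz2-dev libjack-dev libmad0-dev liboggz1-dev \
      libfishsound1-dev libasound2-dev liblo0-dev liblrdf0-dev librdf0-dev
$ ./bootstrap.sh           # regenerates ./configure via aclocal && autoconf
$ ./configure && make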
+ diff -r 4efa7429cd85 -r c10cb8782576 acinclude.m4 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/acinclude.m4 Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,114 @@ + +AC_DEFUN([SV_MODULE_REQUIRED], +[ +SV_MODULE_MODULE=$1 +SV_MODULE_VERSION_TEST="$2" +SV_MODULE_HEADER=$3 +SV_MODULE_LIB=$4 +SV_MODULE_FUNC=$5 +SV_MODULE_HAVE=HAVE_$(echo $1 | tr '[a-z]' '[A-Z]') +SV_MODULE_FAILED=1 +if test -n "$$1_LIBS" ; then + AC_MSG_NOTICE([User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE]) + CXXFLAGS="$CXXFLAGS $$1_CFLAGS" + LIBS="$LIBS $$1_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + PKG_CHECK_MODULES($1,[$SV_MODULE_VERSION_TEST],[HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $$1_CFLAGS";LIBS="$LIBS $$1_LIBS";SV_MODULE_FAILED=""],[AC_MSG_NOTICE([Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means])]) +fi +if test -n "$SV_MODULE_FAILED"; then + AC_CHECK_HEADER([$SV_MODULE_HEADER],[HAVES="$HAVES $SV_MODULE_HAVE"],[AC_MSG_ERROR([Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE])]) + if test -n "$SV_MODULE_LIB"; then + AC_CHECK_LIB([$SV_MODULE_LIB],[$SV_MODULE_FUNC],[LIBS="$LIBS -l$SV_MODULE_LIB"],[AC_MSG_ERROR([Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE])]) + fi +fi +]) + +AC_DEFUN([SV_MODULE_OPTIONAL], +[ +SV_MODULE_MODULE=$1 +SV_MODULE_VERSION_TEST="$2" +SV_MODULE_HEADER=$3 +SV_MODULE_LIB=$4 +SV_MODULE_FUNC=$5 +SV_MODULE_HAVE=HAVE_$(echo $1 | tr '[a-z]' '[A-Z]') +SV_MODULE_FAILED=1 +if test -n "$$1_LIBS" ; then + AC_MSG_NOTICE([User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE]) + CXXFLAGS="$CXXFLAGS $$1_CFLAGS" + LIBS="$LIBS $$1_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + PKG_CHECK_MODULES($1,[$SV_MODULE_VERSION_TEST],[HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $$1_CFLAGS";LIBS="$LIBS $$1_LIBS";SV_MODULE_FAILED=""],[AC_MSG_NOTICE([Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means])]) +fi +if test -n "$SV_MODULE_FAILED"; then + AC_CHECK_HEADER([$SV_MODULE_HEADER],[HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED=""],[AC_MSG_NOTICE([Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE])]) + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + AC_CHECK_LIB([$SV_MODULE_LIB],[$SV_MODULE_FUNC],[LIBS="$LIBS -l$SV_MODULE_LIB"],[AC_MSG_NOTICE([Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE])]) + fi + fi +fi +]) + +# Check for Qt. The only part of Qt we use directly is qmake. + +AC_DEFUN([SV_CHECK_QT], +[ +AC_REQUIRE([AC_PROG_CXX]) + +if test x$QMAKE = x ; then + AC_CHECK_PROG(QMAKE, qmake-qt4, $QTDIR/bin/qmake-qt4,,$QTDIR/bin/) +fi +if test x$QMAKE = x ; then + AC_CHECK_PROG(QMAKE, qmake, $QTDIR/bin/qmake,,$QTDIR/bin/) +fi +if test x$QMAKE = x ; then + AC_CHECK_PROG(QMAKE, qmake.exe, $QTDIR/bin/qmake.exe,,$QTDIR/bin/) +fi +if test x$QMAKE = x ; then + AC_CHECK_PROG(QMAKE, qmake-qt4, qmake-qt4,,$PATH) +fi +if test x$QMAKE = x ; then + AC_CHECK_PROG(QMAKE, qmake, qmake,,$PATH) +fi +if test x$QMAKE = x ; then + AC_MSG_ERROR([ +Failed to find the required qmake-qt4 or qmake program. 
Please +ensure you have the necessary Qt4 development files installed, and +if necessary set QTDIR to the location of your Qt4 installation. +]) +fi + +# Suitable versions of qmake should print out something like: +# +# QMake version 2.01a +# Using Qt version 4.6.3 in /usr/lib +# +# This may be translated, so we check only for the numbers (2.x and 4.x +# in that order). +# +QMAKE_VERSION_OUTPUT=`$QMAKE -v` +case "$QMAKE_VERSION_OUTPUT" in + *2.*4.*) ;; + *) AC_MSG_WARN([ + *** The version of qmake found in "$QMAKE" looks like it might be + from the wrong version of Qt (Qt4 is required). Please check + that this is the correct version of qmake for Qt4 builds. +]) +esac + +case "`uname`" in + *Darwin*) QMAKE="$QMAKE -spec macx-g++";; +esac + +]) + diff -r 4efa7429cd85 -r c10cb8782576 base/Command.h --- a/base/Command.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/Command.h Sun Jul 01 11:53:00 2012 +0100 @@ -20,6 +20,8 @@ #include #include +#include "Debug.h" + class Command { public: diff -r 4efa7429cd85 -r c10cb8782576 base/Debug.cpp --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/Debug.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,86 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + This file copyright 2010-2011 Chris Cannam and QMUL. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +#include "Debug.h" +#include "ResourceFinder.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +QDebug & +getSVDebug() +{ + static QFile *logFile = 0; + static QDebug *debug = 0; + static QMutex mutex; + static char *prefix; + mutex.lock(); + if (!debug) { + prefix = new char[20]; + sprintf(prefix, "[%lu]", (unsigned long)QCoreApplication::applicationPid()); + QString pfx = ResourceFinder().getUserResourcePrefix(); + QDir logdir(QString("%1/%2").arg(pfx).arg("log")); + if (!logdir.exists()) logdir.mkpath(logdir.path()); + logFile = new QFile(logdir.path() + "/debug.log"); + if (logFile->open(QIODevice::WriteOnly | QIODevice::Truncate)) { + QDebug(QtDebugMsg) << (const char *)prefix + << "Opened debug log file " + << logFile->fileName(); + debug = new QDebug(logFile); + } else { + QDebug(QtWarningMsg) << (const char *)prefix + << "Failed to open debug log file " + << logFile->fileName() + << " for writing, using console debug instead"; + debug = new QDebug(QtDebugMsg); + delete logFile; + logFile = 0; + } + *debug << endl << (const char *)prefix << "Log started at " + << QDateTime::currentDateTime().toString(); + } + mutex.unlock(); + + QDebug &dref = *debug; + return dref << endl << (const char *)prefix; +} + +QDebug & +operator<<(QDebug &dbg, const std::string &s) +{ + dbg << QString::fromUtf8(s.c_str()); + return dbg; +} + +std::ostream & +operator<<(std::ostream &target, const QString &str) +{ + return target << str.toLocal8Bit().data(); +} + +std::ostream & +operator<<(std::ostream &target, const QUrl &u) +{ + return target << "<" << u.toString() << ">"; +} + diff -r 4efa7429cd85 -r c10cb8782576 base/Debug.h --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/Debug.h Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 
+1,65 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + This file copyright 2010-2011 Chris Cannam and QMUL. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +#ifndef _DEBUG_H_ +#define _DEBUG_H_ + +#include +#include +#include +#include + +class QString; +class QUrl; + +QDebug &operator<<(QDebug &, const std::string &); +std::ostream &operator<<(std::ostream &, const QString &); +std::ostream &operator<<(std::ostream &, const QUrl &); + +#ifndef NDEBUG + +extern QDebug &getSVDebug(); + +#define SVDEBUG getSVDebug() + +template +inline QDebug &operator<<(QDebug &d, const T &t) { + QString s; + QTextStream ts(&s); + ts << t; + d << s; + return d; +} + +#else + +class NoDebug +{ +public: + inline NoDebug() {} + inline ~NoDebug(){} + + template + inline NoDebug &operator<<(const T &) { return *this; } + + inline NoDebug &operator<<(QTextStreamFunction) { return *this; } +}; + +#define SVDEBUG NoDebug() + +#endif /* !NDEBUG */ + +#endif /* !_DEBUG_H_ */ + diff -r 4efa7429cd85 -r c10cb8782576 base/Exceptions.cpp --- a/base/Exceptions.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/Exceptions.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -21,7 +21,7 @@ m_file(file) { std::cerr << "ERROR: File not found: " - << file.toStdString() << std::endl; + << file << std::endl; } const char * @@ -35,7 +35,7 @@ m_file(file) { std::cerr << "ERROR: Failed to open file: " - << file.toStdString() << std::endl; + << file << std::endl; } const char * @@ -49,7 +49,7 @@ m_directory(directory) { std::cerr << "ERROR: Directory creation failed for directory: " - << directory.toStdString() << std::endl; + << directory << std::endl; } const char * @@ -63,7 +63,7 @@ m_file(file) { std::cerr << "ERROR: File read failed for file: " - << file.toStdString() << std::endl; + << file << std::endl; } const char * @@ -77,8 +77,8 @@ m_file(file), m_operation(op) { - std::cerr << "ERROR: File " << op.toStdString() << " failed for file: " - << file.toStdString() << std::endl; + std::cerr << "ERROR: File " << op << " failed for file: " + << file << std::endl; } const char * @@ -96,7 +96,7 @@ m_available(available) { std::cerr << "ERROR: Not enough disc space available in " - << directory.toStdString() << ": need " << required + << directory << ": need " << required << ", only have " << available << std::endl; } @@ -106,7 +106,7 @@ m_available(0) { std::cerr << "ERROR: Not enough disc space available in " - << directory.toStdString() << std::endl; + << directory << std::endl; } const char * diff -r 4efa7429cd85 -r c10cb8782576 base/Exceptions.h --- a/base/Exceptions.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/Exceptions.h Sun Jul 01 11:53:00 2012 +0100 @@ -20,6 +20,8 @@ #include +#include "Debug.h" + class FileNotFound : virtual public std::exception { public: diff -r 4efa7429cd85 -r c10cb8782576 base/LogRange.cpp --- a/base/LogRange.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/LogRange.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -26,7 +26,7 @@ if (min > max) std::swap(min, max); if (max == min) max = min + 1; -// std::cerr << "LogRange::mapRange: min = " << min << ", max = " << max << std::endl; +// SVDEBUG 
<< "LogRange::mapRange: min = " << min << ", max = " << max << endl; if (min >= 0.f) { @@ -35,7 +35,7 @@ if (min == 0.f) min = std::min(logthresh, max); else min = log10f(min); -// std::cerr << "LogRange::mapRange: positive: min = " << min << ", max = " << max << std::endl; +// SVDEBUG << "LogRange::mapRange: positive: min = " << min << ", max = " << max << endl; } else if (max <= 0.f) { @@ -46,7 +46,7 @@ std::swap(min, max); -// std::cerr << "LogRange::mapRange: negative: min = " << min << ", max = " << max << std::endl; +// SVDEBUG << "LogRange::mapRange: negative: min = " << min << ", max = " << max << endl; } else { @@ -55,7 +55,7 @@ max = log10f(std::max(max, -min)); min = std::min(logthresh, max); -// std::cerr << "LogRange::mapRange: spanning: min = " << min << ", max = " << max << std::endl; +// SVDEBUG << "LogRange::mapRange: spanning: min = " << min << ", max = " << max << endl; } if (min == max) min = max - 1; @@ -104,8 +104,8 @@ float sd0 = sd(values, 0, mi); float sd1 = sd(values, mi, values.size() - mi); - std::cerr << "LogRange::useLogScale: sd0 = " - << sd0 << ", sd1 = " << sd1 << std::endl; + SVDEBUG << "LogRange::useLogScale: sd0 = " + << sd0 << ", sd1 = " << sd1 << endl; if (sd0 == 0 || sd1 == 0) return false; diff -r 4efa7429cd85 -r c10cb8782576 base/LogRange.h --- a/base/LogRange.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/LogRange.h Sun Jul 01 11:53:00 2012 +0100 @@ -17,6 +17,7 @@ #define _LOG_RANGE_H_ #include +#include "Debug.h" class LogRange { diff -r 4efa7429cd85 -r c10cb8782576 base/PlayParameterRepository.cpp --- a/base/PlayParameterRepository.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/PlayParameterRepository.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -124,7 +124,7 @@ PlayParameterRepository::playPluginConfigurationChanged(QString config) { PlayParameters *params = dynamic_cast(sender()); -// std::cerr << "PlayParameterRepository::playPluginConfigurationChanged" << std::endl; +// SVDEBUG << "PlayParameterRepository::playPluginConfigurationChanged" << endl; for (PlayableParameterMap::iterator i = m_playParameters.begin(); i != m_playParameters.end(); ++i) { if (i->second == params) { diff -r 4efa7429cd85 -r c10cb8782576 base/PlayParameters.cpp --- a/base/PlayParameters.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/PlayParameters.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -132,7 +132,7 @@ { if (m_playPluginConfiguration != configuration) { m_playPluginConfiguration = configuration; -// std::cerr << "PlayParameters(" << this << "): setPlayPluginConfiguration to \"" << configuration.toStdString() << "\"" << std::endl; +// std::cerr << "PlayParameters(" << this << "): setPlayPluginConfiguration to \"" << configuration << "\"" << std::endl; emit playPluginConfigurationChanged(configuration); emit playParametersChanged(); } diff -r 4efa7429cd85 -r c10cb8782576 base/Profiler.cpp --- a/base/Profiler.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/Profiler.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -22,6 +22,8 @@ #include #include "Profiler.h" +#include + #include #include #include diff -r 4efa7429cd85 -r c10cb8782576 base/ProgressPrinter.cpp --- a/base/ProgressPrinter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/ProgressPrinter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -52,6 +52,9 @@ ProgressPrinter::setMessage(QString message) { m_prefix = message; + if (m_prefix.length() > 70) { + m_prefix = m_prefix.left(70) + "..."; + } } void diff -r 4efa7429cd85 -r c10cb8782576 base/PropertyContainer.cpp --- a/base/PropertyContainer.cpp Mon Nov 29 12:45:39 2010 +0000 +++ 
b/base/PropertyContainer.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -68,7 +68,7 @@ void PropertyContainer::setProperty(const PropertyName &name, int) { - std::cerr << "WARNING: PropertyContainer[" << getPropertyContainerName().toStdString() << "]::setProperty(" << name.toStdString() << "): no implementation in subclass!" << std::endl; + std::cerr << "WARNING: PropertyContainer[" << getPropertyContainerName() << "]::setProperty(" << name << "): no implementation in subclass!" << std::endl; } Command * @@ -86,7 +86,7 @@ int value; if (!convertPropertyStrings(nameString, valueString, name, value)) { std::cerr << "WARNING: PropertyContainer::setProperty(\"" - << nameString.toStdString() << "\", \"" + << nameString << "\", \"" << valueString.toStdString() << "\"): Name and value conversion failed" << std::endl; return; @@ -101,7 +101,7 @@ int value; if (!convertPropertyStrings(nameString, valueString, name, value)) { std::cerr << "WARNING: PropertyContainer::getSetPropertyCommand(\"" - << nameString.toStdString() << "\", \"" + << nameString << "\", \"" << valueString.toStdString() << "\"): Name and value conversion failed" << std::endl; return 0; @@ -135,7 +135,7 @@ } if (name == "") { - std::cerr << "PropertyContainer::convertPropertyStrings: Unable to match name string \"" << nameString.toStdString() << "\"" << std::endl; + std::cerr << "PropertyContainer::convertPropertyStrings: Unable to match name string \"" << nameString << "\"" << std::endl; return false; } @@ -192,7 +192,7 @@ break; case InvalidProperty: - std::cerr << "PropertyContainer::convertPropertyStrings: Invalid property name \"" << name.toStdString() << "\"" << std::endl; + SVDEBUG << "PropertyContainer::convertPropertyStrings: Invalid property name \"" << name << "\"" << endl; return false; } @@ -204,10 +204,10 @@ bool ok = false; int i = valueString.toInt(&ok); if (!ok) { - std::cerr << "PropertyContainer::convertPropertyStrings: Unable to parse value string \"" << valueString.toStdString() << "\"" << std::endl; + std::cerr << "PropertyContainer::convertPropertyStrings: Unable to parse value string \"" << valueString << "\"" << std::endl; return false; } else if (i < min || i > max) { - std::cerr << "PropertyContainer::convertPropertyStrings: Property value \"" << i << "\" outside valid range " << min << " to " << max << std::endl; + SVDEBUG << "PropertyContainer::convertPropertyStrings: Property value \"" << i << "\" outside valid range " << min << " to " << max << endl; return false; } diff -r 4efa7429cd85 -r c10cb8782576 base/RangeMapper.cpp --- a/base/RangeMapper.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/RangeMapper.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -43,8 +43,8 @@ * (m_maxpos - m_minpos)); if (position < m_minpos) position = m_minpos; if (position > m_maxpos) position = m_maxpos; -// std::cerr << "LinearRangeMapper::getPositionForValue: " << value << " -> " -// << position << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", minval " << m_minval << ", maxval " << m_maxval << ")" << std::endl; +// SVDEBUG << "LinearRangeMapper::getPositionForValue: " << value << " -> " +// << position << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", minval " << m_minval << ", maxval " << m_maxval << ")" << endl; if (m_inverted) return m_maxpos - position; else return position; } @@ -58,8 +58,8 @@ * (m_maxval - m_minval)); if (value < m_minval) value = m_minval; if (value > m_maxval) value = m_maxval; -// std::cerr << "LinearRangeMapper::getValueForPosition: " << position << " -> " -// << value << " (minpos " << 
m_minpos << ", maxpos " << m_maxpos << ", minval " << m_minval << ", maxval " << m_maxval << ")" << std::endl; +// SVDEBUG << "LinearRangeMapper::getValueForPosition: " << position << " -> " +// << value << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", minval " << m_minval << ", maxval " << m_maxval << ")" << endl; return value; } @@ -76,7 +76,7 @@ std::cerr << "LogRangeMapper: minpos " << minpos << ", maxpos " << maxpos << ", minval " << minval << ", maxval " << maxval << ", minlog " << m_minlog << ", ratio " << m_ratio - << ", unit " << unit.toStdString() << std::endl; + << ", unit " << unit << std::endl; assert(m_maxpos != m_minpos); @@ -109,8 +109,8 @@ int position = (log10(value) - m_minlog) * m_ratio + m_minpos; if (position < m_minpos) position = m_minpos; if (position > m_maxpos) position = m_maxpos; -// std::cerr << "LogRangeMapper::getPositionForValue: " << value << " -> " -// << position << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", ratio " << m_ratio << ", minlog " << m_minlog << ")" << std::endl; +// SVDEBUG << "LogRangeMapper::getPositionForValue: " << value << " -> " +// << position << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", ratio " << m_ratio << ", minlog " << m_minlog << ")" << endl; if (m_inverted) return m_maxpos - position; else return position; } @@ -120,8 +120,8 @@ { if (m_inverted) position = m_maxpos - position; float value = powf(10, (position - m_minpos) / m_ratio + m_minlog); -// std::cerr << "LogRangeMapper::getValueForPosition: " << position << " -> " -// << value << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", ratio " << m_ratio << ", minlog " << m_minlog << ")" << std::endl; +// SVDEBUG << "LogRangeMapper::getValueForPosition: " << position << " -> " +// << value << " (minpos " << m_minpos << ", maxpos " << m_maxpos << ", ratio " << m_ratio << ", minlog " << m_minlog << ")" << endl; return value; } diff -r 4efa7429cd85 -r c10cb8782576 base/RangeMapper.h --- a/base/RangeMapper.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/RangeMapper.h Sun Jul 01 11:53:00 2012 +0100 @@ -18,6 +18,8 @@ #include +#include "Debug.h" + class RangeMapper { diff -r 4efa7429cd85 -r c10cb8782576 base/RealTime.cpp --- a/base/RealTime.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/RealTime.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -242,8 +242,8 @@ int nsec = atoi(snsec.c_str()); if (negative) sec = -sec; -// std::cerr << "RealTime::fromString: string " << s << " -> " -// << sec << " sec, " << nsec << " nsec" << std::endl; +// SVDEBUG << "RealTime::fromString: string " << s << " -> " +// << sec << " sec, " << nsec << " nsec" << endl; return RealTime(sec, nsec); } diff -r 4efa7429cd85 -r c10cb8782576 base/ResizeableBitset.h --- a/base/ResizeableBitset.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/ResizeableBitset.h Sun Jul 01 11:53:00 2012 +0100 @@ -18,6 +18,8 @@ #include #include +#include +#include class ResizeableBitset { diff -r 4efa7429cd85 -r c10cb8782576 base/ResourceFinder.cpp --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/ResourceFinder.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,297 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. 
+ + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +/* + This is a modified version of a source file from the + Rosegarden MIDI and audio sequencer and notation editor. + This file copyright 2005-2011 Chris Cannam and the Rosegarden + development team. +*/ + +#include "ResourceFinder.h" + +#include +#include +#include +#include +#include + +#include +#include + +/** + Resource files may be found in three places: + + * Bundled into the application as Qt4 resources. These may be + opened using Qt classes such as QFile, with "fake" file paths + starting with a colon. For example ":icons/fileopen.png". + + * Installed with the package, or in the user's equivalent home + directory location. For example, + + - on Linux, in /usr/share/ or /usr/local/share/ + - on Linux, in $HOME/.local/share/ + + - on OS/X, in /Library/Application Support/ + - on OS/X, in $HOME/Library/Application Support/ + + - on Windows, in %ProgramFiles%// + - on Windows, in (where?) something from http://msdn.microsoft.com/en-us/library/dd378457%28v=vs.85%29.aspx ? + + These locations are searched in reverse order (user-installed + copies take priority over system-installed copies take priority + over bundled copies). Also, /usr/local takes priority over /usr. +*/ + +QStringList +ResourceFinder::getSystemResourcePrefixList() +{ + // returned in order of priority + + QStringList list; + +#ifdef Q_OS_WIN32 + char *programFiles = getenv("ProgramFiles"); + if (programFiles && programFiles[0]) { + list << QString("%1/%2/%3") + .arg(programFiles) + .arg(qApp->organizationName()) + .arg(qApp->applicationName()); + } else { + list << QString("C:/Program Files/%1/%2") + .arg(qApp->organizationName()) + .arg(qApp->applicationName()); + } +#else +#ifdef Q_OS_MAC + list << QString("/Library/Application Support/%1") + .arg(qApp->applicationName()); +#else + list << QString("/usr/local/share/%1") + .arg(qApp->applicationName()); + list << QString("/usr/share/%1") + .arg(qApp->applicationName()); +#endif +#endif + + return list; +} + +QString +ResourceFinder::getUserResourcePrefix() +{ +#ifdef Q_OS_WIN32 + char *homedrive = getenv("HOMEDRIVE"); + char *homepath = getenv("HOMEPATH"); + QString home; + if (homedrive && homepath) { + home = QString("%1%2").arg(homedrive).arg(homepath); + } else { + home = QDir::home().absolutePath(); + } + if (home == "") return ""; + return QString("%1/.%2").arg(home).arg(qApp->applicationName()); //!!! wrong +#else + char *home = getenv("HOME"); + if (!home || !home[0]) return ""; +#ifdef Q_OS_MAC + return QString("%1/Library/Application Support/%2") + .arg(home) + .arg(qApp->applicationName()); +#else + return QString("%1/.local/share/%2") + .arg(home) + .arg(qApp->applicationName()); +#endif +#endif +} + +QStringList +ResourceFinder::getResourcePrefixList() +{ + // returned in order of priority + + QStringList list; + + QString user = getUserResourcePrefix(); + if (user != "") list << user; + + list << getSystemResourcePrefixList(); + + list << ":"; // bundled resource location + + return list; +} + +QString +ResourceFinder::getResourcePath(QString resourceCat, QString fileName) +{ + // We don't simply call getResourceDir here, because that returns + // only the "installed file" location. 
We also want to search the + // bundled resources and user-saved files. + + QStringList prefixes = getResourcePrefixList(); + + if (resourceCat != "") resourceCat = "/" + resourceCat; + + for (QStringList::const_iterator i = prefixes.begin(); + i != prefixes.end(); ++i) { + + QString prefix = *i; + + SVDEBUG << "ResourceFinder::getResourcePath: Looking up file \"" << fileName << "\" for category \"" << resourceCat << "\" in prefix \"" << prefix << "\"" << endl; + + QString path = + QString("%1%2/%3").arg(prefix).arg(resourceCat).arg(fileName); + if (QFileInfo(path).exists() && QFileInfo(path).isReadable()) { + std::cerr << "Found it!" << std::endl; + return path; + } + } + + return ""; +} + +QString +ResourceFinder::getResourceDir(QString resourceCat) +{ + // Returns only the "installed file" location + + QStringList prefixes = getSystemResourcePrefixList(); + + if (resourceCat != "") resourceCat = "/" + resourceCat; + + for (QStringList::const_iterator i = prefixes.begin(); + i != prefixes.end(); ++i) { + + QString prefix = *i; + QString path = QString("%1%2").arg(prefix).arg(resourceCat); + if (QFileInfo(path).exists() && + QFileInfo(path).isDir() && + QFileInfo(path).isReadable()) { + return path; + } + } + + return ""; +} + +QString +ResourceFinder::getResourceSavePath(QString resourceCat, QString fileName) +{ + QString dir = getResourceSaveDir(resourceCat); + if (dir == "") return ""; + + return dir + "/" + fileName; +} + +QString +ResourceFinder::getResourceSaveDir(QString resourceCat) +{ + // Returns the "user" location + + QString user = getUserResourcePrefix(); + if (user == "") return ""; + + if (resourceCat != "") resourceCat = "/" + resourceCat; + + QDir userDir(user); + if (!userDir.exists()) { + if (!userDir.mkpath(user)) { + std::cerr << "ResourceFinder::getResourceSaveDir: ERROR: Failed to create user resource path \"" << user << "\"" << std::endl; + return ""; + } + } + + if (resourceCat != "") { + QString save = QString("%1%2").arg(user).arg(resourceCat); + QDir saveDir(save); + if (!saveDir.exists()) { + if (!userDir.mkpath(save)) { + std::cerr << "ResourceFinder::getResourceSaveDir: ERROR: Failed to create user resource path \"" << save << "\"" << std::endl; + return ""; + } + } + return save; + } else { + return user; + } +} + +QStringList +ResourceFinder::getResourceFiles(QString resourceCat, QString fileExt) +{ + QStringList results; + QStringList prefixes = getResourcePrefixList(); + + QStringList filters; + filters << QString("*.%1").arg(fileExt); + + for (QStringList::const_iterator i = prefixes.begin(); + i != prefixes.end(); ++i) { + + QString prefix = *i; + QString path; + + if (resourceCat != "") { + path = QString("%1/%2").arg(prefix).arg(resourceCat); + } else { + path = prefix; + } + + QDir dir(path); + if (!dir.exists()) continue; + + dir.setNameFilters(filters); + QStringList entries = dir.entryList + (QDir::Files | QDir::Readable, QDir::Name); + + for (QStringList::const_iterator j = entries.begin(); + j != entries.end(); ++j) { + results << QString("%1/%2").arg(path).arg(*j); + } + } + + return results; +} + +bool +ResourceFinder::unbundleResource(QString resourceCat, QString fileName) +{ + QString path = getResourcePath(resourceCat, fileName); + + if (!path.startsWith(':')) return true; + + // This is the lowest-priority alternative path for this + // resource, so we know that there must be no installed copy. + // Install one to the user location. 
+ SVDEBUG << "ResourceFinder::unbundleResource: File " << fileName << " is bundled, un-bundling it" << endl; + QString target = getResourceSavePath(resourceCat, fileName); + QFile file(path); + if (!file.copy(target)) { + std::cerr << "ResourceFinder::unbundleResource: ERROR: Failed to un-bundle resource file \"" << fileName << "\" to user location \"" << target << "\"" << std::endl; + return false; + } + + QFile chmod(target); + chmod.setPermissions(QFile::ReadOwner | + QFile::ReadUser | /* for potential platform-independence */ + QFile::ReadGroup | + QFile::ReadOther | + QFile::WriteOwner| + QFile::WriteUser); /* for potential platform-independence */ + + return true; +} + diff -r 4efa7429cd85 -r c10cb8782576 base/ResourceFinder.h --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/ResourceFinder.h Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,138 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +/* + This is a modified version of a source file from the + Rosegarden MIDI and audio sequencer and notation editor. + This file copyright 2005-2011 Chris Cannam and the Rosegarden + development team. +*/ + +#ifndef _RESOURCE_FINDER_H_ +#define _RESOURCE_FINDER_H_ + +#include + +#include "Debug.h" + +class ResourceFinder +{ +public: + ResourceFinder() { } + virtual ~ResourceFinder() { } + + /** + * Return the location (as a true file path, or a Qt4 ":"-prefixed + * resource path) of the file best matching the given resource + * filename in the given resource category. + * + * Category should be a relative directory path without leading or + * trailing slashes, for example "chords". The fileName is the + * remainder of the file name without any path content, for + * example "user_chords.xml". + * + * Returns an empty string if no matching resource is found. + * + * Use this when you know that a particular resource is required + * and just need to locate it. + */ + QString getResourcePath(QString resourceCat, QString fileName); + + /** + * Return a list of full file paths for files with the given file + * extension, found in the given resource category. + * + * Category should be a relative directory path without leading or + * trailing slashes, for example "chords". File extension should + * be the extension without the dot, for example "xml". Returned + * list may mix true file paths in both installed and user + * locations with Qt4 ":"-prefixed resource paths. + * + * Use this when you need to enumerate the options available for + * use directly in the program (rather than e.g. offering the user + * a file-open dialog). + */ + QStringList getResourceFiles(QString resourceCat, QString fileExt); + + /** + * Return the true file path for installed resource files in the + * given resource category. Category should be a relative + * directory path without leading or trailing slashes, for example + * "chords". Note that resources may also exist in the Qt4 + * resource bundle; this method only returns the external + * (installed) resource location. 
Use getResourceFiles instead to + * return an authoritative list of available resources of a given + * type. + * + * Use this when you need a file path, e.g. for use in a file + * finder dialog. + */ + QString getResourceDir(QString resourceCat); + + /** + * Return the true file path for the location in which the named + * resource file in the given resource category should be saved. + * ResourceFinder will make a best effort to ensure this directory + * actually exists, before returning. + */ + QString getResourceSavePath(QString resourceCat, QString fileName); + + /** + * Return the true file path for the location in which resource + * files in the given resource category should be saved. + */ + QString getResourceSaveDir(QString resourceCat); + + /** + * If the named resource file in the given resource category is + * available only as a bundled resource, copy it out into the user + * location returned by getResourceSavePath so that it can be read + * by non-Qt code. Any subsequent call to getResourcePath for + * this resource should return a true file path (if the resource + * exists) in either user or system location, or an empty string + * (if the resource does not exist), but never a ":"-prefixed + * resource path. This function does not overwrite any existing + * unbundled copy of the resource. + * + * Return false if a system error occurs during unbundling + * (e.g. disk full). + */ + bool unbundleResource(QString resourceCat, QString fileName); + + /** + * Return the root path for user-specific resource installation + * for this application (i.e. resources beneath the user's home + * directory). + */ + QString getUserResourcePrefix(); + + /** + * Return the root paths for systemwide resource installations for + * this application. + */ + QStringList getSystemResourcePrefixList(); + + /** + * Return all root paths for resource installations for this + * application, in the order in which they will be searched. This + * list consists of the user-specific path + * (getUserResourcePrefix()) followed by the systemwide paths + * (getSystemResourcePrefixList()). + */ + QStringList getResourcePrefixList(); +}; + +#endif + + diff -r 4efa7429cd85 -r c10cb8782576 base/StorageAdviser.cpp --- a/base/StorageAdviser.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/StorageAdviser.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -36,9 +36,9 @@ int maximumSize) { #ifdef DEBUG_STORAGE_ADVISER - std::cerr << "StorageAdviser::recommend: Criteria " << criteria + SVDEBUG << "StorageAdviser::recommend: Criteria " << criteria << ", minimumSize " << minimumSize - << ", maximumSize " << maximumSize << std::endl; + << ", maximumSize " << maximumSize << endl; #endif if (m_baseRecommendation != NoRecommendation) { diff -r 4efa7429cd85 -r c10cb8782576 base/StringBits.cpp --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/StringBits.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,211 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. 
+*/ + +/* + This is a modified version of a source file from the + Rosegarden MIDI and audio sequencer and notation editor. + This file copyright 2000-2010 Chris Cannam. +*/ + +#include "StringBits.h" + +double +StringBits::stringToDoubleLocaleFree(QString s, bool *ok) +{ + int dp = 0; + int sign = 1; + int i = 0; + double result = 0.0; + int len = s.length(); + + result = 0.0; + + if (ok) *ok = true; + + while (i < len && s[i].isSpace()) ++i; + if (i < len && s[i] == '-') sign = -1; + + while (i < len) { + + QChar c = s[i]; + + if (c.isDigit()) { + + double d = c.digitValue(); + + if (dp > 0) { + for (int p = dp; p > 0; --p) d /= 10.0; + ++dp; + } else { + result *= 10.0; + } + + result += d; + + } else if (c == '.') { + + dp = 1; + + } else if (ok) { + *ok = false; + } + + ++i; + } + + return result * sign; +} + +QStringList +StringBits::splitQuoted(QString s, QChar separator) +{ + QStringList tokens; + QString tok; + + enum { sep, unq, q1, q2 } mode = sep; + + for (int i = 0; i < s.length(); ++i) { + + QChar c = s[i]; + + if (c == '\'') { + switch (mode) { + case sep: mode = q1; break; + case unq: case q2: tok += c; break; + case q1: mode = sep; tokens << tok; tok = ""; break; + } + + } else if (c == '"') { + switch (mode) { + case sep: mode = q2; break; + case unq: case q1: tok += c; break; + case q2: mode = sep; tokens << tok; tok = ""; break; + } + + } else if (c == separator || (separator == ' ' && c.isSpace())) { + switch (mode) { + case sep: if (separator != ' ') tokens << ""; break; + case unq: mode = sep; tokens << tok; tok = ""; break; + case q1: case q2: tok += c; break; + } + + } else if (c == '\\') { + if (++i < s.length()) { + c = s[i]; + switch (mode) { + case sep: mode = unq; tok += c; break; + default: tok += c; break; + } + } + + } else { + switch (mode) { + case sep: mode = unq; tok += c; break; + default: tok += c; break; + } + } + } + + if (tok != "" || mode != sep) tokens << tok; + return tokens; +} + +/* + +void testSplit() +{ + QStringList tests; + tests << "a b c d"; + tests << "a \"b c\" d"; + tests << "a 'b c' d"; + tests << "a \"b c\\\" d\""; + tests << "a 'b c\\' d'"; + tests << "a \"b c' d\""; + tests << "a 'b c\" d'"; + tests << "aa 'bb cc\" dd'"; + tests << "a'a 'bb' \\\"cc\" dd\\\""; + tests << " a'a \\\' 'bb' \' \\\"cc\" ' dd\\\" '"; + + for (int j = 0; j < tests.size(); ++j) { + cout << endl; + cout << tests[j] << endl; + cout << "->" << endl << "("; + QStringList l = splitQuoted(tests[j], ' '); + for (int i = 0; i < l.size(); ++i) { + if (i > 0) cout << ";"; + cout << l[i].toStdString(); + } + cout << ")" << endl; + } +} + +*/ + +/* + Results: + +a b c d +-> +(a;b;c;d) + +a "b c" d +-> +(a;b c;d) + +a 'b c' d +-> +(a;b c;d) + +a "b c\" d" +-> +(a;b c" d) + +a 'b c\' d' +-> +(a;b c' d) + +a "b c' d" +-> +(a;b c' d) + +a 'b c" d' +-> +(a;b c" d) + +aa 'bb cc" dd' +-> +(aa;bb cc" dd) + +a'a 'bb' \"cc" dd\" +-> +(a'a;bb;"cc";dd") + + a'a \' 'bb' ' \"cc" ' dd\" ' +-> +(a'a;';bb; "cc" ;dd";) + +*/ + +QStringList +StringBits::split(QString line, QChar separator, bool quoted) +{ + if (quoted) { + return splitQuoted(line, separator); + } else { + return line.split(separator, + separator == ' ' ? 
QString::SkipEmptyParts : + QString::KeepEmptyParts); + } +} + diff -r 4efa7429cd85 -r c10cb8782576 base/StringBits.h --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/StringBits.h Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,60 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +/* + This is a modified version of a source file from the + Rosegarden MIDI and audio sequencer and notation editor. + This file copyright 2000-2010 Chris Cannam. +*/ + +#ifndef _STRING_BITS_H_ +#define _STRING_BITS_H_ + +#include +#include +#include + +class StringBits +{ +public: + /** + * Convert a string to a double using basic "C"-locale syntax, + * i.e. always using '.' as a decimal point. We use this as a + * fallback when parsing files from an unknown source, if + * locale-specific conversion fails. Does not support e notation. + * If ok is non-NULL, *ok will be set to true if conversion + * succeeds or false otherwise. + */ + static double stringToDoubleLocaleFree(QString s, bool *ok = 0); + + /** + * Split a string at the given separator character, allowing + * quoted sections that contain the separator. If the separator + * is ' ', any (amount of) whitespace will be considered as a + * single separator. If the separator is another whitespace + * character such as '\t', it will be used literally. + */ + static QStringList splitQuoted(QString s, QChar separator); + + /** + * Split a string at the given separator character. If quoted is + * true, do so by calling splitQuoted (above). If quoted is + * false, use QString::split; if separator is ' ', use + * SkipEmptyParts behaviour, otherwise use KeepEmptyParts (this is + * analogous to the behaviour of splitQuoted). + */ + static QStringList split(QString s, QChar separator, bool quoted); +}; + +#endif diff -r 4efa7429cd85 -r c10cb8782576 base/TempDirectory.cpp --- a/base/TempDirectory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/TempDirectory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -14,6 +14,7 @@ */ #include "TempDirectory.h" +#include "ResourceFinder.h" #include "system/System.h" #include "Exceptions.h" @@ -25,6 +26,7 @@ #include #include #include +#include TempDirectory * TempDirectory::m_instance = new TempDirectory; @@ -42,7 +44,7 @@ TempDirectory::~TempDirectory() { - std::cerr << "TempDirectory::~TempDirectory" << std::endl; + SVDEBUG << "TempDirectory::~TempDirectory" << endl; cleanup(); } @@ -57,34 +59,26 @@ TempDirectory::getContainingPath() { QMutexLocker locker(&m_mutex); - + QSettings settings; settings.beginGroup("TempDirectory"); QString svDirParent = settings.value("create-in", "$HOME").toString(); settings.endGroup(); -#ifdef Q_OS_WIN32 - char *homedrive = getenv("HOMEDRIVE"); - char *homepath = getenv("HOMEPATH"); - if (homedrive && homepath) { - svDirParent.replace("$HOME", QString("%1%2").arg(homedrive).arg(homepath)); - } else { - svDirParent.replace("$HOME", QDir::home().absolutePath()); + QString svDir = ResourceFinder().getUserResourcePrefix(); + if (svDirParent != "$HOME") { + //!!! 
iffy + svDir.replace(QDir::home().absolutePath(), svDirParent); } -#else - svDirParent.replace("$HOME", QDir::home().absolutePath()); -#endif - QString svDirBase = ".sv1"; - QString svDir = QDir(svDirParent).filePath(svDirBase); if (!QFileInfo(svDir).exists()) { - if (!QDir(svDirParent).mkdir(svDirBase)) { + if (!QDir(svDirParent).mkpath(svDir)) { throw DirectoryCreationFailed(QString("%1 directory in %2") - .arg(svDirBase).arg(svDirParent)); + .arg(svDir).arg(svDirParent)); } } else if (!QFileInfo(svDir).isDir()) { - throw DirectoryCreationFailed(QString("%1/%2 is not a directory") - .arg(svDirParent).arg(svDirBase)); + throw DirectoryCreationFailed(QString("%1 is not a directory") + .arg(svDir)); } cleanupAbandonedDirectories(svDir); @@ -214,7 +208,7 @@ if (!QFile(fi.absoluteFilePath()).remove()) { std::cerr << "WARNING: TempDirectory::cleanup: " << "Failed to unlink file \"" - << fi.absoluteFilePath().toStdString() << "\"" + << fi.absoluteFilePath() << "\"" << std::endl; } } @@ -225,13 +219,13 @@ if (!dir.cdUp()) { std::cerr << "WARNING: TempDirectory::cleanup: " << "Failed to cd to parent directory of " - << tmpdir.toStdString() << std::endl; + << tmpdir << std::endl; return; } if (!dir.rmdir(dirname)) { std::cerr << "WARNING: TempDirectory::cleanup: " << "Failed to remove directory " - << dirname.toStdString() << std::endl; + << dirname << std::endl; } } @@ -254,7 +248,7 @@ if (subdir.count() == 0) { std::cerr << "INFO: Found temporary directory with no .pid file in it!\n(directory=\"" - << dirpath.toStdString() << "\"). Removing it..." << std::endl; + << dirpath << "\"). Removing it..." << std::endl; cleanupDirectory(dirpath); std::cerr << "...done." << std::endl; continue; diff -r 4efa7429cd85 -r c10cb8782576 base/TempWriteFile.cpp --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/TempWriteFile.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,69 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +#include "TempWriteFile.h" + +#include "Exceptions.h" + +#include +#include +#include + +TempWriteFile::TempWriteFile(QString target) : + m_target(target) +{ + QTemporaryFile temp(m_target + "."); + temp.setAutoRemove(false); + temp.open(); // creates the file and opens it atomically + if (temp.error()) { + std::cerr << "TempWriteFile: Failed to create temporary file in directory of " << m_target << ": " << temp.errorString() << std::endl; + throw FileOperationFailed(temp.fileName(), "creation"); + } + + m_temp = temp.fileName(); + temp.close(); // does not remove the file +} + +TempWriteFile::~TempWriteFile() +{ + if (m_temp != "") { + QDir dir(QFileInfo(m_temp).dir()); + dir.remove(m_temp); + } +} + +QString +TempWriteFile::getTemporaryFilename() +{ + return m_temp; +} + +void +TempWriteFile::moveToTarget() +{ + if (m_temp == "") return; + + QDir dir(QFileInfo(m_temp).dir()); + // According to http://doc.trolltech.com/4.4/qdir.html#rename + // some systems fail, if renaming over an existing file. + // Therefore, delete first the existing file. 
+ if (dir.exists(m_target)) dir.remove(m_target); + if (!dir.rename(m_temp, m_target)) { + std::cerr << "TempWriteFile: Failed to rename temporary file " << m_temp << " to target " << m_target << std::endl; + throw FileOperationFailed(m_temp, "rename"); + } + + m_temp = ""; +} + diff -r 4efa7429cd85 -r c10cb8782576 base/TempWriteFile.h --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/base/TempWriteFile.h Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,60 @@ +/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */ + +/* + Sonic Visualiser + An audio file viewer and annotation editor. + Centre for Digital Music, Queen Mary, University of London. + + This program is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. See the file + COPYING included with this distribution for more information. +*/ + +#ifndef _TEMP_WRITE_FILE_H_ +#define _TEMP_WRITE_FILE_H_ + +#include + +/** + * A class that manages the creation of a temporary file with a given + * prefix and the renaming of that file to the prefix after use. For + * use when saving a file over an existing one, to avoid clobbering + * the original before the save is complete. + */ + +class TempWriteFile +{ +public: + TempWriteFile(QString targetFileName); // may throw FileOperationFailed + + /** + * Destroy the temporary file object. If moveToTarget has not + * been called, the associated temporary file will be deleted + * without being copied to the target location. + */ + ~TempWriteFile(); + + /** + * Return the name of the temporary file. Unless the constructor + * threw an exception, this file will have been created already + * (but it will not be open). + * + * (If moveToTarget has already been called, return an empty + * string.) + */ + QString getTemporaryFilename(); + + /** + * Rename the temporary file to the target filename. + */ + void moveToTarget(); + +protected: + QString m_target; + QString m_temp; +}; + + +#endif diff -r 4efa7429cd85 -r c10cb8782576 base/XmlExportable.cpp --- a/base/XmlExportable.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/base/XmlExportable.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -25,7 +25,7 @@ XmlExportable::toXmlString(QString indent, QString extraAttributes) const { -// std::cerr << "XmlExportable::toXmlString" << std::endl; +// SVDEBUG << "XmlExportable::toXmlString" << endl; QString s; diff -r 4efa7429cd85 -r c10cb8782576 base/XmlExportable.h --- a/base/XmlExportable.h Mon Nov 29 12:45:39 2010 +0000 +++ b/base/XmlExportable.h Sun Jul 01 11:53:00 2012 +0100 @@ -18,6 +18,8 @@ #include +#include "Debug.h" + class QTextStream; class XmlExportable diff -r 4efa7429cd85 -r c10cb8782576 base/base.pro --- a/base/base.pro Mon Nov 29 12:45:39 2010 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ -TEMPLATE = lib - -SV_UNIT_PACKAGES = -load(../prf/sv.prf) - -CONFIG += sv staticlib qt thread warn_on stl rtti exceptions -QT -= gui - -TARGET = svbase - -DEPENDPATH += . -INCLUDEPATH += . .. 
-OBJECTS_DIR = tmp_obj -MOC_DIR = tmp_moc - -# Input -HEADERS += AudioLevel.h \ - AudioPlaySource.h \ - Clipboard.h \ - Command.h \ - Exceptions.h \ - LogRange.h \ - Pitch.h \ - Playable.h \ - PlayParameterRepository.h \ - PlayParameters.h \ - Preferences.h \ - Profiler.h \ - ProgressPrinter.h \ - ProgressReporter.h \ - PropertyContainer.h \ - RangeMapper.h \ - RealTime.h \ - RecentFiles.h \ - Resampler.h \ - ResizeableBitset.h \ - RingBuffer.h \ - Scavenger.h \ - Selection.h \ - Serialiser.h \ - StorageAdviser.h \ - TempDirectory.h \ - TextMatcher.h \ - Thread.h \ - UnitDatabase.h \ - ViewManagerBase.h \ - Window.h \ - XmlExportable.h \ - ZoomConstraint.h -SOURCES += AudioLevel.cpp \ - Clipboard.cpp \ - Command.cpp \ - Exceptions.cpp \ - LogRange.cpp \ - Pitch.cpp \ - PlayParameterRepository.cpp \ - PlayParameters.cpp \ - Preferences.cpp \ - Profiler.cpp \ - ProgressPrinter.cpp \ - ProgressReporter.cpp \ - PropertyContainer.cpp \ - RangeMapper.cpp \ - RealTime.cpp \ - RecentFiles.cpp \ - Resampler.cpp \ - Selection.cpp \ - Serialiser.cpp \ - StorageAdviser.cpp \ - TempDirectory.cpp \ - TextMatcher.cpp \ - Thread.cpp \ - UnitDatabase.cpp \ - ViewManagerBase.cpp \ - XmlExportable.cpp diff -r 4efa7429cd85 -r c10cb8782576 bootstrap.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bootstrap.sh Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,2 @@ +#!/bin/sh +aclocal -I . && autoconf diff -r 4efa7429cd85 -r c10cb8782576 config.pri.in --- a/config.pri.in Mon Nov 29 12:45:39 2010 +0000 +++ b/config.pri.in Sun Jul 01 11:53:00 2012 +0100 @@ -1,15 +1,18 @@ +CONFIG += @QMAKE_CONFIG@ DEFINES += @HAVES@ -##!!! -DEFINES += NO_SV_GUI +QMAKE_CC = @CC@ +QMAKE_CXX = @CXX@ +QMAKE_LINK = @CXX@ QMAKE_CFLAGS += @CFLAGS@ QMAKE_CXXFLAGS += @CXXFLAGS@ +linux*:LIBS += -lasound + macx*:DEFINES += HAVE_COREAUDIO -macx*:LIBS += -framework CoreAudio -framework AudioUnit -macx*:INCLUDEPATH += /Developer/Extras/CoreAudio/PublicUtility/ +macx*:LIBS += -framework CoreAudio -framework CoreMidi -framework AudioUnit -framework AudioToolbox -framework CoreFoundation -framework CoreServices LIBS += @LIBS@ diff -r 4efa7429cd85 -r c10cb8782576 configure --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/configure Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,8231 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.69 for SVcore 1.8. +# +# Report bugs to . +# +# +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. 
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. +fi +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." + else + $as_echo "$0: Please tell bug-autoconf@gnu.org and +$0: cannam@all-day-breakfast.com about your system, +$0: including any error possibly output before this +$0: message. Then install a modern shell, or manually run +$0: the script under such a shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... 
+# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
+ xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. +PACKAGE_NAME='SVcore' +PACKAGE_TARNAME='svcore' +PACKAGE_VERSION='1.8' +PACKAGE_STRING='SVcore 1.8' +PACKAGE_BUGREPORT='cannam@all-day-breakfast.com' +PACKAGE_URL='' + +ac_unique_file="base/Pitch.h" +# Factoring default headers for most tests. 
+ac_includes_default="\ +#include +#ifdef HAVE_SYS_TYPES_H +# include +#endif +#ifdef HAVE_SYS_STAT_H +# include +#endif +#ifdef STDC_HEADERS +# include +# include +#else +# ifdef HAVE_STDLIB_H +# include +# endif +#endif +#ifdef HAVE_STRING_H +# if !defined STDC_HEADERS && defined HAVE_MEMORY_H +# include +# endif +# include +#endif +#ifdef HAVE_STRINGS_H +# include +#endif +#ifdef HAVE_INTTYPES_H +# include +#endif +#ifdef HAVE_STDINT_H +# include +#endif +#ifdef HAVE_UNISTD_H +# include +#endif" + +ac_subst_vars='LTLIBOBJS +LIBOBJS +CODENAME +QMAKE_CONFIG +HAVES +CXXFLAGS_MINIMAL +CUT +SHA1SUM +MAKEDEPEND +XARGS +PERL +id3tag_LIBS +id3tag_CFLAGS +mad_LIBS +mad_CFLAGS +fishsound_LIBS +fishsound_CFLAGS +oggz_LIBS +oggz_CFLAGS +lrdf_LIBS +lrdf_CFLAGS +libpulse_LIBS +libpulse_CFLAGS +JACK_LIBS +JACK_CFLAGS +portaudio_2_0_LIBS +portaudio_2_0_CFLAGS +liblo_LIBS +liblo_CFLAGS +dataquay_LIBS +dataquay_CFLAGS +rubberband_LIBS +rubberband_CFLAGS +vamphostsdk_LIBS +vamphostsdk_CFLAGS +vamp_LIBS +vamp_CFLAGS +samplerate_LIBS +samplerate_CFLAGS +sndfile_LIBS +sndfile_CFLAGS +fftw3f_LIBS +fftw3f_CFLAGS +fftw3_LIBS +fftw3_CFLAGS +bz2_LIBS +bz2_CFLAGS +QMAKE +PKG_CONFIG_LIBDIR +PKG_CONFIG_PATH +PKG_CONFIG +EGREP +GREP +CXXCPP +MKDIR_P +INSTALL_DATA +INSTALL_SCRIPT +INSTALL_PROGRAM +ac_ct_CXX +CXXFLAGS +CXX +OBJEXT +EXEEXT +ac_ct_CC +CPPFLAGS +LDFLAGS +CFLAGS +CC +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +enable_debug +' + ac_precious_vars='build_alias +host_alias +target_alias +CC +CFLAGS +LDFLAGS +LIBS +CPPFLAGS +CXX +CXXFLAGS +CCC +CXXCPP +PKG_CONFIG +PKG_CONFIG_PATH +PKG_CONFIG_LIBDIR +bz2_CFLAGS +bz2_LIBS +fftw3_CFLAGS +fftw3_LIBS +fftw3f_CFLAGS +fftw3f_LIBS +sndfile_CFLAGS +sndfile_LIBS +samplerate_CFLAGS +samplerate_LIBS +vamp_CFLAGS +vamp_LIBS +vamphostsdk_CFLAGS +vamphostsdk_LIBS +rubberband_CFLAGS +rubberband_LIBS +dataquay_CFLAGS +dataquay_LIBS +liblo_CFLAGS +liblo_LIBS +portaudio_2_0_CFLAGS +portaudio_2_0_LIBS +JACK_CFLAGS +JACK_LIBS +libpulse_CFLAGS +libpulse_LIBS +lrdf_CFLAGS +lrdf_LIBS +oggz_CFLAGS +oggz_LIBS +fishsound_CFLAGS +fishsound_LIBS +mad_CFLAGS +mad_LIBS +id3tag_CFLAGS +id3tag_LIBS' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) 
+bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + 
site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? 
"missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. +for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. 
+# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures SVcore 1.8 to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... + +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. + +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. + +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/svcore] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of SVcore 1.8:";; + esac + cat <<\_ACEOF + +Optional Features: + --disable-option-checking ignore unrecognized --enable/--with options + --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) + --enable-FEATURE[=ARG] include FEATURE [ARG=yes] + --enable-debug enable debug support [default=no] + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if + you have headers in a nonstandard directory + CXX C++ compiler command + CXXFLAGS C++ compiler flags + CXXCPP C++ preprocessor + PKG_CONFIG path to pkg-config utility + PKG_CONFIG_PATH + directories to add to pkg-config's search path + PKG_CONFIG_LIBDIR + path overriding pkg-config's built-in search path + bz2_CFLAGS C compiler flags for bz2, overriding pkg-config + bz2_LIBS linker flags for bz2, overriding pkg-config + fftw3_CFLAGS + C compiler flags for fftw3, overriding pkg-config + fftw3_LIBS linker flags for fftw3, overriding pkg-config + fftw3f_CFLAGS + C compiler flags for fftw3f, overriding pkg-config + fftw3f_LIBS linker flags for fftw3f, overriding pkg-config + sndfile_CFLAGS + C compiler flags for sndfile, overriding pkg-config + sndfile_LIBS + linker flags for sndfile, overriding pkg-config + samplerate_CFLAGS + C compiler flags for samplerate, overriding pkg-config + samplerate_LIBS + linker flags for samplerate, overriding pkg-config + vamp_CFLAGS C compiler flags for vamp, overriding pkg-config + vamp_LIBS linker flags for vamp, overriding pkg-config + vamphostsdk_CFLAGS + C compiler flags for vamphostsdk, overriding pkg-config + vamphostsdk_LIBS + linker flags for vamphostsdk, overriding pkg-config + rubberband_CFLAGS + C compiler flags for rubberband, overriding pkg-config + rubberband_LIBS + linker flags for rubberband, overriding pkg-config + dataquay_CFLAGS + C compiler flags for dataquay, overriding pkg-config + dataquay_LIBS + linker flags for dataquay, overriding pkg-config + liblo_CFLAGS + C compiler flags for liblo, overriding pkg-config + liblo_LIBS linker flags for liblo, overriding pkg-config + portaudio_2_0_CFLAGS + C compiler flags for portaudio_2_0, overriding pkg-config + portaudio_2_0_LIBS + linker flags for portaudio_2_0, overriding pkg-config + JACK_CFLAGS C compiler flags for JACK, overriding pkg-config + JACK_LIBS linker flags for JACK, overriding pkg-config + libpulse_CFLAGS + C compiler flags for libpulse, overriding pkg-config + libpulse_LIBS + linker flags for libpulse, overriding pkg-config + lrdf_CFLAGS C compiler flags for lrdf, overriding pkg-config + lrdf_LIBS linker flags for lrdf, overriding pkg-config + oggz_CFLAGS C compiler flags for oggz, overriding pkg-config + oggz_LIBS linker flags for oggz, overriding pkg-config + fishsound_CFLAGS + C compiler flags for fishsound, overriding pkg-config + fishsound_LIBS + linker flags for fishsound, overriding pkg-config + mad_CFLAGS C compiler flags for mad, overriding pkg-config + mad_LIBS linker flags for mad, overriding pkg-config + id3tag_CFLAGS + C compiler flags for id3tag, overriding pkg-config + id3tag_LIBS linker flags for id3tag, overriding pkg-config + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to . +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. 
ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +SVcore configure 1.8 +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_cxx_try_compile LINENO +# ---------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_cxx_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_cxx_werror_flag" || + test ! 
-s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_compile + +# ac_fn_cxx_try_cpp LINENO +# ------------------------ +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. +ac_fn_cxx_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_cpp + +# ac_fn_cxx_try_run LINENO +# ------------------------ +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. +ac_fn_cxx_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_run + +# ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES +# --------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. +ac_fn_cxx_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## ------------------------------------------- ## +## Report this to cannam@all-day-breakfast.com ## +## ------------------------------------------- ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_header_mongrel + +# ac_fn_cxx_check_header_compile LINENO HEADER VAR INCLUDES +# --------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_cxx_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... 
" >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_header_compile + +# ac_fn_cxx_try_link LINENO +# ------------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_cxx_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_cxx_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_link +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by SVcore $as_me 1.8, which was +generated by GNU Autoconf 2.69. Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. +ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + $as_echo "## ---------------- ## +## Cache variables. ## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + $as_echo "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + $as_echo "## ------------------- ## +## File substitutions. 
## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + $as_echo "## ----------- ## +## confdefs.h. ## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + # We do not want a PATH search for config.site. + case $CONFIG_SITE in #(( + -*) ac_site_file1=./$CONFIG_SITE;; + */*) ac_site_file1=$CONFIG_SITE;; + *) ac_site_file1=./$CONFIG_SITE;; + esac +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + + +# Autoconf will set CXXFLAGS; we don't usually want it to, because we +# either define our own flags (at least if GCC is in use) or else use +# the user's preferences. We need to ensure CXXFLAGS is only set if +# the user has expressly set it. So, save the user's (or empty) +# setting now and restore it after Autoconf has done its bit of +# piddling about. 
+USER_CXXFLAGS="$CXXFLAGS" + +# If the user supplied CFLAGS but not CXXFLAGS, use CFLAGS instead +if test x"$USER_CXXFLAGS" = x; then + if test x"$CFLAGS" != x; then + USER_CXXFLAGS="$CFLAGS" + fi +fi + +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. + shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. +for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else + ac_file='' +fi +if test -z "$ac_file"; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +$as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "C compiler cannot create executables +See \`config.log' for more details" "$LINENO" 5; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... " >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... " >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } + +rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if ${ac_cv_objext+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+   ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdarg.h>
+#include <stdio.h>
+struct stat;
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   These don't provoke an error unfortunately, instead are silently treated
+   as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ?
1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : + +fi + +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu + +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu +if test -z "$CXX"; then + if test -n "$CCC"; then + CXX=$CCC + else + if test -n "$ac_tool_prefix"; then + for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CXX"; then + ac_cv_prog_CXX="$CXX" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CXX=$ac_cv_prog_CXX +if test -n "$CXX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 +$as_echo "$CXX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CXX" && break + done +fi +if test -z "$CXX"; then + ac_ct_CXX=$CXX + for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CXX"; then + ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CXX="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CXX=$ac_cv_prog_ac_ct_CXX +if test -n "$ac_ct_CXX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 +$as_echo "$ac_ct_CXX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CXX" && break +done + + if test "x$ac_ct_CXX" = x; then + CXX="g++" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CXX=$ac_ct_CXX + fi +fi + + fi +fi +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 +$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } +if ${ac_cv_cxx_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_cxx_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 +$as_echo "$ac_cv_cxx_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GXX=yes +else + GXX= +fi +ac_test_CXXFLAGS=${CXXFLAGS+set} +ac_save_CXXFLAGS=$CXXFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 +$as_echo_n "checking whether $CXX accepts -g... " >&6; } +if ${ac_cv_prog_cxx_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_cxx_werror_flag=$ac_cxx_werror_flag + ac_cxx_werror_flag=yes + ac_cv_prog_cxx_g=no + CXXFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_cv_prog_cxx_g=yes +else + CXXFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + +else + ac_cxx_werror_flag=$ac_save_cxx_werror_flag + CXXFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_cv_prog_cxx_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_cxx_werror_flag=$ac_save_cxx_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 +$as_echo "$ac_cv_prog_cxx_g" >&6; } +if test "$ac_test_CXXFLAGS" = set; then + CXXFLAGS=$ac_save_CXXFLAGS +elif test $ac_cv_prog_cxx_g = yes; then + if test "$GXX" = yes; then + CXXFLAGS="-g -O2" + else + CXXFLAGS="-g" + fi +else + if test "$GXX" = yes; then + CXXFLAGS="-O2" + else + CXXFLAGS= + fi +fi +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu + +ac_aux_dir= +for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do + if test -f "$ac_dir/install-sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install-sh -c" + break + elif test -f "$ac_dir/install.sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install.sh -c" + break + elif test -f "$ac_dir/shtool"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/shtool install -c" + break + fi +done +if test -z "$ac_aux_dir"; then + as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 +fi + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. +ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. +ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. + + +# Find a good install program. We prefer a C program (faster), +# so one script is as good as another. But avoid the broken or +# incompatible versions: +# SysV /etc/install, /usr/sbin/install +# SunOS /usr/etc/install +# IRIX /sbin/install +# AIX /bin/install +# AmigaOS /C/install, which installs bootblocks on floppy discs +# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag +# AFS /usr/afsws/bin/install, which mishandles nonexistent args +# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" +# OS/2's system install, which has a completely different semantic +# ./install, which can be erroneously created by make from ./install.sh. +# Reject install programs that cannot install multiple files. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 +$as_echo_n "checking for a BSD-compatible install... " >&6; } +if test -z "$INSTALL"; then +if ${ac_cv_path_install+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + # Account for people who put trailing slashes in PATH elements. +case $as_dir/ in #(( + ./ | .// | /[cC]/* | \ + /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ + ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ + /usr/ucb/* ) ;; + *) + # OSF1 and SCO ODT 3.0 have their own names for install. 
+ # Don't use installbsd from OSF since it installs stuff as root + # by default. + for ac_prog in ginstall scoinst install; do + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then + if test $ac_prog = install && + grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # AIX install. It has an incompatible calling convention. + : + elif test $ac_prog = install && + grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # program-specific install script used by HP pwplus--don't use. + : + else + rm -rf conftest.one conftest.two conftest.dir + echo one > conftest.one + echo two > conftest.two + mkdir conftest.dir + if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && + test -s conftest.one && test -s conftest.two && + test -s conftest.dir/conftest.one && + test -s conftest.dir/conftest.two + then + ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" + break 3 + fi + fi + fi + done + done + ;; +esac + + done +IFS=$as_save_IFS + +rm -rf conftest.one conftest.two conftest.dir + +fi + if test "${ac_cv_path_install+set}" = set; then + INSTALL=$ac_cv_path_install + else + # As a last resort, use the slow shell script. Don't cache a + # value for INSTALL within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + INSTALL=$ac_install_sh + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 +$as_echo "$INSTALL" >&6; } + +# Use test -z because SunOS4 sh mishandles braces in ${var-val}. +# It thinks the first close brace ends the variable substitution. +test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' + +test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' + +test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 +$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } +if test -z "$MKDIR_P"; then + if ${ac_cv_path_mkdir+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in mkdir gmkdir; do + for ac_exec_ext in '' $ac_executable_extensions; do + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue + case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( + 'mkdir (GNU coreutils) '* | \ + 'mkdir (coreutils) '* | \ + 'mkdir (fileutils) '4.1*) + ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext + break 3;; + esac + done + done + done +IFS=$as_save_IFS + +fi + + test -d ./--version && rmdir ./--version + if test "${ac_cv_path_mkdir+set}" = set; then + MKDIR_P="$ac_cv_path_mkdir -p" + else + # As a last resort, use the slow shell script. Don't cache a + # value for MKDIR_P within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. 
+ MKDIR_P="$ac_install_sh -d" + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 +$as_echo "$MKDIR_P" >&6; } + + + +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 +$as_echo_n "checking how to run the C++ preprocessor... " >&6; } +if test -z "$CXXCPP"; then + if ${ac_cv_prog_CXXCPP+:} false; then : + $as_echo_n "(cached) " >&6 +else + # Double quotes because CXXCPP needs to be expanded + for CXXCPP in "$CXX -E" "/lib/cpp" + do + ac_preproc_ok=false +for ac_cxx_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + break +fi + + done + ac_cv_prog_CXXCPP=$CXXCPP + +fi + CXXCPP=$ac_cv_prog_CXXCPP +else + ac_cv_prog_CXXCPP=$CXXCPP +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 +$as_echo "$CXXCPP" >&6; } +ac_preproc_ok=false +for ac_cxx_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? 
"C++ preprocessor \"$CXXCPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } +fi + +ac_ext=cpp +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_cxx_compiler_gnu + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +$as_echo_n "checking for grep that handles long lines and -e... " >&6; } +if ${ac_cv_path_GREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in grep ggrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. + # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... " >&6; } +if ${ac_cv_path_EGREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. 
+  # Check for GNU $ac_path_EGREP
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'EGREP' >> "conftest.nl"
+    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_EGREP="$ac_path_EGREP"
+      ac_path_EGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_EGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_EGREP"; then
+    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_EGREP=$EGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+  ac_cv_header_stdc=yes
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "free" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  if test "$cross_compiling" = yes; then :
+  :
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ?
((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_cxx_try_run "$LINENO"; then : + +else + ac_cv_header_stdc=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then + +$as_echo "#define STDC_HEADERS 1" >>confdefs.h + +fi + + +# These are the flags Autoconf guesses for us; we use them later if +# the user has set none and we are not using GCC (so lack our own +# preferred flags) +AUTOCONF_CXXFLAGS="$CXXFLAGS" + + + + + + + + +if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. +set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_PKG_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $PKG_CONFIG in + [\\/]* | ?:[\\/]*) + ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PKG_CONFIG=$ac_cv_path_PKG_CONFIG +if test -n "$PKG_CONFIG"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 +$as_echo "$PKG_CONFIG" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_path_PKG_CONFIG"; then + ac_pt_PKG_CONFIG=$PKG_CONFIG + # Extract the first word of "pkg-config", so it can be a program name with args. +set dummy pkg-config; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $ac_pt_PKG_CONFIG in + [\\/]* | ?:[\\/]*) + ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG +if test -n "$ac_pt_PKG_CONFIG"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 +$as_echo "$ac_pt_PKG_CONFIG" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_pt_PKG_CONFIG" = x; then + PKG_CONFIG="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + PKG_CONFIG=$ac_pt_PKG_CONFIG + fi +else + PKG_CONFIG="$ac_cv_path_PKG_CONFIG" +fi + +fi +if test -n "$PKG_CONFIG"; then + _pkg_min_version=0.9.0 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 +$as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } + if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + PKG_CONFIG="" + fi +fi + + + + +if test x$QMAKE = x ; then + # Extract the first word of "qmake-qt4", so it can be a program name with args. +set dummy qmake-qt4; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_QMAKE+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$QMAKE"; then + ac_cv_prog_QMAKE="$QMAKE" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $QTDIR/bin/ +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_QMAKE="$QTDIR/bin/qmake-qt4" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +QMAKE=$ac_cv_prog_QMAKE +if test -n "$QMAKE"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $QMAKE" >&5 +$as_echo "$QMAKE" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test x$QMAKE = x ; then + # Extract the first word of "qmake", so it can be a program name with args. +set dummy qmake; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_QMAKE+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$QMAKE"; then + ac_cv_prog_QMAKE="$QMAKE" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $QTDIR/bin/ +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_QMAKE="$QTDIR/bin/qmake" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +QMAKE=$ac_cv_prog_QMAKE +if test -n "$QMAKE"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $QMAKE" >&5 +$as_echo "$QMAKE" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test x$QMAKE = x ; then + # Extract the first word of "qmake.exe", so it can be a program name with args. +set dummy qmake.exe; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_QMAKE+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$QMAKE"; then + ac_cv_prog_QMAKE="$QMAKE" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $QTDIR/bin/ +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_QMAKE="$QTDIR/bin/qmake.exe" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +QMAKE=$ac_cv_prog_QMAKE +if test -n "$QMAKE"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $QMAKE" >&5 +$as_echo "$QMAKE" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test x$QMAKE = x ; then + # Extract the first word of "qmake-qt4", so it can be a program name with args. +set dummy qmake-qt4; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_QMAKE+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$QMAKE"; then + ac_cv_prog_QMAKE="$QMAKE" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_QMAKE="qmake-qt4" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +QMAKE=$ac_cv_prog_QMAKE +if test -n "$QMAKE"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $QMAKE" >&5 +$as_echo "$QMAKE" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test x$QMAKE = x ; then + # Extract the first word of "qmake", so it can be a program name with args. +set dummy qmake; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_QMAKE+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$QMAKE"; then + ac_cv_prog_QMAKE="$QMAKE" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_QMAKE="qmake" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +QMAKE=$ac_cv_prog_QMAKE +if test -n "$QMAKE"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $QMAKE" >&5 +$as_echo "$QMAKE" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test x$QMAKE = x ; then + as_fn_error $? " +Failed to find the required qmake-qt4 or qmake program. Please +ensure you have the necessary Qt4 development files installed, and +if necessary set QTDIR to the location of your Qt4 installation. +" "$LINENO" 5 +fi + +# Suitable versions of qmake should print out something like: +# +# QMake version 2.01a +# Using Qt version 4.6.3 in /usr/lib +# +# This may be translated, so we check only for the numbers (2.x and 4.x +# in that order). +# +QMAKE_VERSION_OUTPUT=`$QMAKE -v` +case "$QMAKE_VERSION_OUTPUT" in + *2.*4.*) ;; + *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: + *** The version of qmake found in \"$QMAKE\" looks like it might be + from the wrong version of Qt (Qt4 is required). Please check + that this is the correct version of qmake for Qt4 builds. +" >&5 +$as_echo "$as_me: WARNING: + *** The version of qmake found in \"$QMAKE\" looks like it might be + from the wrong version of Qt (Qt4 is required). Please check + that this is the correct version of qmake for Qt4 builds. +" >&2;} +esac + +case "`uname`" in + *Darwin*) QMAKE="$QMAKE -spec macx-g++";; +esac + + + +SV_DEFINES_DEBUG="-DDEBUG -DBUILD_DEBUG -DWANT_TIMING" +SV_DEFINES_RELEASE="-DNDEBUG -DBUILD_RELEASE -DNO_TIMING" +SV_DEFINES_MINIMAL="$SV_DEFINES_RELEASE" + +# Now we have: USER_CXXFLAGS contains any flags the user set +# explicitly; AUTOCONF_CXXFLAGS contains flags that Autoconf thought +# we should use. If we have GCC, we override the latter but then +# allow ourselves to be overridden (later) by the former + +CXXFLAGS_DEBUG="$AUTOCONF_CXXFLAGS" +CXXFLAGS_RELEASE="$AUTOCONF_CXXFLAGS" +CXXFLAGS_MINIMAL="$AUTOCONF_CXXFLAGS" + +if test "x$GCC" = "xyes"; then + CXXFLAGS_DEBUG="-Wall -Woverloaded-virtual -Wextra -Wformat-nonliteral -Wformat-security -Winit-self -Wswitch-enum -g -pipe" + CXXFLAGS_RELEASE="-g0 -O2 -Wall -pipe" + CXXFLAGS_MINIMAL="-g0 -O0" +fi + +CXXFLAGS_BUILD="$CXXFLAGS_RELEASE" +SV_DEFINES_BUILD="$SV_DEFINES_RELEASE" + +QMAKE_CONFIG="release" + +# Check whether --enable-debug was given. +if test "${enable_debug+set}" = set; then : + enableval=$enable_debug; { $as_echo "$as_me:${as_lineno-$LINENO}: enabling debug build" >&5 +$as_echo "$as_me: enabling debug build" >&6;} +QMAKE_CONFIG="debug" +CXXFLAGS_BUILD="$CXXFLAGS_DEBUG" +SV_DEFINES_BUILD="$SV_DEFINES_DEBUG" +fi + + +if test x"$USER_CXXFLAGS" != x; then + { $as_echo "$as_me:${as_lineno-$LINENO}: The CXXFLAGS environment variable is set to \"$USER_CXXFLAGS\"." >&5 +$as_echo "$as_me: The CXXFLAGS environment variable is set to \"$USER_CXXFLAGS\"." >&6;} + { $as_echo "$as_me:${as_lineno-$LINENO}: Overriding default compiler flags with the above user setting." >&5 +$as_echo "$as_me: Overriding default compiler flags with the above user setting." >&6;} + CXXFLAGS_BUILD="$USER_CXXFLAGS" + CXXFLAGS_MINIMAL="$USER_CXXFLAGS" +fi + +CXXFLAGS="$CXXFLAGS_BUILD $SV_DEFINES_BUILD" + +# On IRIX 5.3, sys/types and inttypes.h are conflicting. 
+for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_cxx_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + + +SV_MODULE_MODULE=bz2 +SV_MODULE_VERSION_TEST="" +SV_MODULE_HEADER=bzlib.h +SV_MODULE_LIB=bz2 +SV_MODULE_FUNC=BZ2_bzReadOpen +SV_MODULE_HAVE=HAVE_$(echo bz2 | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$bz2_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $bz2_CFLAGS" + LIBS="$LIBS $bz2_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for bz2" >&5 +$as_echo_n "checking for bz2... " >&6; } + +if test -n "$bz2_CFLAGS"; then + pkg_cv_bz2_CFLAGS="$bz2_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_bz2_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$bz2_LIBS"; then + pkg_cv_bz2_LIBS="$bz2_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_bz2_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + bz2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + bz2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$bz2_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + bz2_CFLAGS=$pkg_cv_bz2_CFLAGS + bz2_LIBS=$pkg_cv_bz2_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $bz2_CFLAGS";LIBS="$LIBS $bz2_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=fftw3 +SV_MODULE_VERSION_TEST="fftw3 >= 3.0.0" +SV_MODULE_HEADER=fftw3.h +SV_MODULE_LIB=fftw3 +SV_MODULE_FUNC=fftw_execute +SV_MODULE_HAVE=HAVE_$(echo fftw3 | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$fftw3_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $fftw3_CFLAGS" + LIBS="$LIBS $fftw3_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fftw3" >&5 +$as_echo_n "checking for fftw3... " >&6; } + +if test -n "$fftw3_CFLAGS"; then + pkg_cv_fftw3_CFLAGS="$fftw3_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fftw3_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$fftw3_LIBS"; then + pkg_cv_fftw3_LIBS="$fftw3_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fftw3_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + fftw3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + fftw3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$fftw3_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + fftw3_CFLAGS=$pkg_cv_fftw3_CFLAGS + fftw3_LIBS=$pkg_cv_fftw3_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $fftw3_CFLAGS";LIBS="$LIBS $fftw3_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=fftw3f +SV_MODULE_VERSION_TEST="fftw3f >= 3.0.0" +SV_MODULE_HEADER=fftw3.h +SV_MODULE_LIB=fftw3f +SV_MODULE_FUNC=fftwf_execute +SV_MODULE_HAVE=HAVE_$(echo fftw3f | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$fftw3f_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $fftw3f_CFLAGS" + LIBS="$LIBS $fftw3f_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fftw3f" >&5 +$as_echo_n "checking for fftw3f... " >&6; } + +if test -n "$fftw3f_CFLAGS"; then + pkg_cv_fftw3f_CFLAGS="$fftw3f_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fftw3f_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$fftw3f_LIBS"; then + pkg_cv_fftw3f_LIBS="$fftw3f_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fftw3f_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + fftw3f_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + fftw3f_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$fftw3f_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + fftw3f_CFLAGS=$pkg_cv_fftw3f_CFLAGS + fftw3f_LIBS=$pkg_cv_fftw3f_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $fftw3f_CFLAGS";LIBS="$LIBS $fftw3f_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=sndfile +SV_MODULE_VERSION_TEST="sndfile >= 1.0.16" +SV_MODULE_HEADER=sndfile.h +SV_MODULE_LIB=sndfile +SV_MODULE_FUNC=sf_open +SV_MODULE_HAVE=HAVE_$(echo sndfile | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$sndfile_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $sndfile_CFLAGS" + LIBS="$LIBS $sndfile_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sndfile" >&5 +$as_echo_n "checking for sndfile... " >&6; } + +if test -n "$sndfile_CFLAGS"; then + pkg_cv_sndfile_CFLAGS="$sndfile_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_sndfile_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$sndfile_LIBS"; then + pkg_cv_sndfile_LIBS="$sndfile_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_sndfile_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + sndfile_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + sndfile_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$sndfile_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + sndfile_CFLAGS=$pkg_cv_sndfile_CFLAGS + sndfile_LIBS=$pkg_cv_sndfile_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $sndfile_CFLAGS";LIBS="$LIBS $sndfile_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=samplerate +SV_MODULE_VERSION_TEST="samplerate >= 0.1.2" +SV_MODULE_HEADER=samplerate.h +SV_MODULE_LIB=samplerate +SV_MODULE_FUNC=src_new +SV_MODULE_HAVE=HAVE_$(echo samplerate | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$samplerate_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $samplerate_CFLAGS" + LIBS="$LIBS $samplerate_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for samplerate" >&5 +$as_echo_n "checking for samplerate... " >&6; } + +if test -n "$samplerate_CFLAGS"; then + pkg_cv_samplerate_CFLAGS="$samplerate_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_samplerate_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$samplerate_LIBS"; then + pkg_cv_samplerate_LIBS="$samplerate_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_samplerate_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + samplerate_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + samplerate_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$samplerate_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + samplerate_CFLAGS=$pkg_cv_samplerate_CFLAGS + samplerate_LIBS=$pkg_cv_samplerate_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $samplerate_CFLAGS";LIBS="$LIBS $samplerate_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=vamp +SV_MODULE_VERSION_TEST="vamp >= 2.1" +SV_MODULE_HEADER=vamp/vamp.h +SV_MODULE_LIB= +SV_MODULE_FUNC= +SV_MODULE_HAVE=HAVE_$(echo vamp | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$vamp_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $vamp_CFLAGS" + LIBS="$LIBS $vamp_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for vamp" >&5 +$as_echo_n "checking for vamp... " >&6; } + +if test -n "$vamp_CFLAGS"; then + pkg_cv_vamp_CFLAGS="$vamp_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_vamp_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$vamp_LIBS"; then + pkg_cv_vamp_LIBS="$vamp_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_vamp_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + vamp_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + vamp_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$vamp_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + vamp_CFLAGS=$pkg_cv_vamp_CFLAGS + vamp_LIBS=$pkg_cv_vamp_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $vamp_CFLAGS";LIBS="$LIBS $vamp_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=vamphostsdk +SV_MODULE_VERSION_TEST="vamp-hostsdk >= 2.3.1" +SV_MODULE_HEADER=vamp-hostsdk/PluginLoader.h +SV_MODULE_LIB=vamp-hostsdk +SV_MODULE_FUNC=libvamphostsdk_v_2_3_1_present +SV_MODULE_HAVE=HAVE_$(echo vamphostsdk | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$vamphostsdk_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $vamphostsdk_CFLAGS" + LIBS="$LIBS $vamphostsdk_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for vamphostsdk" >&5 +$as_echo_n "checking for vamphostsdk... " >&6; } + +if test -n "$vamphostsdk_CFLAGS"; then + pkg_cv_vamphostsdk_CFLAGS="$vamphostsdk_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_vamphostsdk_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$vamphostsdk_LIBS"; then + pkg_cv_vamphostsdk_LIBS="$vamphostsdk_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_vamphostsdk_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + vamphostsdk_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + vamphostsdk_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$vamphostsdk_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + vamphostsdk_CFLAGS=$pkg_cv_vamphostsdk_CFLAGS + vamphostsdk_LIBS=$pkg_cv_vamphostsdk_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $vamphostsdk_CFLAGS";LIBS="$LIBS $vamphostsdk_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=rubberband +SV_MODULE_VERSION_TEST="rubberband" +SV_MODULE_HEADER=rubberband/RubberBandStretcher.h +SV_MODULE_LIB=rubberband +SV_MODULE_FUNC=rubberband_new +SV_MODULE_HAVE=HAVE_$(echo rubberband | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$rubberband_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $rubberband_CFLAGS" + LIBS="$LIBS $rubberband_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for rubberband" >&5 +$as_echo_n "checking for rubberband... " >&6; } + +if test -n "$rubberband_CFLAGS"; then + pkg_cv_rubberband_CFLAGS="$rubberband_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_rubberband_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$rubberband_LIBS"; then + pkg_cv_rubberband_LIBS="$rubberband_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_rubberband_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + rubberband_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + rubberband_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$rubberband_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + rubberband_CFLAGS=$pkg_cv_rubberband_CFLAGS + rubberband_LIBS=$pkg_cv_rubberband_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $rubberband_CFLAGS";LIBS="$LIBS $rubberband_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + +SV_MODULE_MODULE=dataquay +SV_MODULE_VERSION_TEST="dataquay >= 0.9" +SV_MODULE_HEADER=dataquay/Uri.h +SV_MODULE_LIB=dataquay +SV_MODULE_FUNC=dataquay_v_0_9_present +SV_MODULE_HAVE=HAVE_$(echo dataquay | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$dataquay_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $dataquay_CFLAGS" + LIBS="$LIBS $dataquay_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for dataquay" >&5 +$as_echo_n "checking for dataquay... " >&6; } + +if test -n "$dataquay_CFLAGS"; then + pkg_cv_dataquay_CFLAGS="$dataquay_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_dataquay_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$dataquay_LIBS"; then + pkg_cv_dataquay_LIBS="$dataquay_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_dataquay_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + dataquay_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + dataquay_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$dataquay_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find required module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + dataquay_CFLAGS=$pkg_cv_dataquay_CFLAGS + dataquay_LIBS=$pkg_cv_dataquay_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $dataquay_CFLAGS";LIBS="$LIBS $dataquay_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE" +else + as_fn_error $? "Failed to find header $SV_MODULE_HEADER for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + as_fn_error $? 
"Failed to find library $SV_MODULE_LIB for required module $SV_MODULE_MODULE" "$LINENO" 5 +fi + + fi +fi + + + +SV_MODULE_MODULE=liblo +SV_MODULE_VERSION_TEST="" +SV_MODULE_HEADER=lo/lo.h +SV_MODULE_LIB=lo +SV_MODULE_FUNC=lo_address_new +SV_MODULE_HAVE=HAVE_$(echo liblo | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$liblo_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $liblo_CFLAGS" + LIBS="$LIBS $liblo_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for liblo" >&5 +$as_echo_n "checking for liblo... " >&6; } + +if test -n "$liblo_CFLAGS"; then + pkg_cv_liblo_CFLAGS="$liblo_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_liblo_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$liblo_LIBS"; then + pkg_cv_liblo_LIBS="$liblo_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_liblo_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + liblo_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + liblo_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$liblo_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + liblo_CFLAGS=$pkg_cv_liblo_CFLAGS + liblo_LIBS=$pkg_cv_liblo_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $liblo_CFLAGS";LIBS="$LIBS $liblo_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=portaudio_2_0 +SV_MODULE_VERSION_TEST="portaudio-2.0 >= 19" +SV_MODULE_HEADER=portaudio.h +SV_MODULE_LIB=portaudio +SV_MODULE_FUNC=Pa_IsFormatSupported +SV_MODULE_HAVE=HAVE_$(echo portaudio_2_0 | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$portaudio_2_0_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $portaudio_2_0_CFLAGS" + LIBS="$LIBS $portaudio_2_0_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for portaudio_2_0" >&5 +$as_echo_n "checking for portaudio_2_0... " >&6; } + +if test -n "$portaudio_2_0_CFLAGS"; then + pkg_cv_portaudio_2_0_CFLAGS="$portaudio_2_0_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_portaudio_2_0_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$portaudio_2_0_LIBS"; then + pkg_cv_portaudio_2_0_LIBS="$portaudio_2_0_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_portaudio_2_0_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + portaudio_2_0_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + portaudio_2_0_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$portaudio_2_0_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + portaudio_2_0_CFLAGS=$pkg_cv_portaudio_2_0_CFLAGS + portaudio_2_0_LIBS=$pkg_cv_portaudio_2_0_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $portaudio_2_0_CFLAGS";LIBS="$LIBS $portaudio_2_0_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=JACK +SV_MODULE_VERSION_TEST="jack >= 0.100" +SV_MODULE_HEADER=jack/jack.h +SV_MODULE_LIB=jack +SV_MODULE_FUNC=jack_client_open +SV_MODULE_HAVE=HAVE_$(echo JACK | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$JACK_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $JACK_CFLAGS" + LIBS="$LIBS $JACK_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JACK" >&5 +$as_echo_n "checking for JACK... " >&6; } + +if test -n "$JACK_CFLAGS"; then + pkg_cv_JACK_CFLAGS="$JACK_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JACK_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$JACK_LIBS"; then + pkg_cv_JACK_LIBS="$JACK_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JACK_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + JACK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + JACK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$JACK_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + JACK_CFLAGS=$pkg_cv_JACK_CFLAGS + JACK_LIBS=$pkg_cv_JACK_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $JACK_CFLAGS";LIBS="$LIBS $JACK_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=libpulse +SV_MODULE_VERSION_TEST="libpulse >= 0.9" +SV_MODULE_HEADER=pulse/pulseaudio.h +SV_MODULE_LIB=pulse +SV_MODULE_FUNC=pa_stream_new +SV_MODULE_HAVE=HAVE_$(echo libpulse | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$libpulse_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $libpulse_CFLAGS" + LIBS="$LIBS $libpulse_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for libpulse" >&5 +$as_echo_n "checking for libpulse... " >&6; } + +if test -n "$libpulse_CFLAGS"; then + pkg_cv_libpulse_CFLAGS="$libpulse_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_libpulse_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$libpulse_LIBS"; then + pkg_cv_libpulse_LIBS="$libpulse_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_libpulse_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + libpulse_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + libpulse_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$libpulse_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + libpulse_CFLAGS=$pkg_cv_libpulse_CFLAGS + libpulse_LIBS=$pkg_cv_libpulse_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $libpulse_CFLAGS";LIBS="$LIBS $libpulse_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=lrdf +SV_MODULE_VERSION_TEST="lrdf >= 0.2" +SV_MODULE_HEADER=lrdf.h +SV_MODULE_LIB=lrdf +SV_MODULE_FUNC=lrdf_init +SV_MODULE_HAVE=HAVE_$(echo lrdf | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$lrdf_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $lrdf_CFLAGS" + LIBS="$LIBS $lrdf_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for lrdf" >&5 +$as_echo_n "checking for lrdf... " >&6; } + +if test -n "$lrdf_CFLAGS"; then + pkg_cv_lrdf_CFLAGS="$lrdf_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_lrdf_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$lrdf_LIBS"; then + pkg_cv_lrdf_LIBS="$lrdf_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_lrdf_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + lrdf_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + lrdf_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$lrdf_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + lrdf_CFLAGS=$pkg_cv_lrdf_CFLAGS + lrdf_LIBS=$pkg_cv_lrdf_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $lrdf_CFLAGS";LIBS="$LIBS $lrdf_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=oggz +SV_MODULE_VERSION_TEST="oggz >= 1.0.0" +SV_MODULE_HEADER=oggz/oggz.h +SV_MODULE_LIB=oggz +SV_MODULE_FUNC=oggz_run +SV_MODULE_HAVE=HAVE_$(echo oggz | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$oggz_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $oggz_CFLAGS" + LIBS="$LIBS $oggz_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for oggz" >&5 +$as_echo_n "checking for oggz... " >&6; } + +if test -n "$oggz_CFLAGS"; then + pkg_cv_oggz_CFLAGS="$oggz_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_oggz_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$oggz_LIBS"; then + pkg_cv_oggz_LIBS="$oggz_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_oggz_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + oggz_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + oggz_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$oggz_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + oggz_CFLAGS=$pkg_cv_oggz_CFLAGS + oggz_LIBS=$pkg_cv_oggz_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $oggz_CFLAGS";LIBS="$LIBS $oggz_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=fishsound +SV_MODULE_VERSION_TEST="fishsound >= 1.0.0" +SV_MODULE_HEADER=fishsound/fishsound.h +SV_MODULE_LIB=fishsound +SV_MODULE_FUNC=fish_sound_new +SV_MODULE_HAVE=HAVE_$(echo fishsound | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$fishsound_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $fishsound_CFLAGS" + LIBS="$LIBS $fishsound_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fishsound" >&5 +$as_echo_n "checking for fishsound... " >&6; } + +if test -n "$fishsound_CFLAGS"; then + pkg_cv_fishsound_CFLAGS="$fishsound_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fishsound_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$fishsound_LIBS"; then + pkg_cv_fishsound_LIBS="$fishsound_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_fishsound_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + fishsound_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + fishsound_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$fishsound_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + fishsound_CFLAGS=$pkg_cv_fishsound_CFLAGS + fishsound_LIBS=$pkg_cv_fishsound_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $fishsound_CFLAGS";LIBS="$LIBS $fishsound_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=mad +SV_MODULE_VERSION_TEST="mad >= 0.15.0" +SV_MODULE_HEADER=mad.h +SV_MODULE_LIB=mad +SV_MODULE_FUNC=mad_decoder_init +SV_MODULE_HAVE=HAVE_$(echo mad | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$mad_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $mad_CFLAGS" + LIBS="$LIBS $mad_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for mad" >&5 +$as_echo_n "checking for mad... " >&6; } + +if test -n "$mad_CFLAGS"; then + pkg_cv_mad_CFLAGS="$mad_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_mad_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$mad_LIBS"; then + pkg_cv_mad_LIBS="$mad_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_mad_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + mad_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + mad_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$mad_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + mad_CFLAGS=$pkg_cv_mad_CFLAGS + mad_LIBS=$pkg_cv_mad_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $mad_CFLAGS";LIBS="$LIBS $mad_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + +SV_MODULE_MODULE=id3tag +SV_MODULE_VERSION_TEST="id3tag >= 0.15.0" +SV_MODULE_HEADER=id3tag.h +SV_MODULE_LIB=id3tag +SV_MODULE_FUNC=id3_tag_new +SV_MODULE_HAVE=HAVE_$(echo id3tag | tr 'a-z' 'A-Z') +SV_MODULE_FAILED=1 +if test -n "$id3tag_LIBS" ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: User set ${SV_MODULE_MODULE}_LIBS explicitly, skipping test for $SV_MODULE_MODULE" >&6;} + CXXFLAGS="$CXXFLAGS $id3tag_CFLAGS" + LIBS="$LIBS $id3tag_LIBS" + SV_MODULE_FAILED="" +fi +if test -z "$SV_MODULE_VERSION_TEST" ; then + SV_MODULE_VERSION_TEST=$SV_MODULE_MODULE +fi +if test -n "$SV_MODULE_FAILED" && test -n "$PKG_CONFIG"; then + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for id3tag" >&5 +$as_echo_n "checking for id3tag... " >&6; } + +if test -n "$id3tag_CFLAGS"; then + pkg_cv_id3tag_CFLAGS="$id3tag_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_id3tag_CFLAGS=`$PKG_CONFIG --cflags "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$id3tag_LIBS"; then + pkg_cv_id3tag_LIBS="$id3tag_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$SV_MODULE_VERSION_TEST\""; } >&5 + ($PKG_CONFIG --exists --print-errors "$SV_MODULE_VERSION_TEST") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_id3tag_LIBS=`$PKG_CONFIG --libs "$SV_MODULE_VERSION_TEST" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + id3tag_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + else + id3tag_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$SV_MODULE_VERSION_TEST" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$id3tag_PKG_ERRORS" >&5 + + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&5 +$as_echo "$as_me: Failed to find optional module $SV_MODULE_MODULE using pkg-config, trying again by old-fashioned means" >&6;} +else + id3tag_CFLAGS=$pkg_cv_id3tag_CFLAGS + id3tag_LIBS=$pkg_cv_id3tag_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVES="$HAVES $SV_MODULE_HAVE";CXXFLAGS="$CXXFLAGS $id3tag_CFLAGS";LIBS="$LIBS $id3tag_LIBS";SV_MODULE_FAILED="" +fi +fi +if test -n "$SV_MODULE_FAILED"; then + as_ac_Header=`$as_echo "ac_cv_header_$SV_MODULE_HEADER" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$SV_MODULE_HEADER" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + HAVES="$HAVES $SV_MODULE_HAVE";SV_MODULE_FAILED="" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find header $SV_MODULE_HEADER for optional module $SV_MODULE_MODULE" >&6;} +fi + + + if test -z "$SV_MODULE_FAILED"; then + if test -n "$SV_MODULE_LIB"; then + as_ac_Lib=`$as_echo "ac_cv_lib_$SV_MODULE_LIB''_$SV_MODULE_FUNC" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB" >&5 +$as_echo_n "checking for $SV_MODULE_FUNC in -l$SV_MODULE_LIB... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-l$SV_MODULE_LIB $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char $SV_MODULE_FUNC (); +int +main () +{ +return $SV_MODULE_FUNC (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$as_ac_Lib=yes" +else + eval "$as_ac_Lib=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + LIBS="$LIBS -l$SV_MODULE_LIB" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&5 +$as_echo "$as_me: Failed to find library $SV_MODULE_LIB for optional module $SV_MODULE_MODULE" >&6;} +fi + + fi + fi +fi + + + + + + + + + + + + + + + + + + + + + +ac_config_files="$ac_config_files config.pri" + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + if test ! 
-f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +# Transform confdefs.h into DEFS. +# Protect against shell expansion while executing Makefile rules. +# Protect against Makefile macro expansion. +# +# If the first sed substitution is executed (which looks for macros that +# take arguments), then branch to the quote section. Otherwise, +# look for a macro that doesn't take arguments. +ac_script=' +:mline +/\\$/{ + N + s,\\\n,, + b mline +} +t clear +:clear +s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g +t quote +s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g +t quote +b any +:quote +s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g +s/\[/\\&/g +s/\]/\\&/g +s/\$/$$/g +H +:any +${ + g + s/^\n// + s/\n/ /g + p +} +' +DEFS=`sed -n "$ac_script" confdefs.h` + + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. + as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
+as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. 
+as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by SVcore $as_me 1.8, which was +generated by GNU Autoconf 2.69. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + +Configuration files: +$config_files + +Report bugs to ." 
+ +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" +ac_cs_version="\\ +SVcore config.status 1.8 +configured by $0, generated by GNU Autoconf 2.69, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2012 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +INSTALL='$INSTALL' +MKDIR_P='$MKDIR_P' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. + -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + $as_echo "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + $as_echo "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h | --help | --hel | -h ) + $as_echo "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + $as_echo "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. +for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "config.pri") CONFIG_FILES="$CONFIG_FILES config.pri" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files +fi + +# Have a temporary directory for convenience. 
Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). +if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + + +eval set X " :F $CONFIG_FILES " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. 
$configure_input" + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + + case $INSTALL in + [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; + *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; + esac + ac_MKDIR_P=$MKDIR_P + case $MKDIR_P in + [\\/$]* | ?:[\\/]* ) ;; + */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; + esac +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. +ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? 
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +s&@INSTALL@&$ac_INSTALL&;t t +s&@MKDIR_P@&$ac_MKDIR_P&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. +# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + + +if ! $QMAKE -r; then + as_fn_error $? "qmake failed: Command was \"$QMAKE -r\"" "$LINENO" 5 +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: + +Configuration complete. +Please check the above messages for any warnings that you +might care about, and then run \"make\". + +The file config.pri contains the configuration settings for +qmake. If you want to adjust these by hand, edit config.pri +and run \"qmake -r\" again to regenerate the Makefile. +" >&5 +$as_echo "$as_me: + +Configuration complete. 
+Please check the above messages for any warnings that you +might care about, and then run \"make\". + +The file config.pri contains the configuration settings for +qmake. If you want to adjust these by hand, edit config.pri +and run \"qmake -r\" again to regenerate the Makefile. +" >&6;} diff -r 4efa7429cd85 -r c10cb8782576 configure.ac --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/configure.ac Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,131 @@ + +AC_INIT(SVcore, 1.8, cannam@all-day-breakfast.com) + +AC_CONFIG_SRCDIR(base/Pitch.h) + +# Autoconf will set CXXFLAGS; we don't usually want it to, because we +# either define our own flags (at least if GCC is in use) or else use +# the user's preferences. We need to ensure CXXFLAGS is only set if +# the user has expressly set it. So, save the user's (or empty) +# setting now and restore it after Autoconf has done its bit of +# piddling about. +USER_CXXFLAGS="$CXXFLAGS" + +# If the user supplied CFLAGS but not CXXFLAGS, use CFLAGS instead +if test x"$USER_CXXFLAGS" = x; then + if test x"$CFLAGS" != x; then + USER_CXXFLAGS="$CFLAGS" + fi +fi + +AC_LANG_CPLUSPLUS + +AC_PROG_CC +AC_PROG_CXX +AC_PROG_INSTALL +AC_PROG_MKDIR_P + +AC_HEADER_STDC + +# These are the flags Autoconf guesses for us; we use them later if +# the user has set none and we are not using GCC (so lack our own +# preferred flags) +AUTOCONF_CXXFLAGS="$CXXFLAGS" + +PKG_PROG_PKG_CONFIG + +SV_CHECK_QT + +SV_DEFINES_DEBUG="-DDEBUG -DBUILD_DEBUG -DWANT_TIMING" +SV_DEFINES_RELEASE="-DNDEBUG -DBUILD_RELEASE -DNO_TIMING" +SV_DEFINES_MINIMAL="$SV_DEFINES_RELEASE" + +# Now we have: USER_CXXFLAGS contains any flags the user set +# explicitly; AUTOCONF_CXXFLAGS contains flags that Autoconf thought +# we should use. If we have GCC, we override the latter but then +# allow ourselves to be overridden (later) by the former + +CXXFLAGS_DEBUG="$AUTOCONF_CXXFLAGS" +CXXFLAGS_RELEASE="$AUTOCONF_CXXFLAGS" +CXXFLAGS_MINIMAL="$AUTOCONF_CXXFLAGS" + +if test "x$GCC" = "xyes"; then + CXXFLAGS_DEBUG="-Wall -Woverloaded-virtual -Wextra -Wformat-nonliteral -Wformat-security -Winit-self -Wswitch-enum -g -pipe" + CXXFLAGS_RELEASE="-g0 -O2 -Wall -pipe" + CXXFLAGS_MINIMAL="-g0 -O0" +fi + +CXXFLAGS_BUILD="$CXXFLAGS_RELEASE" +SV_DEFINES_BUILD="$SV_DEFINES_RELEASE" + +QMAKE_CONFIG="release" + +AC_ARG_ENABLE(debug, [AS_HELP_STRING([--enable-debug],[enable debug support [default=no]])],[AC_MSG_NOTICE([enabling debug build]) +QMAKE_CONFIG="debug" +CXXFLAGS_BUILD="$CXXFLAGS_DEBUG" +SV_DEFINES_BUILD="$SV_DEFINES_DEBUG"]) + +if test x"$USER_CXXFLAGS" != x; then + AC_MSG_NOTICE([The CXXFLAGS environment variable is set to "$USER_CXXFLAGS".]) + AC_MSG_NOTICE(Overriding default compiler flags with the above user setting.) 
+ CXXFLAGS_BUILD="$USER_CXXFLAGS" + CXXFLAGS_MINIMAL="$USER_CXXFLAGS" +fi + +CXXFLAGS="$CXXFLAGS_BUILD $SV_DEFINES_BUILD" + +SV_MODULE_REQUIRED([bz2],[],[bzlib.h],[bz2],[BZ2_bzReadOpen]) +SV_MODULE_REQUIRED([fftw3],[fftw3 >= 3.0.0],[fftw3.h],[fftw3],[fftw_execute]) +SV_MODULE_REQUIRED([fftw3f],[fftw3f >= 3.0.0],[fftw3.h],[fftw3f],[fftwf_execute]) +SV_MODULE_REQUIRED([sndfile],[sndfile >= 1.0.16],[sndfile.h],[sndfile],[sf_open]) +SV_MODULE_REQUIRED([samplerate],[samplerate >= 0.1.2],[samplerate.h],[samplerate],[src_new]) +SV_MODULE_REQUIRED([vamp],[vamp >= 2.1],[vamp/vamp.h],[],[]) +SV_MODULE_REQUIRED([vamphostsdk],[vamp-hostsdk >= 2.3.1],[vamp-hostsdk/PluginLoader.h],[vamp-hostsdk],[libvamphostsdk_v_2_3_1_present]) +SV_MODULE_REQUIRED([rubberband],[rubberband],[rubberband/RubberBandStretcher.h],[rubberband],[rubberband_new]) +SV_MODULE_REQUIRED([dataquay],[dataquay >= 0.9],[dataquay/Uri.h],[dataquay],[dataquay_v_0_9_present]) + +SV_MODULE_OPTIONAL([liblo],[],[lo/lo.h],[lo],[lo_address_new]) +SV_MODULE_OPTIONAL([portaudio_2_0],[portaudio-2.0 >= 19],[portaudio.h],[portaudio],[Pa_IsFormatSupported]) +SV_MODULE_OPTIONAL([JACK],[jack >= 0.100],[jack/jack.h],[jack],[jack_client_open]) +SV_MODULE_OPTIONAL([libpulse],[libpulse >= 0.9],[pulse/pulseaudio.h],[pulse],[pa_stream_new]) +SV_MODULE_OPTIONAL([lrdf],[lrdf >= 0.2],[lrdf.h],[lrdf],[lrdf_init]) +SV_MODULE_OPTIONAL([oggz],[oggz >= 1.0.0],[oggz/oggz.h],[oggz],[oggz_run]) +SV_MODULE_OPTIONAL([fishsound],[fishsound >= 1.0.0],[fishsound/fishsound.h],[fishsound],[fish_sound_new]) +SV_MODULE_OPTIONAL([mad],[mad >= 0.15.0],[mad.h],[mad],[mad_decoder_init]) +SV_MODULE_OPTIONAL([id3tag],[id3tag >= 0.15.0],[id3tag.h],[id3tag],[id3_tag_new]) + +AC_SUBST(PERL) +AC_SUBST(XARGS) +AC_SUBST(MAKEDEPEND) +AC_SUBST(SHA1SUM) +AC_SUBST(CUT) + +AC_SUBST(CFLAGS) +AC_SUBST(CXXFLAGS) +AC_SUBST(CXXFLAGS_MINIMAL) +AC_SUBST(HAVES) +AC_SUBST(LIBS) + +AC_SUBST(CC) +AC_SUBST(CXX) + +AC_SUBST(QMAKE_CONFIG) + +AC_SUBST(CODENAME) + +AC_OUTPUT([config.pri]) + +if ! $QMAKE -r; then + AC_MSG_ERROR([qmake failed: Command was "$QMAKE -r"]) +fi + +AC_MSG_NOTICE([ + +Configuration complete. +Please check the above messages for any warnings that you +might care about, and then run "make". + +The file config.pri contains the configuration settings for +qmake. If you want to adjust these by hand, edit config.pri +and run "qmake -r" again to regenerate the Makefile. +]) diff -r 4efa7429cd85 -r c10cb8782576 data/data.pro --- a/data/data.pro Mon Nov 29 12:45:39 2010 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,131 +0,0 @@ -TEMPLATE = lib - -SV_UNIT_PACKAGES = fftw3f sndfile mad quicktime id3tag oggz fishsound liblo -load(../prf/sv.prf) - -CONFIG += sv staticlib qt thread warn_on stl rtti exceptions -QT += network -QT -= gui - -TARGET = svdata - -DEPENDPATH += fft fileio model osc .. -INCLUDEPATH += . fft fileio model osc .. 
-OBJECTS_DIR = tmp_obj -MOC_DIR = tmp_moc - -# Set up suitable platform defines for RtMidi -linux*: DEFINES += __LINUX_ALSASEQ__ -macx*: DEFINES += __MACOSX_CORE__ -win*: DEFINES += __WINDOWS_MM__ -solaris*: DEFINES += __RTMIDI_DUMMY_ONLY__ - -# Input -HEADERS += fft/FFTapi.h \ - fft/FFTCacheReader.h \ - fft/FFTCacheStorageType.h \ - fft/FFTCacheWriter.h \ - fft/FFTDataServer.h \ - fft/FFTFileCacheReader.h \ - fft/FFTFileCacheWriter.h \ - fft/FFTMemoryCache.h \ - fileio/AudioFileReader.h \ - fileio/AudioFileReaderFactory.h \ - fileio/BZipFileDevice.h \ - fileio/CachedFile.h \ - fileio/CodedAudioFileReader.h \ - fileio/CSVFileReader.h \ - fileio/CSVFileWriter.h \ - fileio/CSVFormat.h \ - fileio/DataFileReader.h \ - fileio/DataFileReaderFactory.h \ - fileio/FileFinder.h \ - fileio/FileReadThread.h \ - fileio/FileSource.h \ - fileio/MatchFileReader.h \ - fileio/MatrixFile.h \ - fileio/MIDIFileReader.h \ - fileio/MIDIFileWriter.h \ - fileio/MP3FileReader.h \ - fileio/OggVorbisFileReader.h \ - fileio/PlaylistFileReader.h \ - fileio/QuickTimeFileReader.h \ - fileio/ResamplingWavFileReader.h \ - fileio/WavFileReader.h \ - fileio/WavFileWriter.h \ - midi/MIDIEvent.h \ - midi/MIDIInput.h \ - midi/rtmidi/RtError.h \ - midi/rtmidi/RtMidi.h \ - model/AggregateWaveModel.h \ - model/AlignmentModel.h \ - model/Dense3DModelPeakCache.h \ - model/DenseThreeDimensionalModel.h \ - model/DenseTimeValueModel.h \ - model/EditableDenseThreeDimensionalModel.h \ - model/FFTModel.h \ - model/ImageModel.h \ - model/IntervalModel.h \ - model/Labeller.h \ - model/Model.h \ - model/ModelDataTableModel.h \ - model/NoteModel.h \ - model/PathModel.h \ - model/PowerOfSqrtTwoZoomConstraint.h \ - model/PowerOfTwoZoomConstraint.h \ - model/RangeSummarisableTimeValueModel.h \ - model/RegionModel.h \ - model/SparseModel.h \ - model/SparseOneDimensionalModel.h \ - model/SparseTimeValueModel.h \ - model/SparseValueModel.h \ - model/TabularModel.h \ - model/TextModel.h \ - model/WaveFileModel.h \ - model/WritableWaveFileModel.h \ - osc/OSCMessage.h \ - osc/OSCQueue.h -SOURCES += fft/FFTapi.cpp \ - fft/FFTDataServer.cpp \ - fft/FFTFileCacheReader.cpp \ - fft/FFTFileCacheWriter.cpp \ - fft/FFTMemoryCache.cpp \ - fileio/AudioFileReader.cpp \ - fileio/AudioFileReaderFactory.cpp \ - fileio/BZipFileDevice.cpp \ - fileio/CachedFile.cpp \ - fileio/CodedAudioFileReader.cpp \ - fileio/CSVFileReader.cpp \ - fileio/CSVFileWriter.cpp \ - fileio/CSVFormat.cpp \ - fileio/DataFileReaderFactory.cpp \ - fileio/FileReadThread.cpp \ - fileio/FileSource.cpp \ - fileio/MatchFileReader.cpp \ - fileio/MatrixFile.cpp \ - fileio/MIDIFileReader.cpp \ - fileio/MIDIFileWriter.cpp \ - fileio/MP3FileReader.cpp \ - fileio/OggVorbisFileReader.cpp \ - fileio/PlaylistFileReader.cpp \ - fileio/QuickTimeFileReader.cpp \ - fileio/ResamplingWavFileReader.cpp \ - fileio/WavFileReader.cpp \ - fileio/WavFileWriter.cpp \ - midi/MIDIInput.cpp \ - midi/rtmidi/RtMidi.cpp \ - model/AggregateWaveModel.cpp \ - model/AlignmentModel.cpp \ - model/Dense3DModelPeakCache.cpp \ - model/DenseTimeValueModel.cpp \ - model/EditableDenseThreeDimensionalModel.cpp \ - model/FFTModel.cpp \ - model/Model.cpp \ - model/ModelDataTableModel.cpp \ - model/PowerOfSqrtTwoZoomConstraint.cpp \ - model/PowerOfTwoZoomConstraint.cpp \ - model/RangeSummarisableTimeValueModel.cpp \ - model/WaveFileModel.cpp \ - model/WritableWaveFileModel.cpp \ - osc/OSCMessage.cpp \ - osc/OSCQueue.cpp diff -r 4efa7429cd85 -r c10cb8782576 data/fft/FFTDataServer.cpp --- a/data/fft/FFTDataServer.cpp Mon Nov 29 12:45:39 2010 
+0000 +++ b/data/fft/FFTDataServer.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -191,7 +191,7 @@ if (server->getFillCompletion() < 50) distance += 100; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::getFuzzyInstance: Distance for server " << server << " is " << distance << ", best is " << bestdist << std::endl; + SVDEBUG << "FFTDataServer::getFuzzyInstance: Distance for server " << server << " is " << distance << ", best is " << bestdist << endl; #endif if (bestdist == -1 || distance < bestdist) { @@ -204,7 +204,7 @@ if (bestdist >= 0) { FFTDataServer *server = best->second.first; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::getFuzzyInstance: We like server " << server << " (with distance " << bestdist << ")" << std::endl; + SVDEBUG << "FFTDataServer::getFuzzyInstance: We like server " << server << " (with distance " << bestdist << ")" << endl; #endif claimInstance(server, false); return server; @@ -228,7 +228,7 @@ FFTDataServer::findServer(QString n) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::findServer(\"" << n.toStdString() << "\")" << std::endl; + SVDEBUG << "FFTDataServer::findServer(\"" << n << "\")" << endl; #endif if (m_servers.find(n) != m_servers.end()) { @@ -236,7 +236,7 @@ FFTDataServer *server = m_servers[n].first; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::findServer(\"" << n.toStdString() << "\"): found " << server << std::endl; + SVDEBUG << "FFTDataServer::findServer(\"" << n << "\"): found " << server << endl; #endif claimInstance(server, false); @@ -245,7 +245,7 @@ } #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::findServer(\"" << n.toStdString() << "\"): not found" << std::endl; + SVDEBUG << "FFTDataServer::findServer(\"" << n << "\"): not found" << endl; #endif return 0; @@ -264,7 +264,7 @@ "FFTDataServer::claimInstance::m_serverMapMutex"); #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::claimInstance(" << server << ")" << std::endl; + SVDEBUG << "FFTDataServer::claimInstance(" << server << ")" << endl; #endif for (ServerMap::iterator i = m_servers.begin(); i != m_servers.end(); ++i) { @@ -275,7 +275,7 @@ if (*j == server) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::claimInstance: found in released server list, removing from it" << std::endl; + SVDEBUG << "FFTDataServer::claimInstance: found in released server list, removing from it" << endl; #endif m_releasedServers.erase(j); break; @@ -285,7 +285,7 @@ ++i->second.second; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::claimInstance: new refcount is " << i->second.second << std::endl; + SVDEBUG << "FFTDataServer::claimInstance: new refcount is " << i->second.second << endl; #endif return; @@ -309,7 +309,7 @@ "FFTDataServer::releaseInstance::m_serverMapMutex"); #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::releaseInstance(" << server << ")" << std::endl; + SVDEBUG << "FFTDataServer::releaseInstance(" << server << ")" << endl; #endif // -- if ref count > 0, decrement and return @@ -332,18 +332,18 @@ /*!!! 
if (server->m_lastUsedCache == -1) { // never used #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::releaseInstance: instance " + SVDEBUG << "FFTDataServer::releaseInstance: instance " << server << " has never been used, erasing" - << std::endl; + << endl; #endif delete server; m_servers.erase(i); } else { */ #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::releaseInstance: instance " + SVDEBUG << "FFTDataServer::releaseInstance: instance " << server << " no longer in use, marking for possible collection" - << std::endl; + << endl; #endif bool found = false; for (ServerQueue::iterator j = m_releasedServers.begin(); @@ -361,9 +361,9 @@ //!!! } } else { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::releaseInstance: instance " + SVDEBUG << "FFTDataServer::releaseInstance: instance " << server << " now has refcount " << i->second.second - << std::endl; + << endl; #endif } return; @@ -378,8 +378,8 @@ FFTDataServer::purgeLimbo(int maxSize) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::purgeLimbo(" << maxSize << "): " - << m_releasedServers.size() << " candidates" << std::endl; + SVDEBUG << "FFTDataServer::purgeLimbo(" << maxSize << "): " + << m_releasedServers.size() << " candidates" << endl; #endif while (int(m_releasedServers.size()) > maxSize) { @@ -389,8 +389,8 @@ bool found = false; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::purgeLimbo: considering candidate " - << server << std::endl; + SVDEBUG << "FFTDataServer::purgeLimbo: considering candidate " + << server << endl; #endif for (ServerMap::iterator i = m_servers.begin(); i != m_servers.end(); ++i) { @@ -405,8 +405,8 @@ break; } #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::purgeLimbo: looks OK, erasing it" - << std::endl; + SVDEBUG << "FFTDataServer::purgeLimbo: looks OK, erasing it" + << endl; #endif m_servers.erase(i); @@ -426,8 +426,8 @@ } #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::purgeLimbo(" << maxSize << "): " - << m_releasedServers.size() << " remain" << std::endl; + SVDEBUG << "FFTDataServer::purgeLimbo(" << maxSize << "): " + << m_releasedServers.size() << " remain" << endl; #endif } @@ -439,8 +439,8 @@ "FFTDataServer::modelAboutToBeDeleted::m_serverMapMutex"); #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::modelAboutToBeDeleted(" << model << ")" - << std::endl; + SVDEBUG << "FFTDataServer::modelAboutToBeDeleted(" << model << ")" + << endl; #endif for (ServerMap::iterator i = m_servers.begin(); i != m_servers.end(); ++i) { @@ -450,12 +450,12 @@ if (server->getModel() == model) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::modelAboutToBeDeleted: server is " - << server << std::endl; + SVDEBUG << "FFTDataServer::modelAboutToBeDeleted: server is " + << server << endl; #endif if (i->second.second > 0) { - std::cerr << "WARNING: FFTDataServer::modelAboutToBeDeleted: Model " << model << " (\"" << model->objectName().toStdString() << "\") is about to be deleted, but is still being referred to by FFT server " << server << " with non-zero refcount " << i->second.second << std::endl; + std::cerr << "WARNING: FFTDataServer::modelAboutToBeDeleted: Model " << model << " (\"" << model->objectName() << "\") is about to be deleted, but is still being referred to by FFT server " << server << " with non-zero refcount " << i->second.second << std::endl; server->suspendWrites(); return; } @@ -463,14 +463,14 @@ j != m_releasedServers.end(); ++j) { if (*j == server) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::modelAboutToBeDeleted: erasing from released 
servers" << std::endl; + SVDEBUG << "FFTDataServer::modelAboutToBeDeleted: erasing from released servers" << endl; #endif m_releasedServers.erase(j); break; } } #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::modelAboutToBeDeleted: erasing server" << std::endl; + SVDEBUG << "FFTDataServer::modelAboutToBeDeleted: erasing server" << endl; #endif m_servers.erase(i); delete server; @@ -814,13 +814,15 @@ success = true; - } catch (std::exception e) { + } catch (std::exception &e) { delete cb->fileCacheWriter; cb->fileCacheWriter = 0; std::cerr << "ERROR: Failed to construct disc cache for FFT data: " << e.what() << std::endl; + + throw; } } @@ -839,7 +841,7 @@ // preconditions: m_caches[c] exists and contains a file writer; // m_cacheVectorLock is not locked by this thread #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::makeCacheReader(" << c << ")" << std::endl; + SVDEBUG << "FFTDataServer::makeCacheReader(" << c << ")" << endl; #endif QThread *me = QThread::currentThread(); @@ -851,7 +853,7 @@ cb->fileCacheReader[me] = new FFTFileCacheReader(cb->fileCacheWriter); - } catch (std::exception e) { + } catch (std::exception &e) { delete cb->fileCacheReader[me]; cb->fileCacheReader.erase(me); @@ -873,7 +875,7 @@ cb = m_caches.at(deleteCandidate); if (cb && cb->fileCacheReader.find(me) != cb->fileCacheReader.end()) { #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::makeCacheReader: Deleting probably unpopular reader " << deleteCandidate << " for this thread (as I create reader " << c << ")" << std::endl; + SVDEBUG << "FFTDataServer::makeCacheReader: Deleting probably unpopular reader " << deleteCandidate << " for this thread (as I create reader " << c << ")" << endl; #endif delete cb->fileCacheReader[me]; cb->fileCacheReader.erase(me); @@ -889,20 +891,29 @@ if (x >= m_width || y >= m_height) return 0; - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return 0; + float val = 0; - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getMagnitudeAt: filling"); + try { + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return 0; + + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getMagnitudeAt: filling"); #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::getMagnitudeAt: calling fillColumn(" - << x << ")" << std::endl; + SVDEBUG << "FFTDataServer::getMagnitudeAt: calling fillColumn(" + << x << ")" << endl; #endif - fillColumn(x); + fillColumn(x); + } + + val = cache->getMagnitudeAt(col, y); + + } catch (std::exception &e) { + m_error = e.what(); } - return cache->getMagnitudeAt(col, y); + + return val; } bool @@ -918,18 +929,23 @@ count = (m_height - minbin) / step; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return false; + try { + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return false; - //!!! n.b. 
can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getMagnitudesAt: filling"); - fillColumn(x); + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getMagnitudesAt: filling"); + fillColumn(x); + } + + cache->getMagnitudesAt(col, values, minbin, count, step); + + } catch (std::exception &e) { + m_error = e.what(); + return false; } - cache->getMagnitudesAt(col, values, minbin, count, step); - return true; } @@ -940,16 +956,25 @@ if (x >= m_width || y >= m_height) return 0; - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return 0; + float val = 0; - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getNormalizedMagnitudeAt: filling"); - fillColumn(x); + try { + + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return 0; + + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getNormalizedMagnitudeAt: filling"); + fillColumn(x); + } + val = cache->getNormalizedMagnitudeAt(col, y); + + } catch (std::exception &e) { + m_error = e.what(); } - return cache->getNormalizedMagnitudeAt(col, y); + + return val; } bool @@ -965,18 +990,24 @@ count = (m_height - minbin) / step; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return false; + try { - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getNormalizedMagnitudesAt: filling"); - fillColumn(x); - } + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return false; - for (size_t i = 0; i < count; ++i) { - values[i] = cache->getNormalizedMagnitudeAt(col, i * step + minbin); + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getNormalizedMagnitudesAt: filling"); + fillColumn(x); + } + + for (size_t i = 0; i < count; ++i) { + values[i] = cache->getNormalizedMagnitudeAt(col, i * step + minbin); + } + + } catch (std::exception &e) { + m_error = e.what(); + return false; } return true; @@ -989,16 +1020,25 @@ if (x >= m_width) return 0; - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return 0; + float val = 0; - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getMaximumMagnitudeAt: filling"); - fillColumn(x); + try { + + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return 0; + + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getMaximumMagnitudeAt: filling"); + fillColumn(x); + } + val = cache->getMaximumMagnitudeAt(col); + + } catch (std::exception &e) { + m_error = e.what(); } - return cache->getMaximumMagnitudeAt(col); + + return val; } float @@ -1008,16 +1048,25 @@ if (x >= m_width || y >= m_height) return 0; - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return 0; + float val = 0; - //!!! n.b. 
can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getPhaseAt: filling"); - fillColumn(x); + try { + + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return 0; + + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getPhaseAt: filling"); + fillColumn(x); + } + val = cache->getPhaseAt(col, y); + + } catch (std::exception &e) { + m_error = e.what(); } - return cache->getPhaseAt(col, y); + + return val; } bool @@ -1033,18 +1082,24 @@ count = (m_height - minbin) / step; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return false; + try { - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getPhasesAt: filling"); - fillColumn(x); - } + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return false; - for (size_t i = 0; i < count; ++i) { - values[i] = cache->getPhaseAt(col, i * step + minbin); + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getPhasesAt: filling"); + fillColumn(x); + } + + for (size_t i = 0; i < count; ++i) { + values[i] = cache->getPhaseAt(col, i * step + minbin); + } + + } catch (std::exception &e) { + m_error = e.what(); + return false; } return true; @@ -1061,25 +1116,30 @@ return; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); + try { - if (!cache) { - real = 0; - imaginary = 0; - return; + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + + if (!cache) { + real = 0; + imaginary = 0; + return; + } + + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getValuesAt: filling"); +#ifdef DEBUG_FFT_SERVER + SVDEBUG << "FFTDataServer::getValuesAt(" << x << ", " << y << "): filling" << endl; +#endif + fillColumn(x); + } + + cache->getValuesAt(col, y, real, imaginary); + + } catch (std::exception &e) { + m_error = e.what(); } - - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getValuesAt: filling"); -#ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::getValuesAt(" << x << ", " << y << "): filling" << std::endl; -#endif - fillColumn(x); - } - - cache->getValuesAt(col, y, real, imaginary); } bool @@ -1095,18 +1155,24 @@ count = (m_height - minbin) / step; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return false; + try { - //!!! n.b. can throw - if (!cache->haveSetColumnAt(col)) { - Profiler profiler("FFTDataServer::getValuesAt: filling"); - fillColumn(x); - } + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return false; - for (size_t i = 0; i < count; ++i) { - cache->getValuesAt(col, i * step + minbin, reals[i], imaginaries[i]); + if (!cache->haveSetColumnAt(col)) { + Profiler profiler("FFTDataServer::getValuesAt: filling"); + fillColumn(x); + } + + for (size_t i = 0; i < count; ++i) { + cache->getValuesAt(col, i * step + minbin, reals[i], imaginaries[i]); + } + + } catch (std::exception &e) { + m_error = e.what(); + return false; } return true; @@ -1123,7 +1189,7 @@ /*!!! if (m_lastUsedCache == -1) { if (m_suspended) { - std::cerr << "FFTDataServer::isColumnReady(" << x << "): no cache, calling resume" << std::endl; + SVDEBUG << "FFTDataServer::isColumnReady(" << x << "): no cache, calling resume" << endl; resume(); } m_fillThread->start(); @@ -1132,12 +1198,18 @@ return false; } - size_t col; - FFTCacheReader *cache = getCacheReader(x, col); - if (!cache) return true; + try { - //!!! n.b. 
can throw - return cache->haveSetColumnAt(col); + size_t col; + FFTCacheReader *cache = getCacheReader(x, col); + if (!cache) return true; + + return cache->haveSetColumnAt(col); + + } catch (std::exception &e) { + m_error = e.what(); + return false; + } } void @@ -1186,12 +1258,12 @@ endFrame -= winsize / 2; #ifdef DEBUG_FFT_SERVER_FILL - std::cerr << "FFTDataServer::fillColumn: requesting frames " + SVDEBUG << "FFTDataServer::fillColumn: requesting frames " << startFrame + pfx << " -> " << endFrame << " ( = " << endFrame - (startFrame + pfx) << ") at index " << off + pfx << " in buffer of size " << m_fftSize << " with window size " << m_windowSize - << " from channel " << m_channel << std::endl; + << " from channel " << m_channel << endl; #endif QMutexLocker locker(&m_fftBuffersLock); @@ -1298,7 +1370,7 @@ } if (m_suspended) { -// std::cerr << "FFTDataServer::fillColumn(" << x << "): calling resume" << std::endl; +// SVDEBUG << "FFTDataServer::fillColumn(" << x << "): calling resume" << endl; // resume(); } } @@ -1307,6 +1379,7 @@ FFTDataServer::fillComplete() { for (int i = 0; i < int(m_caches.size()); ++i) { + if (!m_caches[i]) continue; if (m_caches[i]->memoryCache) { m_caches[i]->memoryCache->allColumnsWritten(); } @@ -1316,6 +1389,14 @@ } } +QString +FFTDataServer::getError() const +{ + if (m_error != "") return m_error; + else if (m_fillThread) return m_fillThread->getError(); + else return ""; +} + size_t FFTDataServer::getFillCompletion() const { @@ -1365,7 +1446,7 @@ FFTDataServer::FillThread::run() { #ifdef DEBUG_FFT_SERVER_FILL - std::cerr << "FFTDataServer::FillThread::run()" << std::endl; + SVDEBUG << "FFTDataServer::FillThread::run()" << endl; #endif m_extent = 0; @@ -1373,7 +1454,7 @@ while (!m_server.m_model->isReady() && !m_server.m_exiting) { #ifdef DEBUG_FFT_SERVER_FILL - std::cerr << "FFTDataServer::FillThread::run(): waiting for model " << m_server.m_model << " to be ready" << std::endl; + SVDEBUG << "FFTDataServer::FillThread::run(): waiting for model " << m_server.m_model << " to be ready" << endl; #endif sleep(1); } @@ -1392,7 +1473,16 @@ for (size_t f = m_fillFrom; f < end; f += m_server.m_windowIncrement) { - m_server.fillColumn(int((f - start) / m_server.m_windowIncrement)); + try { + m_server.fillColumn(int((f - start) / m_server.m_windowIncrement)); + } catch (std::exception &e) { + SVDEBUG << "FFTDataServer::FillThread::run: exception: " << e.what() << endl; + m_error = e.what(); + m_server.fillComplete(); + m_completion = 100; + m_extent = end; + return; + } if (m_server.m_exiting) return; @@ -1432,7 +1522,16 @@ for (size_t f = start; f < remainingEnd; f += m_server.m_windowIncrement) { - m_server.fillColumn(int((f - start) / m_server.m_windowIncrement)); + try { + m_server.fillColumn(int((f - start) / m_server.m_windowIncrement)); + } catch (std::exception &e) { + SVDEBUG << "FFTDataServer::FillThread::run: exception: " << e.what() << endl; + m_error = e.what(); + m_server.fillComplete(); + m_completion = 100; + m_extent = end; + return; + } if (m_server.m_exiting) return; @@ -1468,7 +1567,7 @@ m_extent = end; #ifdef DEBUG_FFT_SERVER - std::cerr << "FFTDataServer::FillThread::run exiting" << std::endl; + SVDEBUG << "FFTDataServer::FillThread::run exiting" << endl; #endif } diff -r 4efa7429cd85 -r c10cb8782576 data/fft/FFTDataServer.h --- a/data/fft/FFTDataServer.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fft/FFTDataServer.h Sun Jul 01 11:53:00 2012 +0100 @@ -106,6 +106,7 @@ return getMagnitudeAt(x, y) > threshold; } + QString getError() const; size_t 
getFillCompletion() const; size_t getFillExtent() const; @@ -246,6 +247,7 @@ size_t getExtent() const { return m_extent; } size_t getCompletion() const { return m_completion ? m_completion : 1; } + QString getError() const { return m_error; } virtual void run(); protected: @@ -253,11 +255,13 @@ size_t m_extent; size_t m_completion; size_t m_fillFrom; + QString m_error; }; bool m_exiting; bool m_suspended; FillThread *m_fillThread; + QString m_error; void deleteProcessingData(); void fillColumn(size_t x); diff -r 4efa7429cd85 -r c10cb8782576 data/fft/FFTFileCacheReader.cpp --- a/data/fft/FFTFileCacheReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fft/FFTFileCacheReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -157,7 +157,7 @@ void FFTFileCacheReader::getValuesAt(size_t x, size_t y, float &real, float &imag) const { -// std::cerr << "FFTFileCacheReader::getValuesAt(" << x << "," << y << ")" << std::endl; +// SVDEBUG << "FFTFileCacheReader::getValuesAt(" << x << "," << y << ")" << endl; switch (m_storageType) { @@ -216,7 +216,7 @@ { if (m_readbuf && m_readbufGood && (m_readbufCol == x || (m_readbufWidth > 1 && m_readbufCol+1 == x))) { -// std::cerr << "FFTFileCacheReader::haveSetColumnAt: short-circuiting; we know about this one" << std::endl; +// SVDEBUG << "FFTFileCacheReader::haveSetColumnAt: short-circuiting; we know about this one" << endl; return true; } return m_mfc->haveSetColumnAt(x); @@ -236,7 +236,7 @@ { Profiler profiler("FFTFileCacheReader::populateReadBuf", false); -// std::cerr << "FFTFileCacheReader::populateReadBuf(" << x << ")" << std::endl; +// SVDEBUG << "FFTFileCacheReader::populateReadBuf(" << x << ")" << endl; if (!m_readbuf) { m_readbuf = new char[m_mfc->getHeight() * 2 * m_mfc->getCellSize()]; diff -r 4efa7429cd85 -r c10cb8782576 data/fft/FFTFileCacheWriter.cpp --- a/data/fft/FFTFileCacheWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fft/FFTFileCacheWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -188,7 +188,7 @@ FFTFileCacheWriter::allColumnsWritten() { #ifdef DEBUG_FFT_FILE_CACHE_WRITER - std::cerr << "FFTFileCacheWriter::allColumnsWritten" << std::endl; + SVDEBUG << "FFTFileCacheWriter::allColumnsWritten" << endl; #endif m_mfc->close(); } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/AudioFileReader.h --- a/data/fileio/AudioFileReader.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/AudioFileReader.h Sun Jul 01 11:53:00 2012 +0100 @@ -21,6 +21,7 @@ #include "FileSource.h" #include <vector> +#include <map> typedef std::vector<float> SampleBlock; @@ -38,7 +39,8 @@ size_t getFrameCount() const { return m_frameCount; } size_t getChannelCount() const { return m_channelCount; } size_t getSampleRate() const { return m_sampleRate; } - size_t getNativeRate() const { return m_sampleRate; } // if resampled + + virtual size_t getNativeRate() const { return m_sampleRate; } // if resampled /** * Return the location of the audio data in the reader (as passed @@ -60,6 +62,9 @@ */ virtual QString getMaker() const { return ""; } + typedef std::map<QString, QString> TagMap; + virtual TagMap getTags() const { return TagMap(); } + /** * Return interleaved samples for count frames from index start.
* The resulting sample block will contain count * diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/AudioFileReaderFactory.cpp --- a/data/fileio/AudioFileReaderFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/AudioFileReaderFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -77,10 +77,15 @@ { QString err; - std::cerr << "AudioFileReaderFactory::createReader(\"" << source.getLocation().toStdString() << "\"): Requested rate: " << targetRate << std::endl; + SVDEBUG << "AudioFileReaderFactory::createReader(\"" << source.getLocation() << "\"): Requested rate: " << targetRate << endl; - if (!source.isOK() || !source.isAvailable()) { - std::cerr << "AudioFileReaderFactory::createReader(\"" << source.getLocation().toStdString() << "\": Source unavailable" << std::endl; + if (!source.isOK()) { + std::cerr << "AudioFileReaderFactory::createReader(\"" << source.getLocation() << "\": Failed to retrieve source (transmission error?): " << source.getErrorString() << std::endl; + return 0; + } + + if (!source.isAvailable()) { + SVDEBUG << "AudioFileReaderFactory::createReader(\"" << source.getLocation() << "\": Source not found" << endl; return 0; } @@ -97,7 +102,7 @@ reader->isOK() && reader->getSampleRate() != targetRate) { - std::cerr << "AudioFileReaderFactory::createReader: WAV file rate: " << reader->getSampleRate() << ", creating resampling reader" << std::endl; + SVDEBUG << "AudioFileReaderFactory::createReader: WAV file rate: " << reader->getSampleRate() << ", creating resampling reader" << endl; delete reader; reader = new ResamplingWavFileReader @@ -208,7 +213,7 @@ reader->isOK() && reader->getSampleRate() != targetRate) { - std::cerr << "AudioFileReaderFactory::createReader: WAV file rate: " << reader->getSampleRate() << ", creating resampling reader" << std::endl; + SVDEBUG << "AudioFileReaderFactory::createReader: WAV file rate: " << reader->getSampleRate() << ", creating resampling reader" << endl; delete reader; reader = new ResamplingWavFileReader @@ -312,10 +317,10 @@ std::cerr << "AudioFileReaderFactory: Preferred reader for " << "url \"" << source.getLocation().toStdString() << "\" (content type \"" - << source.getContentType().toStdString() << "\") failed"; + << source.getContentType() << "\") failed"; if (reader->getError() != "") { - std::cerr << ": \"" << reader->getError().toStdString() << "\""; + std::cerr << ": \"" << reader->getError() << "\""; } std::cerr << std::endl; delete reader; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/BZipFileDevice.cpp --- a/data/fileio/BZipFileDevice.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/BZipFileDevice.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -30,7 +30,7 @@ BZipFileDevice::~BZipFileDevice() { -// std::cerr << "BZipFileDevice::~BZipFileDevice(" << m_fileName.toStdString() << ")" << std::endl; +// SVDEBUG << "BZipFileDevice::~BZipFileDevice(" << m_fileName << ")" << endl; if (m_bzFile) close(); } @@ -88,7 +88,7 @@ return false; } -// std::cerr << "BZipFileDevice: opened \"" << m_fileName.toStdString() << "\" for writing" << std::endl; +// std::cerr << "BZipFileDevice: opened \"" << m_fileName << "\" for writing" << std::endl; setErrorString(QString()); setOpenMode(mode); @@ -115,7 +115,7 @@ return false; } -// std::cerr << "BZipFileDevice: opened \"" << m_fileName.toStdString() << "\" for reading" << std::endl; +// std::cerr << "BZipFileDevice: opened \"" << m_fileName << "\" for reading" << std::endl; m_atEnd = false; @@ -178,7 +178,7 @@ int bzError = BZ_OK; int read = BZ2_bzRead(&bzError, m_bzFile, data, maxSize); -// 
std::cerr << "BZipFileDevice::readData: requested " << maxSize << ", read " << read << std::endl; +// SVDEBUG << "BZipFileDevice::readData: requested " << maxSize << ", read " << read << endl; if (bzError != BZ_OK) { if (bzError != BZ_STREAM_END) { @@ -187,7 +187,7 @@ m_ok = false; return -1; } else { -// std::cerr << "BZipFileDevice::readData: reached end of file" << std::endl; +// SVDEBUG << "BZipFileDevice::readData: reached end of file" << endl; m_atEnd = true; } } @@ -201,7 +201,7 @@ int bzError = BZ_OK; BZ2_bzWrite(&bzError, m_bzFile, (void *)data, maxSize); -// std::cerr << "BZipFileDevice::writeData: " << maxSize << " to write" << std::endl; +// SVDEBUG << "BZipFileDevice::writeData: " << maxSize << " to write" << endl; if (bzError != BZ_OK) { std::cerr << "BZipFileDevice::writeData: error condition" << std::endl; @@ -210,7 +210,7 @@ return -1; } -// std::cerr << "BZipFileDevice::writeData: wrote " << maxSize << std::endl; +// SVDEBUG << "BZipFileDevice::writeData: wrote " << maxSize << endl; return maxSize; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CSVFileReader.cpp --- a/data/fileio/CSVFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CSVFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -17,9 +17,11 @@ #include "model/Model.h" #include "base/RealTime.h" +#include "base/StringBits.h" #include "model/SparseOneDimensionalModel.h" #include "model/SparseTimeValueModel.h" #include "model/EditableDenseThreeDimensionalModel.h" +#include "model/RegionModel.h" #include "DataFileReaderFactory.h" #include @@ -29,11 +31,13 @@ #include #include +#include CSVFileReader::CSVFileReader(QString path, CSVFormat format, size_t mainModelSampleRate) : m_format(format), m_file(0), + m_warnings(0), m_mainModelSampleRate(mainModelSampleRate) { m_file = new QFile(path); @@ -55,10 +59,10 @@ CSVFileReader::~CSVFileReader() { - std::cerr << "CSVFileReader::~CSVFileReader: file is " << m_file << std::endl; + SVDEBUG << "CSVFileReader::~CSVFileReader: file is " << m_file << endl; if (m_file) { - std::cerr << "CSVFileReader::CSVFileReader: Closing file" << std::endl; + SVDEBUG << "CSVFileReader::CSVFileReader: Closing file" << endl; m_file->close(); } delete m_file; @@ -76,27 +80,64 @@ return m_error; } +size_t +CSVFileReader::convertTimeValue(QString s, int lineno, size_t sampleRate, + size_t windowSize) const +{ + QRegExp nonNumericRx("[^0-9eE.,+-]"); + unsigned int warnLimit = 10; + + CSVFormat::TimeUnits timeUnits = m_format.getTimeUnits(); + + size_t calculatedFrame = 0; + + bool ok = false; + QString numeric = s; + numeric.remove(nonNumericRx); + + if (timeUnits == CSVFormat::TimeSeconds) { + + double time = numeric.toDouble(&ok); + if (!ok) time = StringBits::stringToDoubleLocaleFree(numeric, &ok); + calculatedFrame = int(time * sampleRate + 0.5); + + } else { + + long n = numeric.toLong(&ok); + if (n >= 0) calculatedFrame = n; + + if (timeUnits == CSVFormat::TimeWindows) { + calculatedFrame *= windowSize; + } + } + + if (!ok) { + if (m_warnings < warnLimit) { + std::cerr << "WARNING: CSVFileReader::load: " + << "Bad time format (\"" << s.toStdString() + << "\") in data line " + << lineno+1 << std::endl; + } else if (m_warnings == warnLimit) { + std::cerr << "WARNING: Too many warnings" << std::endl; + } + ++m_warnings; + } + + return calculatedFrame; +} + Model * CSVFileReader::load() const { if (!m_file) return 0; -/*!!! 
- CSVFormatDialog *dialog = new CSVFormatDialog - (0, m_file, m_mainModelSampleRate); - if (dialog->exec() == QDialog::Rejected) { - delete dialog; - throw DataFileReaderFactory::ImportCancelled; - } -*/ - - CSVFormat::ModelType modelType = m_format.getModelType(); + CSVFormat::ModelType modelType = m_format.getModelType(); CSVFormat::TimingType timingType = m_format.getTimingType(); - CSVFormat::TimeUnits timeUnits = m_format.getTimeUnits(); - QString separator = m_format.getSeparator(); - QString::SplitBehavior behaviour = m_format.getSplitBehaviour(); + CSVFormat::TimeUnits timeUnits = m_format.getTimeUnits(); size_t sampleRate = m_format.getSampleRate(); size_t windowSize = m_format.getWindowSize(); + QChar separator = m_format.getSeparator(); + bool allowQuoting = m_format.getAllowQuoting(); if (timingType == CSVFormat::ExplicitTiming) { if (modelType == CSVFormat::ThreeDimensionalModel) { @@ -114,6 +155,7 @@ SparseOneDimensionalModel *model1 = 0; SparseTimeValueModel *model2 = 0; + RegionModel *model2a = 0; EditableDenseThreeDimensionalModel *model3 = 0; Model *model = 0; @@ -126,7 +168,23 @@ float min = 0.0, max = 0.0; size_t frameNo = 0; + size_t duration = 0; + size_t endFrame = 0; + + bool haveAnyValue = false; + bool haveEndTime = false; + size_t startFrame = 0; // for calculation of dense model resolution + bool firstEverValue = true; + + std::map labelCountMap; + + int valueColumns = 0; + for (int i = 0; i < m_format.getColumnCount(); ++i) { + if (m_format.getColumnPurpose(i) == CSVFormat::ColumnValue) { + ++valueColumns; + } + } while (!in.atEnd()) { @@ -150,8 +208,7 @@ if (line.startsWith("#")) continue; - QStringList list = line.split(separator, behaviour); - + QStringList list = StringBits::split(line, separator, allowQuoting); if (!model) { switch (modelType) { @@ -166,110 +223,112 @@ model = model2; break; + case CSVFormat::TwoDimensionalModelWithDuration: + model2a = new RegionModel(sampleRate, windowSize, false); + model = model2a; + break; + case CSVFormat::ThreeDimensionalModel: model3 = new EditableDenseThreeDimensionalModel (sampleRate, windowSize, - list.size(), + valueColumns, EditableDenseThreeDimensionalModel::NoCompression); model = model3; break; } } - QStringList tidyList; - QRegExp nonNumericRx("[^0-9eE.,+-]"); + float value = 0.f; + QString label = ""; + + duration = 0.f; + haveEndTime = false; for (int i = 0; i < list.size(); ++i) { - - QString s(list[i].trimmed()); - if (s.length() >= 2 && s.startsWith("\"") && s.endsWith("\"")) { - s = s.mid(1, s.length() - 2); - } else if (s.length() >= 2 && s.startsWith("'") && s.endsWith("'")) { - s = s.mid(1, s.length() - 2); + QString s = list[i]; + + CSVFormat::ColumnPurpose purpose = m_format.getColumnPurpose(i); + + switch (purpose) { + + case CSVFormat::ColumnUnknown: + break; + + case CSVFormat::ColumnStartTime: + frameNo = convertTimeValue(s, lineno, sampleRate, windowSize); + break; + + case CSVFormat::ColumnEndTime: + endFrame = convertTimeValue(s, lineno, sampleRate, windowSize); + haveEndTime = true; + break; + + case CSVFormat::ColumnDuration: + duration = convertTimeValue(s, lineno, sampleRate, windowSize); + break; + + case CSVFormat::ColumnValue: + value = s.toFloat(); + haveAnyValue = true; + break; + + case CSVFormat::ColumnLabel: + label = s; + ++labelCountMap[label]; + break; } + } - if (i == 0 && timingType == CSVFormat::ExplicitTiming) { - - bool ok = false; - QString numeric = s; - numeric.remove(nonNumericRx); - - if (timeUnits == CSVFormat::TimeSeconds) { - - double time = 
numeric.toDouble(&ok); - frameNo = int(time * sampleRate + 0.5); - - } else { - - frameNo = numeric.toInt(&ok); - - if (timeUnits == CSVFormat::TimeWindows) { - frameNo *= windowSize; - } - } - - if (!ok) { - if (warnings < warnLimit) { - std::cerr << "WARNING: CSVFileReader::load: " - << "Bad time format (\"" << s.toStdString() - << "\") in data line " - << lineno+1 << ":" << std::endl; - std::cerr << line.toStdString() << std::endl; - } else if (warnings == warnLimit) { - std::cerr << "WARNING: Too many warnings" << std::endl; - } - ++warnings; - } - } else { - tidyList.push_back(s); + if (haveEndTime) { // ... calculate duration now all cols read + if (endFrame > frameNo) { + duration = endFrame - frameNo; } } if (modelType == CSVFormat::OneDimensionalModel) { - SparseOneDimensionalModel::Point point - (frameNo, - tidyList.size() > 0 ? tidyList[tidyList.size()-1] : - QString("%1").arg(lineno+1)); - + SparseOneDimensionalModel::Point point(frameNo, label); model1->addPoint(point); } else if (modelType == CSVFormat::TwoDimensionalModel) { - SparseTimeValueModel::Point point - (frameNo, - tidyList.size() > 0 ? tidyList[0].toFloat() : 0.0, - tidyList.size() > 1 ? tidyList[1] : QString("%1").arg(lineno+1)); + SparseTimeValueModel::Point point(frameNo, value, label); + model2->addPoint(point); - model2->addPoint(point); + } else if (modelType == CSVFormat::TwoDimensionalModelWithDuration) { + + RegionModel::Point point(frameNo, value, duration, label); + model2a->addPoint(point); } else if (modelType == CSVFormat::ThreeDimensionalModel) { DenseThreeDimensionalModel::Column values; - for (int i = 0; i < tidyList.size(); ++i) { + for (int i = 0; i < list.size(); ++i) { + + if (m_format.getColumnPurpose(i) != CSVFormat::ColumnValue) { + continue; + } bool ok = false; float value = list[i].toFloat(&ok); - if (i > 0 || timingType != CSVFormat::ExplicitTiming) { - values.push_back(value); - } + values.push_back(value); - bool firstEver = (lineno == 0 && i == 0); - - if (firstEver || value < min) min = value; - if (firstEver || value > max) max = value; - - if (firstEver) { + if (firstEverValue || value < min) min = value; + if (firstEverValue || value > max) max = value; + + if (firstEverValue) { startFrame = frameNo; model3->setStartFrame(startFrame); } else if (lineno == 1 && timingType == CSVFormat::ExplicitTiming) { model3->setResolution(frameNo - startFrame); } + + firstEverValue = false; if (!ok) { if (warnings < warnLimit) { @@ -278,7 +337,7 @@ << list[i].toStdString() << "\" in data line " << lineno+1 << ":" << std::endl; - std::cerr << line.toStdString() << std::endl; + std::cerr << line << std::endl; ++warnings; } else if (warnings == warnLimit) { // std::cerr << "WARNING: Too many warnings" << std::endl; @@ -286,8 +345,8 @@ } } -// std::cerr << "Setting bin values for count " << lineno << ", frame " -// << frameNo << ", time " << RealTime::frame2RealTime(frameNo, sampleRate) << std::endl; +// SVDEBUG << "Setting bin values for count " << lineno << ", frame " +// << frameNo << ", time " << RealTime::frame2RealTime(frameNo, sampleRate) << endl; model3->setColumn(lineno, values); } @@ -300,6 +359,47 @@ } } + if (!haveAnyValue) { + if (model2a) { + // assign values for regions based on label frequency; we + // have this in our labelCountMap, sort of + + std::map<int, std::map<QString, float> > countLabelValueMap; + for (std::map<QString, int>::iterator i = labelCountMap.begin(); + i != labelCountMap.end(); ++i) { + countLabelValueMap[i->second][i->first] = 0.f; + } + + float v = 0.f; + for (std::map<int, std::map<QString, float> >::iterator i =
countLabelValueMap.end(); i != countLabelValueMap.begin(); ) { + --i; + for (std::map::iterator j = i->second.begin(); + j != i->second.end(); ++j) { + j->second = v; + v = v + 1.f; + } + } + + std::map pointMap; + for (RegionModel::PointList::const_iterator i = + model2a->getPoints().begin(); + i != model2a->getPoints().end(); ++i) { + RegionModel::Point p(*i); + v = countLabelValueMap[labelCountMap[p.label]][p.label]; + RegionModel::Point pp(p.frame, v, p.duration, p.label); + pointMap[p] = pp; + } + + for (std::map::iterator i = + pointMap.begin(); i != pointMap.end(); ++i) { + model2a->deletePoint(i->first); + model2a->addPoint(i->second); + } + } + } + if (modelType == CSVFormat::ThreeDimensionalModel) { model3->setMinimumLevel(min); model3->setMaximumLevel(max); diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CSVFileReader.h --- a/data/fileio/CSVFileReader.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CSVFileReader.h Sun Jul 01 11:53:00 2012 +0100 @@ -39,7 +39,11 @@ CSVFormat m_format; QFile *m_file; QString m_error; + mutable int m_warnings; size_t m_mainModelSampleRate; + + size_t convertTimeValue(QString, int lineno, size_t sampleRate, + size_t windowSize) const; }; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CSVFileWriter.cpp --- a/data/fileio/CSVFileWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CSVFileWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -21,6 +21,9 @@ #include "model/NoteModel.h" #include "model/TextModel.h" +#include "base/TempWriteFile.h" +#include "base/Exceptions.h" + #include #include @@ -51,16 +54,25 @@ void CSVFileWriter::write() { - QFile file(m_path); - if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) { - m_error = tr("Failed to open file %1 for writing").arg(m_path); - return; + try { + TempWriteFile temp(m_path); + + QFile file(temp.getTemporaryFilename()); + if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) { + m_error = tr("Failed to open file %1 for writing") + .arg(temp.getTemporaryFilename()); + return; + } + + QTextStream out(&file); + out << m_model->toDelimitedDataString(m_delimiter); + + file.close(); + temp.moveToTarget(); + + } catch (FileOperationFailed &f) { + m_error = f.what(); } - - QTextStream out(&file); - out << m_model->toDelimitedDataString(m_delimiter); - - file.close(); } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CSVFormat.cpp --- a/data/fileio/CSVFormat.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CSVFormat.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -15,6 +15,8 @@ #include "CSVFormat.h" +#include "base/StringBits.h" + #include #include #include @@ -23,36 +25,39 @@ #include -CSVFormat::CSVFormat(QString filename) : - m_modelType(TwoDimensionalModel), - m_timingType(ExplicitTiming), - m_timeUnits(TimeSeconds), - m_separator(","), +CSVFormat::CSVFormat(QString path) : + m_separator(""), m_sampleRate(44100), m_windowSize(1024), - m_behaviour(QString::KeepEmptyParts), - m_maxExampleCols(0) + m_allowQuoting(true) { - QFile file(filename); + guessFormatFor(path); +} + +void +CSVFormat::guessFormatFor(QString path) +{ + m_modelType = TwoDimensionalModel; + m_timingType = ExplicitTiming; + m_timeUnits = TimeSeconds; + + m_maxExampleCols = 0; + m_columnCount = 0; + m_variableColumnCount = false; + + m_example.clear(); + m_columnQualities.clear(); + m_columnPurposes.clear(); + m_prevValues.clear(); + + QFile file(path); if (!file.exists()) return; if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) return; QTextStream in(&file); in.seek(0); - unsigned int lineno = 0; - - bool 
nonIncreasingPrimaries = false; - bool nonNumericPrimaries = false; - bool floatPrimaries = false; - bool variableItemCount = false; - int itemCount = 1; - int earliestNonNumericItem = -1; - - float prevPrimary = 0.0; - - m_maxExampleCols = 0; - m_separator = ""; + int lineno = 0; while (!in.atEnd()) { @@ -64,139 +69,261 @@ for (size_t li = 0; li < lines.size(); ++li) { QString line = lines[li]; + if (line.startsWith("#") || line == "") continue; - if (line.startsWith("#")) continue; + guessQualities(line, lineno); - m_behaviour = QString::KeepEmptyParts; + if (++lineno == 50) break; + } + } - if (m_separator == "") { - //!!! to do: ask the user - if (line.split(",").size() >= 2) m_separator = ","; - else if (line.split("\t").size() >= 2) m_separator = "\t"; - else if (line.split("|").size() >= 2) m_separator = "|"; - else if (line.split("/").size() >= 2) m_separator = "/"; - else if (line.split(":").size() >= 2) m_separator = ":"; - else { - m_separator = " "; - m_behaviour = QString::SkipEmptyParts; + guessPurposes(); +} + +void +CSVFormat::guessSeparator(QString line) +{ + char candidates[] = { ',', '\t', ' ', '|', '/', ':' }; + for (int i = 0; i < sizeof(candidates)/sizeof(candidates[0]); ++i) { + if (StringBits::split(line, candidates[i], m_allowQuoting).size() >= 2) { + m_separator = candidates[i]; + return; + } + } + m_separator = " "; +} + +void +CSVFormat::guessQualities(QString line, int lineno) +{ + if (m_separator == "") guessSeparator(line); + + QStringList list = StringBits::split(line, m_separator[0], m_allowQuoting); + + int cols = list.size(); + if (lineno == 0 || (cols < m_columnCount)) m_columnCount = cols; + if (cols != m_columnCount) m_variableColumnCount = true; + + // All columns are regarded as having these qualities until we see + // something that indicates otherwise: + + ColumnQualities defaultQualities = + ColumnNumeric | ColumnIntegral | ColumnIncreasing; + + for (int i = 0; i < cols; ++i) { + + while (m_columnQualities.size() <= i) { + m_columnQualities.push_back(defaultQualities); + m_prevValues.push_back(0.f); + } + + QString s(list[i]); + bool ok = false; + + ColumnQualities qualities = m_columnQualities[i]; + + bool numeric = (qualities & ColumnNumeric); + bool integral = (qualities & ColumnIntegral); + bool increasing = (qualities & ColumnIncreasing); + bool large = (qualities & ColumnLarge); // this one defaults to off + + float value = 0.f; + + //!!! how to take into account headers? 
+ + if (numeric) { + value = s.toFloat(&ok); + if (!ok) { + value = (float)StringBits::stringToDoubleLocaleFree(s, &ok); + } + if (ok) { + if (lineno < 2 && value > 1000.f) large = true; + } else { + numeric = false; + } + } + + if (numeric) { + + if (integral) { + if (s.contains('.') || s.contains(',')) { + integral = false; } } -// std::cerr << "separator = \"" << m_separator.toStdString() << "\"" << std::endl; - - QStringList list = line.split(m_separator, m_behaviour); - QStringList tidyList; - - for (int i = 0; i < list.size(); ++i) { - - QString s(list[i]); - bool numeric = false; - - if (s.length() >= 2 && s.startsWith("\"") && s.endsWith("\"")) { - s = s.mid(1, s.length() - 2); - } else if (s.length() >= 2 && s.startsWith("'") && s.endsWith("'")) { - s = s.mid(1, s.length() - 2); - } else { - float f = s.toFloat(&numeric); -// std::cerr << "converted \"" << s.toStdString() << "\" to float, got " << f << " and success = " << numeric << std::endl; - } - - tidyList.push_back(s); - - if (lineno == 0 || (list.size() < itemCount)) { - itemCount = list.size(); - } else { - if (itemCount != list.size()) { - variableItemCount = true; - } - } - - if (i == 0) { // primary - - if (numeric) { - - float primary = s.toFloat(); - - if (lineno > 0 && primary <= prevPrimary) { - nonIncreasingPrimaries = true; - } - - if (s.contains(".") || s.contains(",")) { - floatPrimaries = true; - } - - prevPrimary = primary; - - } else { - nonNumericPrimaries = true; - } - } else { // secondary - - if (!numeric) { - if (earliestNonNumericItem < 0 || - i < earliestNonNumericItem) { - earliestNonNumericItem = i; - } - } + if (increasing) { + if (lineno > 0 && value <= m_prevValues[i]) { + increasing = false; } } - if (lineno < 10) { - m_example.push_back(tidyList); - if (lineno == 0 || tidyList.size() > m_maxExampleCols) { - m_maxExampleCols = tidyList.size(); - } - } + m_prevValues[i] = value; + } - ++lineno; + m_columnQualities[i] = + (numeric ? ColumnNumeric : 0) | + (integral ? ColumnIntegral : 0) | + (increasing ? ColumnIncreasing : 0) | + (large ? 
ColumnLarge : 0); + } - if (lineno == 50) break; + if (lineno < 10) { + m_example.push_back(list); + if (lineno == 0 || cols > m_maxExampleCols) { + m_maxExampleCols = cols; } } - if (nonNumericPrimaries || nonIncreasingPrimaries) { +// std::cerr << "Estimated column qualities: "; +// for (int i = 0; i < m_columnCount; ++i) { +// std::cerr << int(m_columnQualities[i]) << " "; +// } +// std::cerr << std::endl; +} + +void +CSVFormat::guessPurposes() +{ + m_timingType = CSVFormat::ImplicitTiming; + m_timeUnits = CSVFormat::TimeWindows; - // Primaries are probably not a series of times + int timingColumnCount = 0; + + for (int i = 0; i < m_columnCount; ++i) { + + ColumnPurpose purpose = ColumnUnknown; + bool primary = (i == 0); - m_timingType = CSVFormat::ImplicitTiming; - m_timeUnits = CSVFormat::TimeWindows; - - if (nonNumericPrimaries) { - m_modelType = CSVFormat::OneDimensionalModel; - } else if (itemCount == 1 || variableItemCount || - (earliestNonNumericItem != -1)) { - m_modelType = CSVFormat::TwoDimensionalModel; - } else { - m_modelType = CSVFormat::ThreeDimensionalModel; - } + ColumnQualities qualities = m_columnQualities[i]; - } else { + bool numeric = (qualities & ColumnNumeric); + bool integral = (qualities & ColumnIntegral); + bool increasing = (qualities & ColumnIncreasing); + bool large = (qualities & ColumnLarge); - // Increasing numeric primaries -- likely to be time + bool timingColumn = (numeric && increasing); - m_timingType = CSVFormat::ExplicitTiming; + if (timingColumn) { - if (floatPrimaries) { - m_timeUnits = CSVFormat::TimeSeconds; - } else { - m_timeUnits = CSVFormat::TimeAudioFrames; - } + ++timingColumnCount; + + if (primary) { - if (itemCount == 1) { - m_modelType = CSVFormat::OneDimensionalModel; - } else if (variableItemCount || (earliestNonNumericItem != -1)) { - if (earliestNonNumericItem != -1 && earliestNonNumericItem < 2) { - m_modelType = CSVFormat::OneDimensionalModel; - } else { - m_modelType = CSVFormat::TwoDimensionalModel; - } - } else { - m_modelType = CSVFormat::ThreeDimensionalModel; - } + purpose = ColumnStartTime; + + m_timingType = ExplicitTiming; + + if (integral && large) { + m_timeUnits = TimeAudioFrames; + } else { + m_timeUnits = TimeSeconds; + } + + } else { + + if (timingColumnCount == 2 && m_timingType == ExplicitTiming) { + purpose = ColumnEndTime; + } + } + } + + if (purpose == ColumnUnknown) { + if (numeric) { + purpose = ColumnValue; + } else { + purpose = ColumnLabel; + } + } + + setColumnPurpose(i, purpose); + } + + int valueCount = 0; + for (int i = 0; i < m_columnCount; ++i) { + if (m_columnPurposes[i] == ColumnValue) ++valueCount; } - std::cerr << "Estimated model type: " << m_modelType << std::endl; - std::cerr << "Estimated timing type: " << m_timingType << std::endl; - std::cerr << "Estimated units: " << m_timeUnits << std::endl; + if (valueCount == 2 && timingColumnCount == 1) { + // If we have exactly two apparent value columns and only one + // timing column, but one value column is integral and the + // other is not, guess that whichever one matches the integral + // status of the time column is either duration or end time + if (m_timingType == ExplicitTiming) { + int a = -1, b = -1; + for (int i = 0; i < m_columnCount; ++i) { + if (m_columnPurposes[i] == ColumnValue) { + if (a == -1) a = i; + else b = i; + } + } + if ((m_columnQualities[a] & ColumnIntegral) != + (m_columnQualities[b] & ColumnIntegral)) { + int timecol = a; + if ((m_columnQualities[a] & ColumnIntegral) != + (m_columnQualities[0] & ColumnIntegral)) { + 
timecol = b; + } + if (m_columnQualities[timecol] & ColumnIncreasing) { + // This shouldn't happen; should have been settled above + m_columnPurposes[timecol] = ColumnEndTime; + } else { + m_columnPurposes[timecol] = ColumnDuration; + } + --valueCount; + } + } + } + + if (timingColumnCount > 1) { + m_modelType = TwoDimensionalModelWithDuration; + } else { + if (valueCount == 0) { + m_modelType = OneDimensionalModel; + } else if (valueCount == 1) { + m_modelType = TwoDimensionalModel; + } else { + m_modelType = ThreeDimensionalModel; + } + } + +// std::cerr << "Estimated column purposes: "; +// for (int i = 0; i < m_columnCount; ++i) { +// std::cerr << int(m_columnPurposes[i]) << " "; +// } +// std::cerr << std::endl; + +// std::cerr << "Estimated model type: " << m_modelType << std::endl; +// std::cerr << "Estimated timing type: " << m_timingType << std::endl; +// std::cerr << "Estimated units: " << m_timeUnits << std::endl; } +CSVFormat::ColumnPurpose +CSVFormat::getColumnPurpose(int i) +{ + while (m_columnPurposes.size() <= i) { + m_columnPurposes.push_back(ColumnUnknown); + } + return m_columnPurposes[i]; +} + +CSVFormat::ColumnPurpose +CSVFormat::getColumnPurpose(int i) const +{ + if (m_columnPurposes.size() <= i) { + return ColumnUnknown; + } + return m_columnPurposes[i]; +} + +void +CSVFormat::setColumnPurpose(int i, ColumnPurpose p) +{ + while (m_columnPurposes.size() <= i) { + m_columnPurposes.push_back(ColumnUnknown); + } + m_columnPurposes[i] = p; +} + + + + diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CSVFormat.h --- a/data/fileio/CSVFormat.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CSVFormat.h Sun Jul 01 11:53:00 2012 +0100 @@ -25,6 +25,7 @@ enum ModelType { OneDimensionalModel, TwoDimensionalModel, + TwoDimensionalModelWithDuration, ThreeDimensionalModel }; @@ -32,14 +33,29 @@ ExplicitTiming, ImplicitTiming }; - + enum TimeUnits { TimeSeconds, TimeAudioFrames, TimeWindows }; - CSVFormat(QString path); // guess format + enum ColumnPurpose { + ColumnUnknown, + ColumnStartTime, + ColumnEndTime, + ColumnDuration, + ColumnValue, + ColumnLabel + }; + + enum ColumnQuality { + ColumnNumeric = 0x1, + ColumnIntegral = 0x2, + ColumnIncreasing = 0x4, + ColumnLarge = 0x8 + }; + typedef unsigned int ColumnQualities; CSVFormat() : // arbitrary defaults m_modelType(TwoDimensionalModel), @@ -48,43 +64,85 @@ m_separator(","), m_sampleRate(44100), m_windowSize(1024), - m_behaviour(QString::KeepEmptyParts) + m_columnCount(0), + m_variableColumnCount(false), + m_allowQuoting(true), + m_maxExampleCols(0) { } - ModelType getModelType() const { return m_modelType; } - TimingType getTimingType() const { return m_timingType; } - TimeUnits getTimeUnits() const { return m_timeUnits; } - QString getSeparator() const { return m_separator; } - size_t getSampleRate() const { return m_sampleRate; } - size_t getWindowSize() const { return m_windowSize; } + CSVFormat(QString path); // guess format - QString::SplitBehavior getSplitBehaviour() const { return m_behaviour; } - - void setModelType(ModelType t) { m_modelType = t; } - void setTimingType(TimingType t) { m_timingType = t; } - void setTimeUnits(TimeUnits t) { m_timeUnits = t; } - void setSeparator(QString s) { m_separator = s; } - void setSampleRate(size_t r) { m_sampleRate = r; } - void setWindowSize(size_t s) { m_windowSize = s; } + /** + * Guess the format of the given CSV file, setting the fields in + * this object accordingly. 
If the current separator is the empty + * string, the separator character will also be guessed; otherwise + * the current separator will be used. The other properties of + * this object will be set according to guesses from the file. + */ + void guessFormatFor(QString path); + + ModelType getModelType() const { return m_modelType; } + TimingType getTimingType() const { return m_timingType; } + TimeUnits getTimeUnits() const { return m_timeUnits; } + size_t getSampleRate() const { return m_sampleRate; } + size_t getWindowSize() const { return m_windowSize; } + int getColumnCount() const { return m_columnCount; } + bool getAllowQuoting() const { return m_allowQuoting; } + QChar getSeparator() const { + if (m_separator == "") return ' '; + else return m_separator[0]; + } - void setModelType(ModelType t) { m_modelType = t; } - void setTimingType(TimingType t) { m_timingType = t; } - void setTimeUnits(TimeUnits t) { m_timeUnits = t; } - void setSeparator(QString s) { m_separator = s; } - void setSampleRate(size_t r) { m_sampleRate = r; } - void setWindowSize(size_t s) { m_windowSize = s; } + void setModelType(ModelType t) { m_modelType = t; } + void setTimingType(TimingType t) { m_timingType = t; } + void setTimeUnits(TimeUnits t) { m_timeUnits = t; } + void setSeparator(QChar s) { m_separator = s; } + void setSampleRate(size_t r) { m_sampleRate = r; } + void setWindowSize(size_t s) { m_windowSize = s; } + void setColumnCount(int c) { m_columnCount = c; } + void setAllowQuoting(bool q) { m_allowQuoting = q; } + + QList<ColumnPurpose> getColumnPurposes() const { return m_columnPurposes; } + void setColumnPurposes(QList<ColumnPurpose> cl) { m_columnPurposes = cl; } + + ColumnPurpose getColumnPurpose(int i); + ColumnPurpose getColumnPurpose(int i) const; + void setColumnPurpose(int i, ColumnPurpose p); - // only valid if constructor that guesses format was used: + // read-only; only valid if format has been guessed: + QList<ColumnQualities> getColumnQualities() const { return m_columnQualities; } + + // read-only; only valid if format has been guessed: QList<QStringList> getExample() const { return m_example; } int getMaxExampleCols() const { return m_maxExampleCols; } + +protected: + ModelType m_modelType; + TimingType m_timingType; + TimeUnits m_timeUnits; + QString m_separator; + size_t m_sampleRate; + size_t m_windowSize; -protected: - ModelType m_modelType; - TimingType m_timingType; - TimeUnits m_timeUnits; - QString m_separator; - size_t m_sampleRate; - size_t m_windowSize; + int m_columnCount; + bool m_variableColumnCount; - QString::SplitBehavior m_behaviour; + QList<ColumnQualities> m_columnQualities; + QList<ColumnPurpose> m_columnPurposes; + + QList<float> m_prevValues; + + bool m_allowQuoting; QList<QStringList> m_example; int m_maxExampleCols; + + void guessSeparator(QString line); + void guessQualities(QString line, int lineno); + void guessPurposes(); + + void guessFormatFor_Old(QString path); + }; #endif diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CachedFile.cpp --- a/data/fileio/CachedFile.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CachedFile.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -78,9 +78,9 @@ { Profiler p("CachedFile::CachedFile[1]"); - std::cerr << "CachedFile::CachedFile: origin is \"" - << origin.toStdString() << "\"" << std::endl; - check(); + SVDEBUG << "CachedFile::CachedFile: origin is \"" + << origin << "\"" << endl; + checkFile(); } CachedFile::CachedFile(QUrl url, @@ -93,9 +93,9 @@ { Profiler p("CachedFile::CachedFile[2]"); - std::cerr << "CachedFile::CachedFile: url is \"" - << url.toString().toStdString() << "\"" << std::endl; - check(); + SVDEBUG << "CachedFile::CachedFile: url is \"" + << url.toString() << "\"" << endl; + checkFile(); } CachedFile::~CachedFile() { @@ -115,7 +115,7 @@ } void -CachedFile::check() +CachedFile::checkFile() { //!!! n.b.
obvious race condition here if different CachedFile // objects for same url used in more than one thread -- need to @@ -132,20 +132,20 @@ m_localFilename = getLocalFilenameFor(m_origin); if (!QFileInfo(m_localFilename).exists()) { - std::cerr << "CachedFile::check: Local file does not exist, making a note that it hasn't been retrieved" << std::endl; + SVDEBUG << "CachedFile::check: Local file does not exist, making a note that it hasn't been retrieved" << endl; updateLastRetrieval(false); // empirically! } QDateTime lastRetrieval = getLastRetrieval(); if (lastRetrieval.isValid()) { - std::cerr << "CachedFile::check: Valid last retrieval at " - << lastRetrieval.toString().toStdString() << std::endl; + SVDEBUG << "CachedFile::check: Valid last retrieval at " + << lastRetrieval.toString() << endl; // this will not be the case if the file is missing, after // updateLastRetrieval(false) was called above m_ok = true; if (lastRetrieval.addDays(2) < QDateTime::currentDateTime()) { //!!! - std::cerr << "CachedFile::check: Out of date; trying to retrieve again" << std::endl; + SVDEBUG << "CachedFile::check: Out of date; trying to retrieve again" << endl; // doesn't matter if retrieval fails -- we just don't // update the last retrieval time @@ -154,17 +154,17 @@ // retrieval every single time if it isn't working if (retrieve()) { - std::cerr << "CachedFile::check: Retrieval succeeded" << std::endl; + SVDEBUG << "CachedFile::check: Retrieval succeeded" << endl; updateLastRetrieval(true); } else { std::cerr << "CachedFile::check: Retrieval failed, will try again later (using existing file for now)" << std::endl; } } } else { - std::cerr << "CachedFile::check: No valid last retrieval" << std::endl; + SVDEBUG << "CachedFile::check: No valid last retrieval" << endl; // there is no acceptable file if (retrieve()) { - std::cerr << "CachedFile::check: Retrieval succeeded" << std::endl; + SVDEBUG << "CachedFile::check: Retrieval succeeded" << endl; m_ok = true; updateLastRetrieval(true); } else { @@ -191,28 +191,28 @@ FileSource fs(m_origin, m_reporter, m_preferredContentType); if (!fs.isOK() || !fs.isAvailable()) { - std::cerr << "CachedFile::retrieve: ERROR: FileSource reported unavailable or failure" << std::endl; + SVDEBUG << "CachedFile::retrieve: ERROR: FileSource reported unavailable or failure" << endl; return false; } fs.waitForData(); if (!fs.isOK()) { - std::cerr << "CachedFile::retrieve: ERROR: FileSource reported failure during receive" << std::endl; + SVDEBUG << "CachedFile::retrieve: ERROR: FileSource reported failure during receive" << endl; return false; } QString tempName = fs.getLocalFilename(); QFile tempFile(tempName); if (!tempFile.exists()) { - std::cerr << "CachedFile::retrieve: ERROR: FileSource reported success, but local temporary file \"" << tempName.toStdString() << "\" does not exist" << std::endl; + SVDEBUG << "CachedFile::retrieve: ERROR: FileSource reported success, but local temporary file \"" << tempName << "\" does not exist" << endl; return false; } QFile previous(m_localFilename); if (previous.exists()) { if (!previous.remove()) { - std::cerr << "CachedFile::retrieve: ERROR: Failed to remove previous copy of cached file at \"" << m_localFilename.toStdString() << "\"" << std::endl; + std::cerr << "CachedFile::retrieve: ERROR: Failed to remove previous copy of cached file at \"" << m_localFilename << "\"" << std::endl; return false; } } @@ -222,11 +222,11 @@ //!!! 
disk space left) if (!tempFile.copy(m_localFilename)) { - std::cerr << "CachedFile::retrieve: ERROR: Failed to copy newly retrieved file from \"" << tempName.toStdString() << "\" to \"" << m_localFilename.toStdString() << "\"" << std::endl; + std::cerr << "CachedFile::retrieve: ERROR: Failed to copy newly retrieved file from \"" << tempName << "\" to \"" << m_localFilename << "\"" << std::endl; return false; } - std::cerr << "CachedFile::retrieve: Successfully copied newly retrieved file \"" << tempName.toStdString() << "\" to its home at \"" << m_localFilename.toStdString() << "\"" << std::endl; + SVDEBUG << "CachedFile::retrieve: Successfully copied newly retrieved file \"" << tempName << "\" to its home at \"" << m_localFilename << "\"" << endl; return true; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CachedFile.h --- a/data/fileio/CachedFile.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CachedFile.h Sun Jul 01 11:53:00 2012 +0100 @@ -46,7 +46,7 @@ ProgressReporter *m_reporter; bool m_ok; - void check(); + void checkFile(); bool retrieve(); QDateTime getLastRetrieval(); diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/CodedAudioFileReader.cpp --- a/data/fileio/CodedAudioFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/CodedAudioFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -22,6 +22,7 @@ #include "base/Serialiser.h" #include "base/Resampler.h" +#include #include #include #include @@ -40,7 +41,7 @@ m_resampler(0), m_resampleBuffer(0) { - std::cerr << "CodedAudioFileReader::CodedAudioFileReader: rate " << targetRate << std::endl; + SVDEBUG << "CodedAudioFileReader::CodedAudioFileReader: rate " << targetRate << endl; m_frameCount = 0; m_sampleRate = targetRate; @@ -54,14 +55,14 @@ if (m_cacheFileWritePtr) sf_close(m_cacheFileWritePtr); - std::cerr << "CodedAudioFileReader::~CodedAudioFileReader: deleting cache file reader" << std::endl; + SVDEBUG << "CodedAudioFileReader::~CodedAudioFileReader: deleting cache file reader" << endl; delete m_cacheFileReader; delete[] m_cacheWriteBuffer; if (m_cacheFileName != "") { if (!QFile(m_cacheFileName).remove()) { - std::cerr << "WARNING: CodedAudioFileReader::~CodedAudioFileReader: Failed to delete cache file \"" << m_cacheFileName.toStdString() << "\"" << std::endl; + std::cerr << "WARNING: CodedAudioFileReader::~CodedAudioFileReader: Failed to delete cache file \"" << m_cacheFileName << "\"" << std::endl; } } @@ -72,7 +73,7 @@ void CodedAudioFileReader::startSerialised(QString id) { - std::cerr << "CodedAudioFileReader::startSerialised(" << id.toStdString() << ")" << std::endl; + SVDEBUG << "CodedAudioFileReader::startSerialised(" << id << ")" << endl; delete m_serialiser; m_serialiser = new Serialiser(id); @@ -92,15 +93,15 @@ { QMutexLocker locker(&m_cacheMutex); - std::cerr << "CodedAudioFileReader::initialiseDecodeCache: file rate = " << m_fileRate << std::endl; + SVDEBUG << "CodedAudioFileReader::initialiseDecodeCache: file rate = " << m_fileRate << endl; if (m_fileRate == 0) { - std::cerr << "CodedAudioFileReader::initialiseDecodeCache: ERROR: File sample rate unknown (bug in subclass implementation?)" << std::endl; + SVDEBUG << "CodedAudioFileReader::initialiseDecodeCache: ERROR: File sample rate unknown (bug in subclass implementation?)" << endl; m_fileRate = 48000; // got to have something } if (m_sampleRate == 0) { m_sampleRate = m_fileRate; - std::cerr << "CodedAudioFileReader::initialiseDecodeCache: rate (from file) = " << m_fileRate << std::endl; + SVDEBUG << "CodedAudioFileReader::initialiseDecodeCache: 
rate (from file) = " << m_fileRate << endl; } if (m_fileRate != m_sampleRate) { std::cerr << "CodedAudioFileReader: resampling " << m_fileRate << " -> " << m_sampleRate << std::endl; @@ -144,7 +145,7 @@ m_cacheFileReader = new WavFileReader(m_cacheFileName); if (!m_cacheFileReader->isOK()) { - std::cerr << "ERROR: CodedAudioFileReader::initialiseDecodeCache: Failed to construct WAV file reader for temporary file: " << m_cacheFileReader->getError().toStdString() << std::endl; + std::cerr << "ERROR: CodedAudioFileReader::initialiseDecodeCache: Failed to construct WAV file reader for temporary file: " << m_cacheFileReader->getError() << std::endl; delete m_cacheFileReader; m_cacheFileReader = 0; m_cacheMode = CacheInMemory; @@ -152,7 +153,7 @@ } } else { - std::cerr << "CodedAudioFileReader::initialiseDecodeCache: failed to open cache file \"" << m_cacheFileName.toStdString() << "\" (" << m_channelCount << " channels, sample rate " << m_sampleRate << " for writing, falling back to in-memory cache" << std::endl; + std::cerr << "CodedAudioFileReader::initialiseDecodeCache: failed to open cache file \"" << m_cacheFileName << "\" (" << m_channelCount << " channels, sample rate " << m_sampleRate << " for writing, falling back to in-memory cache" << std::endl; m_cacheMode = CacheInMemory; } @@ -354,7 +355,7 @@ // locking) if (!m_initialised) { - std::cerr << "CodedAudioFileReader::getInterleavedFrames: not initialised" << std::endl; + SVDEBUG << "CodedAudioFileReader::getInterleavedFrames: not initialised" << endl; return; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/FileReadThread.cpp --- a/data/fileio/FileReadThread.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/FileReadThread.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -20,6 +20,7 @@ #include #include +#include //#define DEBUG_FILE_READ_THREAD 1 @@ -46,7 +47,7 @@ notifyCancelled(); #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::run() exiting" << std::endl; + SVDEBUG << "FileReadThread::run() exiting" << endl; #endif } @@ -54,7 +55,7 @@ FileReadThread::finish() { #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::finish()" << std::endl; + SVDEBUG << "FileReadThread::finish()" << endl; #endif { @@ -72,7 +73,7 @@ m_condition.wakeAll(); #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::finish() exiting" << std::endl; + SVDEBUG << "FileReadThread::finish() exiting" << endl; #endif } @@ -112,7 +113,7 @@ } #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::cancel(" << token << ") waking condition" << std::endl; + SVDEBUG << "FileReadThread::cancel(" << token << ") waking condition" << endl; #endif m_condition.wakeAll(); @@ -215,7 +216,7 @@ m_mutex.unlock(); #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::process: reading " << request.start << ", " << request.size << " on " << request.fd << std::endl; + SVDEBUG << "FileReadThread::process: reading " << request.start << ", " << request.size << " on " << request.fd << endl; #endif bool successful = false; @@ -288,14 +289,14 @@ m_queue.erase(token); m_readyRequests[token] = request; #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::process: done, marking as ready (success = " << m_readyRequests[token].successful << ")" << std::endl; + SVDEBUG << "FileReadThread::process: done, marking as ready (success = " << m_readyRequests[token].successful << ")" << endl; #endif } else { #ifdef DEBUG_FILE_READ_THREAD if (m_exiting) { - std::cerr << "FileReadThread::process: exiting" << std::endl; + SVDEBUG << "FileReadThread::process: 
exiting" << endl; } else { - std::cerr << "FileReadThread::process: request disappeared" << std::endl; + SVDEBUG << "FileReadThread::process: request disappeared" << endl; } #endif } @@ -311,7 +312,7 @@ int token = *m_newlyCancelled.begin(); #ifdef DEBUG_FILE_READ_THREAD - std::cerr << "FileReadThread::notifyCancelled: token " << token << std::endl; + SVDEBUG << "FileReadThread::notifyCancelled: token " << token << endl; #endif m_newlyCancelled.erase(token); diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/FileSource.cpp --- a/data/fileio/FileSource.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/FileSource.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -77,19 +77,28 @@ m_preferredContentType(preferredContentType), m_ok(false), m_lastStatus(0), + m_resource(fileOrUrl.startsWith(':')), m_remote(isRemote(fileOrUrl)), m_done(false), m_leaveLocalFile(false), m_reporter(reporter), m_refCounted(false) { + if (m_resource) { // qrc file + m_url = QUrl("qrc" + fileOrUrl); + } + + if (m_url.toString() == "") { + m_url = QUrl(fileOrUrl, QUrl::TolerantMode); + } + #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(" << fileOrUrl.toStdString() << ")" << std::endl; + std::cerr << "FileSource::FileSource(" << fileOrUrl << "): url <" << m_url.toString() << ">" << std::endl; incCount(m_url.toString()); #endif if (!canHandleScheme(m_url)) { - std::cerr << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString().toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString() << "\"" << endl; m_errorString = tr("Unsupported scheme in URL"); return; } @@ -99,7 +108,7 @@ if (!isRemote() && !isAvailable()) { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource: Failed to open local file with URL \"" << m_url.toString().toStdString() << "; trying again assuming filename was encoded" << std::endl; + std::cerr << "FileSource::FileSource: Failed to open local file with URL \"" << m_url.toString() << "\"; trying again assuming filename was encoded" << std::endl; #endif m_url = QUrl::fromEncoded(fileOrUrl.toAscii()); init(); @@ -144,7 +153,7 @@ } #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(string) exiting" << std::endl; + SVDEBUG << "FileSource::FileSource(string) exiting" << endl; #endif } @@ -155,6 +164,7 @@ m_localFile(0), m_ok(false), m_lastStatus(0), + m_resource(false), m_remote(isRemote(url.toString())), m_done(false), m_leaveLocalFile(false), @@ -162,12 +172,12 @@ m_refCounted(false) { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(" << url.toString().toStdString() << ") [as url]" << std::endl; + SVDEBUG << "FileSource::FileSource(" << url.toString() << ") [as url]" << endl; incCount(m_url.toString()); #endif if (!canHandleScheme(m_url)) { - std::cerr << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString().toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString() << "\"" << endl; m_errorString = tr("Unsupported scheme in URL"); return; } @@ -175,7 +185,7 @@ init(); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(url) exiting" << std::endl; + SVDEBUG << "FileSource::FileSource(url) exiting" << endl; #endif } @@ -187,6 +197,7 @@ m_localFile(0), m_ok(rf.m_ok), m_lastStatus(rf.m_lastStatus), + m_resource(rf.m_resource), m_remote(rf.m_remote), m_done(false), m_leaveLocalFile(false), @@ -194,12 +205,12 @@ m_refCounted(false) { #ifdef DEBUG_FILE_SOURCE - 
std::cerr << "FileSource::FileSource(" << m_url.toString().toStdString() << ") [copy ctor]" << std::endl; + SVDEBUG << "FileSource::FileSource(" << m_url.toString() << ") [copy ctor]" << endl; incCount(m_url.toString()); #endif if (!canHandleScheme(m_url)) { - std::cerr << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString().toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource::FileSource: ERROR: Unsupported scheme in URL \"" << m_url.toString() << "\"" << endl; m_errorString = tr("Unsupported scheme in URL"); return; } @@ -209,8 +220,8 @@ } else { QMutexLocker locker(&m_mapMutex); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(copy ctor): ref count is " - << m_refCountMap[m_url] << std::endl; + SVDEBUG << "FileSource::FileSource(copy ctor): ref count is " + << m_refCountMap[m_url] << endl; #endif if (m_refCountMap[m_url] > 0) { m_refCountMap[m_url]++; @@ -228,18 +239,18 @@ m_done = true; #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(" << m_url.toString().toStdString() << ") [copy ctor]: note: local filename is \"" << m_localFilename.toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource::FileSource(" << m_url.toString() << ") [copy ctor]: note: local filename is \"" << m_localFilename << "\"" << endl; #endif #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::FileSource(copy ctor) exiting" << std::endl; + SVDEBUG << "FileSource::FileSource(copy ctor) exiting" << endl; #endif } FileSource::~FileSource() { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource(" << m_url.toString().toStdString() << ")::~FileSource" << std::endl; + std::cerr << "FileSource(" << m_url.toString() << ")::~FileSource" << std::endl; decCount(m_url.toString()); #endif @@ -251,7 +262,23 @@ void FileSource::init() { - if (!isRemote()) { + if (isResource()) { +#ifdef DEBUG_FILE_SOURCE + std::cerr << "FileSource::init: Is a resource" << std::endl; +#endif + QString resourceFile = m_url.toString(); + resourceFile.replace(QRegExp("^qrc:"), ":"); + + if (!QFileInfo(resourceFile).exists()) { +#ifdef DEBUG_FILE_SOURCE + std::cerr << "FileSource::init: Resource file of this name does not exist, switching to non-resource URL" << std::endl; +#endif + m_url = resourceFile; + m_resource = false; + } + } + + if (!isRemote() && !isResource()) { #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource::init: Not a remote URL" << std::endl; #endif @@ -266,7 +293,8 @@ #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource::init: URL translates to local filename \"" - << m_localFilename.toStdString() << "\"" << std::endl; + << m_localFilename << "\" (with literal=" << literal << ")" + << std::endl; #endif m_ok = true; m_lastStatus = 200; @@ -307,26 +335,62 @@ } if (m_localFilename == "") return; + m_localFile = new QFile(m_localFilename); m_localFile->open(QFile::WriteOnly); - QString scheme = m_url.scheme().toLower(); + if (isResource()) { + + // Absent resource file case was dealt with at the top -- this + // is the successful case + + QString resourceFileName = m_url.toString(); + resourceFileName.replace(QRegExp("^qrc:"), ":"); + QFile resourceFile(resourceFileName); + resourceFile.open(QFile::ReadOnly); + QByteArray ba(resourceFile.readAll()); + +#ifdef DEBUG_FILE_SOURCE + std::cerr << "Copying " << ba.size() << " bytes from resource file to cache file" << std::endl; +#endif + + qint64 written = m_localFile->write(ba); + m_localFile->close(); + delete m_localFile; + m_localFile = 0; + + if (written != ba.size()) { +#ifdef DEBUG_FILE_SOURCE + std::cerr << "Copy 
failed (wrote " << written << " bytes)" << std::endl; +#endif + m_ok = false; + return; + } else { + m_ok = true; + m_lastStatus = 200; + m_done = true; + } + + } else { + + QString scheme = m_url.scheme().toLower(); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::init: Don't have local copy of \"" - << m_url.toString().toStdString() << "\", retrieving" << std::endl; + std::cerr << "FileSource::init: Don't have local copy of \"" + << m_url.toString() << "\", retrieving" << std::endl; #endif - if (scheme == "http") { - initHttp(); + if (scheme == "http") { + initHttp(); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource: initHttp succeeded" << std::endl; + std::cerr << "FileSource: initHttp succeeded" << std::endl; #endif - } else if (scheme == "ftp") { - initFtp(); - } else { - m_remote = false; - m_ok = false; + } else if (scheme == "ftp") { + initFtp(); + } else { + m_remote = false; + m_ok = false; + } } if (m_ok) { @@ -355,7 +419,7 @@ m_refCountMap[m_url]++; m_refCounted = true; - if (m_reporter) { + if (m_reporter && !m_done) { m_reporter->setMessage (tr("Downloading %1...").arg(m_url.toString())); connect(m_reporter, SIGNAL(cancelled()), this, SLOT(cancelled())); @@ -422,8 +486,8 @@ QString path = "/" + QString(m_url.toEncoded()).section('/', 3); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource: path is \"" - << path.toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource: path is \"" + << path << "\"" << endl; #endif if (m_preferredContentType == "") { @@ -431,7 +495,7 @@ } else { #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource: indicating preferred content type of \"" - << m_preferredContentType.toStdString() << "\"" << std::endl; + << m_preferredContentType << "\"" << std::endl; #endif QHttpRequestHeader header("GET", path); header.setValue("Host", m_url.host()); @@ -513,7 +577,8 @@ // Note that a "scheme" with length 1 is probably a DOS drive letter QString scheme = url.scheme().toLower(); return (scheme == "http" || scheme == "ftp" || - scheme == "file" || scheme == "" || scheme.length() == 1); + scheme == "file" || scheme == "qrc" || + scheme == "" || scheme.length() == 1); } bool @@ -524,8 +589,8 @@ if (!m_ok) available = false; else available = (m_lastStatus / 100 == 2); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::isAvailable: " << (available ? "yes" : "no") - << std::endl; + SVDEBUG << "FileSource::isAvailable: " << (available ? 
"yes" : "no") + << endl; #endif return available; } @@ -543,7 +608,7 @@ FileSource::waitForData() { while (m_ok && !m_done) { -// std::cerr << "FileSource::waitForData: calling QApplication::processEvents" << std::endl; +// SVDEBUG << "FileSource::waitForData: calling QApplication::processEvents" << endl; QCoreApplication::processEvents(); usleep(10000); } @@ -568,6 +633,12 @@ } bool +FileSource::isResource() const +{ + return m_resource; +} + +bool FileSource::isRemote() const { return m_remote; @@ -586,6 +657,12 @@ } QString +FileSource::getBasename() const +{ + return QFileInfo(m_localFilename).fileName(); +} + +QString FileSource::getContentType() const { return m_contentType; @@ -617,14 +694,14 @@ FileSource::httpResponseHeaderReceived(const QHttpResponseHeader &resp) { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::httpResponseHeaderReceived" << std::endl; + SVDEBUG << "FileSource::httpResponseHeaderReceived" << endl; #endif if (resp.statusCode() / 100 == 3) { QString location = resp.value("Location"); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::responseHeaderReceived: redirect to \"" - << location.toStdString() << "\" received" << std::endl; + SVDEBUG << "FileSource::responseHeaderReceived: redirect to \"" + << location << "\" received" << endl; #endif if (location != "") { QUrl newUrl(location); @@ -651,13 +728,13 @@ m_errorString = QString("%1 %2") .arg(resp.statusCode()).arg(resp.reasonPhrase()); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::responseHeaderReceived: " - << m_errorString.toStdString() << std::endl; + SVDEBUG << "FileSource::responseHeaderReceived: " + << m_errorString << endl; #endif } else { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::responseHeaderReceived: " - << m_lastStatus << std::endl; + SVDEBUG << "FileSource::responseHeaderReceived: " + << m_lastStatus << endl; #endif if (resp.hasContentType()) m_contentType = resp.contentType(); } @@ -677,8 +754,8 @@ if (!error) { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::ftpCommandFinished: success for command " - << command << std::endl; + SVDEBUG << "FileSource::ftpCommandFinished: success for command " + << command << endl; #endif return; } @@ -766,7 +843,7 @@ FileSource::deleteCacheFile() { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::deleteCacheFile(\"" << m_localFilename.toStdString() << "\")" << std::endl; + SVDEBUG << "FileSource::deleteCacheFile(\"" << m_localFilename << "\")" << endl; #endif cleanup(); @@ -803,11 +880,11 @@ if (!QFile(m_localFilename).remove()) { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::deleteCacheFile: ERROR: Failed to delete file \"" << m_localFilename.toStdString() << "\"" << std::endl; + std::cerr << "FileSource::deleteCacheFile: ERROR: Failed to delete file \"" << m_localFilename << "\"" << std::endl; #endif } else { #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::deleteCacheFile: Deleted cache file \"" << m_localFilename.toStdString() << "\"" << std::endl; + SVDEBUG << "FileSource::deleteCacheFile: Deleted cache file \"" << m_localFilename << "\"" << endl; #endif m_localFilename = ""; } @@ -824,7 +901,7 @@ QMutexLocker locker(&m_mapMutex); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::createCacheFile: refcount is " << m_refCountMap[m_url] << std::endl; + SVDEBUG << "FileSource::createCacheFile: refcount is " << m_refCountMap[m_url] << endl; #endif if (m_refCountMap[m_url] > 0) { @@ -871,7 +948,7 @@ QString filepath(dir.filePath(filename)); #ifdef DEBUG_FILE_SOURCE - std::cerr << "FileSource::createCacheFile: URL is 
\"" << m_url.toString().toStdString() << "\", dir is \"" << dir.path().toStdString() << "\", base \"" << base.toStdString() << "\", extension \"" << extension.toStdString() << "\", filebase \"" << filename.toStdString() << "\", filename \"" << filepath.toStdString() << "\"" << std::endl; + std::cerr << "FileSource::createCacheFile: URL is \"" << m_url.toString() << "\", dir is \"" << dir.path() << "\", base \"" << base << "\", extension \"" << extension << "\", filebase \"" << filename << "\", filename \"" << filepath << "\"" << std::endl; #endif QMutexLocker fcLocker(&m_fileCreationMutex); @@ -883,8 +960,8 @@ #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource::createCacheFile: Failed to create local file \"" - << filepath.toStdString() << "\" for URL \"" - << m_url.toString().toStdString() << "\" (or file already exists): appending suffix instead" << std::endl; + << filepath << "\" for URL \"" + << m_url.toString() << "\" (or file already exists): appending suffix instead" << std::endl; #endif if (extension == "") { @@ -899,8 +976,8 @@ #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource::createCacheFile: ERROR: Failed to create local file \"" - << filepath.toStdString() << "\" for URL \"" - << m_url.toString().toStdString() << "\" (or file already exists)" << std::endl; + << filepath << "\" for URL \"" + << m_url.toString() << "\" (or file already exists)" << std::endl; #endif return ""; @@ -909,8 +986,8 @@ #ifdef DEBUG_FILE_SOURCE std::cerr << "FileSource::createCacheFile: url " - << m_url.toString().toStdString() << " -> local filename " - << filepath.toStdString() << std::endl; + << m_url.toString() << " -> local filename " + << filepath << std::endl; #endif m_localFilename = filepath; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/FileSource.h --- a/data/fileio/FileSource.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/FileSource.h Sun Jul 01 11:53:00 2012 +0100 @@ -23,6 +23,8 @@ #include +#include "base/Debug.h" + class QFtp; class QHttp; class QFile; @@ -122,6 +124,11 @@ bool isDone() const; /** + * Return true if this FileSource is referring to a QRC resource. + */ + bool isResource() const; + + /** * Return true if this FileSource is referring to a remote URL. */ bool isRemote() const; @@ -143,6 +150,12 @@ QString getLocalFilename() const; /** + * Return the base name, i.e. the final path element (including + * extension, if any) of the location. + */ + QString getBasename() const; + + /** * Return the MIME content type of this file, if known. 
*/ QString getContentType() const; @@ -217,6 +230,7 @@ QString m_preferredContentType; bool m_ok; int m_lastStatus; + bool m_resource; bool m_remote; bool m_done; bool m_leaveLocalFile; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MIDIFileReader.cpp --- a/data/fileio/MIDIFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MIDIFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -52,7 +52,7 @@ using namespace MIDIConstants; -//#define MIDI_DEBUG 1 +//#define MIDI_SVDEBUG 1 MIDIFileReader::MIDIFileReader(QString path, @@ -278,7 +278,7 @@ m_error = ""; #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::open() : fileName = " << m_fileName.c_str() << endl; + SVDEBUG << "MIDIFileReader::open() : fileName = " << m_fileName.c_str() << endl; #endif // Open the file @@ -315,7 +315,7 @@ for (unsigned int j = 0; j < m_numberOfTracks; ++j) { #ifdef MIDI_DEBUG - cerr << "Parsing Track " << j << endl; + SVDEBUG << "Parsing Track " << j << endl; #endif if (!skipToNextTrack()) { @@ -350,7 +350,7 @@ } catch (MIDIException e) { - cerr << "MIDIFileReader::open() - caught exception - " << e.what() << endl; + SVDEBUG << "MIDIFileReader::open() - caught exception - " << e.what() << endl; m_error = e.what(); } @@ -392,14 +392,14 @@ { if (midiHeader.size() < 14) { #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseHeader() - file header undersized" << endl; + SVDEBUG << "MIDIFileReader::parseHeader() - file header undersized" << endl; #endif return false; } if (midiHeader.compare(0, 4, MIDI_FILE_HEADER) != 0) { #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseHeader()" + SVDEBUG << "MIDIFileReader::parseHeader()" << "- file header not found or malformed" << endl; #endif @@ -408,7 +408,7 @@ if (midiBytesToLong(midiHeader.substr(4,4)) != 6L) { #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseHeader()" + SVDEBUG << "MIDIFileReader::parseHeader()" << " - header length incorrect" << endl; #endif @@ -498,7 +498,7 @@ data1 = midiByte; #ifdef MIDI_DEBUG - cerr << "using running status (byte " << int(midiByte) << " found)" << endl; + SVDEBUG << "using running status (byte " << int(midiByte) << " found)" << endl; #endif } else { #ifdef MIDI_DEBUG @@ -611,7 +611,7 @@ MIDI_END_OF_EXCLUSIVE) { #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseTrack() - " + SVDEBUG << "MIDIFileReader::parseTrack() - " << "malformed or unsupported SysEx type" << endl; #endif @@ -631,14 +631,14 @@ case MIDI_END_OF_EXCLUSIVE: #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseTrack() - " + SVDEBUG << "MIDIFileReader::parseTrack() - " << "Found a stray MIDI_END_OF_EXCLUSIVE" << endl; #endif break; default: #ifdef MIDI_DEBUG - cerr << "MIDIFileReader::parseTrack()" + SVDEBUG << "MIDIFileReader::parseTrack()" << " - Unsupported MIDI Event Code: " << (int)eventCode << endl; #endif @@ -787,15 +787,15 @@ double seconds = (60.0 * quarters) / tempo; /* - std::cerr << "MIDIFileReader::getTimeForMIDITime(" << midiTime << ")" - << std::endl; - std::cerr << "timing division = " << td << std::endl; + SVDEBUG << "MIDIFileReader::getTimeForMIDITime(" << midiTime << ")" + << endl; + SVDEBUG << "timing division = " << td << endl; std::cerr << "nearest tempo event (of " << m_tempoMap.size() << ") is at " << tempoMIDITime << " (" << tempoRealTime << ")" << std::endl; std::cerr << "quarters since then = " << quarters << std::endl; std::cerr << "tempo = " << tempo << " quarters per minute" << std::endl; std::cerr << "seconds since then = " << seconds << std::endl; - std::cerr << "resulting time = " << (tempoRealTime + RealTime::fromSeconds(seconds)) << 
std::endl; + SVDEBUG << "resulting time = " << (tempoRealTime + RealTime::fromSeconds(seconds)) << endl; */ return tempoRealTime + RealTime::fromSeconds(seconds); @@ -1033,7 +1033,7 @@ Note note(startFrame, (*i)->getPitch(), endFrame - startFrame, level, noteLabel); -// std::cerr << "Adding note " << startFrame << "," << (endFrame-startFrame) << " : " << int((*i)->getPitch()) << std::endl; +// SVDEBUG << "Adding note " << startFrame << "," << (endFrame-startFrame) << " : " << int((*i)->getPitch()) << endl; model->addPoint(note); break; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MP3FileReader.cpp --- a/data/fileio/MP3FileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MP3FileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -96,7 +96,7 @@ return; } else if (sz == 0) { std::cerr << QString("MP3FileReader::MP3FileReader: Warning: reached EOF after only %1 of %2 bytes") - .arg(offset).arg(m_fileSize).toStdString() << std::endl; + .arg(offset).arg(m_fileSize) << std::endl; m_fileSize = offset; break; } @@ -141,7 +141,7 @@ } if (m_error != "") { - std::cerr << "MP3FileReader::MP3FileReader(\"" << m_path.toStdString() << "\"): ERROR: " << m_error.toStdString() << std::endl; + std::cerr << "MP3FileReader::MP3FileReader(\"" << m_path << "\"): ERROR: " << m_error << std::endl; } } @@ -177,7 +177,7 @@ id3_tag *tag = id3_file_tag(file); if (!tag) { #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: No ID3 tag found" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: No ID3 tag found" << endl; #endif id3_file_close(file); return; @@ -189,12 +189,19 @@ m_maker = loadTag(tag, "TPE1"); // "lead artist" if (m_maker == "") m_maker = loadTag(tag, "TPE2"); + for (unsigned int i = 0; i < tag->nframes; ++i) { + if (tag->frames[i]) { + QString value = loadTag(tag, tag->frames[i]->id); + if (value != "") m_tags[tag->frames[i]->id] = value; + } + } + id3_file_close(file); #else #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: ID3 tag support not compiled in" - << std::endl; + SVDEBUG << "MP3FileReader::loadTags: ID3 tag support not compiled in" + << endl; #endif #endif } @@ -208,20 +215,20 @@ id3_frame *frame = id3_tag_findframe(tag, name, 0); if (!frame) { #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: No \"" << name << "\" in ID3 tag" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: No \"" << name << "\" in ID3 tag" << endl; #endif return ""; } if (frame->nfields < 2) { - std::cerr << "MP3FileReader::loadTags: WARNING: Not enough fields (" << frame->nfields << ") for \"" << name << "\" in ID3 tag" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: WARNING: Not enough fields (" << frame->nfields << ") for \"" << name << "\" in ID3 tag" << endl; return ""; } unsigned int nstrings = id3_field_getnstrings(&frame->fields[1]); if (nstrings == 0) { #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: No strings for \"" << name << "\" in ID3 tag" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: No strings for \"" << name << "\" in ID3 tag" << endl; #endif return ""; } @@ -229,7 +236,7 @@ id3_ucs4_t const *ustr = id3_field_getstrings(&frame->fields[1], 0); if (!ustr) { #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: Invalid or absent data for \"" << name << "\" in ID3 tag" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: Invalid or absent data for \"" << name << "\" in ID3 tag" << endl; #endif return ""; } @@ -244,8 +251,8 @@ free(u8str); #ifdef DEBUG_ID3TAG - std::cerr << "MP3FileReader::loadTags: tag \"" << name << "\" -> \"" - << 
rv.toStdString() << "\"" << std::endl; + SVDEBUG << "MP3FileReader::loadTags: tag \"" << name << "\" -> \"" + << rv << "\"" << endl; #endif @@ -358,7 +365,7 @@ initialiseDecodeCache(); if (m_cacheMode == CacheInTemporaryFile) { -// std::cerr << "MP3FileReader::accept: channel count " << m_channelCount << ", file rate " << m_fileRate << ", about to start serialised section" << std::endl; +// SVDEBUG << "MP3FileReader::accept: channel count " << m_channelCount << ", file rate " << m_fileRate << ", about to start serialised section" << endl; startSerialised("MP3FileReader::Decode"); } } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MP3FileReader.h --- a/data/fileio/MP3FileReader.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MP3FileReader.h Sun Jul 01 11:53:00 2012 +0100 @@ -49,6 +49,7 @@ virtual QString getLocation() const { return m_source.getLocation(); } virtual QString getTitle() const { return m_title; } virtual QString getMaker() const { return m_maker; } + virtual TagMap getTags() const { return m_tags; } static void getSupportedExtensions(std::set &extensions); static bool supportsExtension(QString ext); @@ -70,6 +71,7 @@ QString m_error; QString m_title; QString m_maker; + TagMap m_tags; size_t m_fileSize; double m_bitrateNum; size_t m_bitrateDenom; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MatchFileReader.cpp --- a/data/fileio/MatchFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MatchFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -86,7 +86,7 @@ MatchFileReader::~MatchFileReader() { if (m_file) { - std::cerr << "MatchFileReader::MatchFileReader: Closing file" << std::endl; + SVDEBUG << "MatchFileReader::MatchFileReader: Closing file" << endl; m_file->close(); } delete m_file; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MatchFileReader.h --- a/data/fileio/MatchFileReader.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MatchFileReader.h Sun Jul 01 11:53:00 2012 +0100 @@ -18,6 +18,7 @@ #include #include +#include "base/Debug.h" class QFile; class Model; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/MatrixFile.cpp --- a/data/fileio/MatrixFile.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/MatrixFile.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -68,7 +68,7 @@ Profiler profiler("MatrixFile::MatrixFile", true); #ifdef DEBUG_MATRIX_FILE - std::cerr << "MatrixFile::MatrixFile(" << fileBase.toStdString() << ", " << int(mode) << ", " << cellSize << ", " << width << ", " << height << ")" << std::endl; + SVDEBUG << "MatrixFile::MatrixFile(" << fileBase << ", " << int(mode) << ", " << cellSize << ", " << width << ", " << height << ")" << endl; #endif m_createMutex.lock(); @@ -103,14 +103,14 @@ #endif #ifdef DEBUG_MATRIX_FILE - std::cerr << "MatrixFile(" << this << ")::MatrixFile: opening " << fileName.toStdString() << "..." << std::endl; + std::cerr << "MatrixFile(" << this << ")::MatrixFile: opening " << fileName << "..." 
<< std::endl; #endif if ((m_fd = ::open(fileName.toLocal8Bit(), m_flags, m_fmode)) < 0) { ::perror("Open failed"); std::cerr << "ERROR: MatrixFile::MatrixFile: " << "Failed to open cache file \"" - << fileName.toStdString() << "\""; + << fileName << "\""; if (m_mode == WriteOnly) std::cerr << " for writing"; std::cerr << std::endl; throw FailedToOpenFile(fileName); @@ -130,7 +130,7 @@ ::perror("MatrixFile::MatrixFile: read failed"); std::cerr << "ERROR: MatrixFile::MatrixFile: " << "Failed to read header (fd " << m_fd << ", file \"" - << fileName.toStdString() << "\")" << std::endl; + << fileName << "\")" << std::endl; throw FileReadFailed(fileName); } if (header[0] != m_width || header[1] != m_height) { @@ -146,7 +146,7 @@ ++m_refcount[fileName]; #ifdef DEBUG_MATRIX_FILE - std::cerr << "MatrixFile[" << m_fd << "]::MatrixFile: File " << fileName.toStdString() << ", ref " << m_refcount[fileName] << std::endl; + std::cerr << "MatrixFile[" << m_fd << "]::MatrixFile: File " << fileName << ", ref " << m_refcount[fileName] << std::endl; std::cerr << "MatrixFile[" << m_fd << "]::MatrixFile: Done, size is " << "(" << m_width << ", " << m_height << ")" << std::endl; #endif @@ -173,9 +173,9 @@ if (--m_refcount[m_fileName] == 0) { if (::unlink(m_fileName.toLocal8Bit())) { - std::cerr << "WARNING: MatrixFile::~MatrixFile: reference count reached 0, but failed to unlink file \"" << m_fileName.toStdString() << "\"" << std::endl; + std::cerr << "WARNING: MatrixFile::~MatrixFile: reference count reached 0, but failed to unlink file \"" << m_fileName << "\"" << std::endl; } else { - std::cerr << "deleted " << m_fileName.toStdString() << std::endl; + std::cerr << "deleted " << m_fileName << std::endl; } } } @@ -248,7 +248,7 @@ MatrixFile::close() { #ifdef DEBUG_MATRIX_FILE - std::cerr << "MatrixFile::close()" << std::endl; + SVDEBUG << "MatrixFile::close()" << endl; #endif if (m_fd >= 0) { if (::close(m_fd) < 0) { diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/OggVorbisFileReader.cpp --- a/data/fileio/OggVorbisFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/OggVorbisFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -50,7 +50,7 @@ m_channelCount = 0; m_fileRate = 0; -// std::cerr << "OggVorbisFileReader::OggVorbisFileReader(" << m_path.toLocal8Bit().data() << "): now have " << (++instances) << " instances" << std::endl; +// SVDEBUG << "OggVorbisFileReader::OggVorbisFileReader(" << m_path.toLocal8Bit().data() << "): now have " << (++instances) << " instances" << endl; Profiler profiler("OggVorbisFileReader::OggVorbisFileReader", true); @@ -102,7 +102,7 @@ OggVorbisFileReader::~OggVorbisFileReader() { -// std::cerr << "OggVorbisFileReader::~OggVorbisFileReader(" << m_path.toLocal8Bit().data() << "): now have " << (--instances) << " instances" << std::endl; +// SVDEBUG << "OggVorbisFileReader::~OggVorbisFileReader(" << m_path.toLocal8Bit().data() << "): now have " << (--instances) << " instances" << endl; if (m_decodeThread) { m_cancelled = true; m_decodeThread->wait(); @@ -171,19 +171,20 @@ OggVorbisFileReader *reader = (OggVorbisFileReader *)data; if (!reader->m_commentsRead) { - { - const FishSoundComment *comment = fish_sound_comment_first_byname - (fs, "TITLE"); - if (comment && comment->value) { - reader->m_title = QString::fromUtf8(comment->value); - } + const FishSoundComment *comment; + comment = fish_sound_comment_first_byname(fs, "TITLE"); + if (comment && comment->value) { + reader->m_title = QString::fromUtf8(comment->value); } - { - const FishSoundComment *comment = 
fish_sound_comment_first_byname - (fs, "ARTIST"); - if (comment && comment->value) { - reader->m_maker = QString::fromUtf8(comment->value); - } + comment = fish_sound_comment_first_byname(fs, "ARTIST"); + if (comment && comment->value) { + reader->m_maker = QString::fromUtf8(comment->value); + } + comment = fish_sound_comment_first(fs); + while (comment) { + reader->m_tags[QString::fromUtf8(comment->name).toUpper()] = + QString::fromUtf8(comment->value); + comment = fish_sound_comment_next(fs, comment); } reader->m_commentsRead = true; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/OggVorbisFileReader.h --- a/data/fileio/OggVorbisFileReader.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/OggVorbisFileReader.h Sun Jul 01 11:53:00 2012 +0100 @@ -51,6 +51,7 @@ virtual QString getLocation() const { return m_source.getLocation(); } virtual QString getTitle() const { return m_title; } virtual QString getMaker() const { return m_maker; } + virtual TagMap getTags() const { return m_tags; } static void getSupportedExtensions(std::set &extensions); static bool supportsExtension(QString ext); @@ -72,6 +73,7 @@ QString m_error; QString m_title; QString m_maker; + TagMap m_tags; OGGZ *m_oggz; FishSound *m_fishSound; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/PlaylistFileReader.cpp --- a/data/fileio/PlaylistFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/PlaylistFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -135,7 +135,7 @@ QFileInfo(testpath).isFile()) { std::cerr << "Path \"" << line.toStdString() << "\" is relative, resolving to \"" - << testpath.toStdString() << "\"" + << testpath << "\"" << std::endl; line = testpath; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/QuickTimeFileReader.cpp --- a/data/fileio/QuickTimeFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/QuickTimeFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -66,7 +66,7 @@ Profiler profiler("QuickTimeFileReader::QuickTimeFileReader", true); -std::cerr << "QuickTimeFileReader: path is \"" << m_path.toStdString() << "\"" << std::endl; +SVDEBUG << "QuickTimeFileReader: path is \"" << m_path << "\"" << endl; long QTversion; @@ -89,10 +89,12 @@ // (0, m_path.toLocal8Bit().data(), 0); + QByteArray ba = m_path.toLocal8Bit(); + CFURLRef url = CFURLCreateFromFileSystemRepresentation (kCFAllocatorDefault, - (const UInt8 *)m_path.toLocal8Bit().data(), - (CFIndex)m_path.length(), + (const UInt8 *)ba.data(), + (CFIndex)ba.length(), false); @@ -265,12 +267,12 @@ } } - std::cerr << "QuickTimeFileReader::QuickTimeFileReader: frame count is now " << getFrameCount() << ", error is \"\"" << m_error.toStdString() << "\"" << std::endl; + std::cerr << "QuickTimeFileReader::QuickTimeFileReader: frame count is now " << getFrameCount() << ", error is \"\"" << m_error << "\"" << std::endl; } QuickTimeFileReader::~QuickTimeFileReader() { - std::cerr << "QuickTimeFileReader::~QuickTimeFileReader" << std::endl; + SVDEBUG << "QuickTimeFileReader::~QuickTimeFileReader" << endl; if (m_decodeThread) { m_cancelled = true; diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/ResamplingWavFileReader.cpp --- a/data/fileio/ResamplingWavFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/ResamplingWavFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -39,8 +39,8 @@ m_channelCount = 0; m_fileRate = 0; - std::cerr << "ResamplingWavFileReader::ResamplingWavFileReader(\"" - << m_path.toStdString() << "\"): rate " << targetRate << std::endl; + SVDEBUG << "ResamplingWavFileReader::ResamplingWavFileReader(\"" + << m_path << 
"\"): rate " << targetRate << endl; Profiler profiler("ResamplingWavFileReader::ResamplingWavFileReader", true); diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/WavFileReader.cpp --- a/data/fileio/WavFileReader.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/WavFileReader.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -39,7 +39,8 @@ m_file = sf_open(m_path.toLocal8Bit(), SFM_READ, &m_fileInfo); if (!m_file || (!fileUpdating && m_fileInfo.channels <= 0)) { - std::cerr << "WavFileReader::initialize: Failed to open file (" + std::cerr << "WavFileReader::initialize: Failed to open file at \"" + << m_path << "\" (" << sf_strerror(m_file) << ")" << std::endl; if (m_file) { @@ -79,12 +80,12 @@ sf_close(m_file); m_file = sf_open(m_path.toLocal8Bit(), SFM_READ, &m_fileInfo); if (!m_file || m_fileInfo.channels <= 0) { - std::cerr << "WavFileReader::updateFrameCount: Failed to open file (" + std::cerr << "WavFileReader::updateFrameCount: Failed to open file at \"" << m_path << "\" (" << sf_strerror(m_file) << ")" << std::endl; } } -// std::cerr << "WavFileReader::updateFrameCount: now " << m_fileInfo.frames << std::endl; +// SVDEBUG << "WavFileReader::updateFrameCount: now " << m_fileInfo.frames << endl; m_frameCount = m_fileInfo.frames; @@ -121,8 +122,8 @@ } if ((long)start >= m_fileInfo.frames) { -// std::cerr << "WavFileReader::getInterleavedFrames: " << start -// << " > " << m_fileInfo.frames << std::endl; +// SVDEBUG << "WavFileReader::getInterleavedFrames: " << start +// << " > " << m_fileInfo.frames << endl; return; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/WavFileWriter.cpp --- a/data/fileio/WavFileWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/WavFileWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -17,6 +17,8 @@ #include "model/DenseTimeValueModel.h" #include "base/Selection.h" +#include "base/TempWriteFile.h" +#include "base/Exceptions.h" #include @@ -24,23 +26,43 @@ WavFileWriter::WavFileWriter(QString path, size_t sampleRate, - size_t channels) : + size_t channels, + FileWriteMode mode) : m_path(path), m_sampleRate(sampleRate), m_channels(channels), + m_temp(0), m_file(0) { SF_INFO fileInfo; fileInfo.samplerate = m_sampleRate; fileInfo.channels = m_channels; fileInfo.format = SF_FORMAT_WAV | SF_FORMAT_FLOAT; - - m_file = sf_open(m_path.toLocal8Bit(), SFM_WRITE, &fileInfo); - if (!m_file) { - std::cerr << "WavFileWriter: Failed to open file (" - << sf_strerror(m_file) << ")" << std::endl; - m_error = QString("Failed to open audio file '%1' for writing") - .arg(m_path); + + try { + if (mode == WriteToTemporary) { + m_temp = new TempWriteFile(m_path); + m_file = sf_open(m_temp->getTemporaryFilename().toLocal8Bit(), + SFM_WRITE, &fileInfo); + if (!m_file) { + std::cerr << "WavFileWriter: Failed to open file (" + << sf_strerror(m_file) << ")" << std::endl; + m_error = QString("Failed to open audio file '%1' for writing") + .arg(m_temp->getTemporaryFilename()); + } + } else { + m_file = sf_open(m_path.toLocal8Bit(), SFM_WRITE, &fileInfo); + if (!m_file) { + std::cerr << "WavFileWriter: Failed to open file (" + << sf_strerror(m_file) << ")" << std::endl; + m_error = QString("Failed to open audio file '%1' for writing") + .arg(m_path); + } + } + } catch (FileOperationFailed &f) { + m_error = f.what(); + m_temp = 0; + m_file = 0; } } @@ -61,22 +83,32 @@ return m_error; } +QString +WavFileWriter::getWriteFilename() const +{ + if (m_temp) { + return m_temp->getTemporaryFilename(); + } else { + return m_path; + } +} + bool WavFileWriter::writeModel(DenseTimeValueModel *source, 
MultiSelection *selection) { if (source->getChannelCount() != m_channels) { - std::cerr << "WavFileWriter::writeModel: Wrong number of channels (" + SVDEBUG << "WavFileWriter::writeModel: Wrong number of channels (" << source->getChannelCount() << " != " << m_channels << ")" - << std::endl; + << endl; m_error = QString("Failed to write model to audio file '%1'") - .arg(m_path); + .arg(getWriteFilename()); return false; } if (!m_file) { m_error = QString("Failed to write model to audio file '%1': File not open") - .arg(m_path); + .arg(getWriteFilename()); return false; } @@ -131,7 +163,7 @@ { if (!m_file) { m_error = QString("Failed to write model to audio file '%1': File not open") - .arg(m_path); + .arg(getWriteFilename()); return false; } @@ -161,6 +193,11 @@ sf_close(m_file); m_file = 0; } + if (m_temp) { + m_temp->moveToTarget(); + delete m_temp; + m_temp = 0; + } return true; } diff -r 4efa7429cd85 -r c10cb8782576 data/fileio/WavFileWriter.h --- a/data/fileio/WavFileWriter.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/fileio/WavFileWriter.h Sun Jul 01 11:53:00 2012 +0100 @@ -22,11 +22,30 @@ class DenseTimeValueModel; class MultiSelection; +class TempWriteFile; class WavFileWriter { public: - WavFileWriter(QString path, size_t sampleRate, size_t channels); + /** + * Specify the method used to open the destination file. + * + * If WriteToTemporary, the destination will be opened as a + * temporary file which is moved to the target location when the + * WavFileWriter is closed or deleted (to avoid clobbering an + * existing file with a partially written replacement). + * + * If WriteToTarget, the target file will be opened directly + * (necessary when e.g. doing a series of incremental writes to a + * file while keeping it open for reading). + */ + enum FileWriteMode { + WriteToTemporary, + WriteToTarget + }; + + WavFileWriter(QString path, size_t sampleRate, size_t channels, + FileWriteMode mode); virtual ~WavFileWriter(); bool isOK() const; @@ -46,8 +65,11 @@ QString m_path; size_t m_sampleRate; size_t m_channels; + TempWriteFile *m_temp; SNDFILE *m_file; QString m_error; + + QString getWriteFilename() const; }; diff -r 4efa7429cd85 -r c10cb8782576 data/midi/MIDIEvent.h --- a/data/midi/MIDIEvent.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/midi/MIDIEvent.h Sun Jul 01 11:53:00 2012 +0100 @@ -25,6 +25,7 @@ #include #include #include +#include "base/Debug.h" typedef unsigned char MIDIByte; diff -r 4efa7429cd85 -r c10cb8782576 data/midi/MIDIInput.cpp --- a/data/midi/MIDIInput.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/midi/MIDIInput.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -50,7 +50,7 @@ void MIDIInput::callback(double timestamp, std::vector *message) { - std::cerr << "MIDIInput::callback(" << timestamp << ")" << std::endl; + SVDEBUG << "MIDIInput::callback(" << timestamp << ")" << endl; // In my experience so far, the timings passed to this function // are not reliable enough to use. We request instead an audio // frame time from whatever FrameTimer we have been given, and use @@ -84,7 +84,7 @@ return; } std::cerr << "WARNING: MIDIInput::postEvent: MIDI event queue (capacity " << m_buffer.getSize() << " is full!" 
<< std::endl; - std::cerr << "Waiting for something to be processed" << std::endl; + SVDEBUG << "Waiting for something to be processed" << endl; #ifdef _WIN32 Sleep(1); #else diff -r 4efa7429cd85 -r c10cb8782576 data/midi/rtmidi/RtMidi.cpp --- a/data/midi/rtmidi/RtMidi.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/midi/rtmidi/RtMidi.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -761,12 +761,12 @@ case SND_SEQ_EVENT_PORT_UNSUBSCRIBED: #if defined(__RTMIDI_DEBUG__) - std::cerr << "RtMidiIn::alsaMidiHandler: port connection has closed!\n"; + SVDEBUG << "RtMidiIn::alsaMidiHandler: port connection has closed!\n"; // FIXME: this is called for all unsubscribe events, even ones //not related to this particular connection. As it stands, I //see no data provided in the "source" and "dest" fields so //there is nothing we can do about this at this time. - // std::cout << "sender = " << ev->source.client << ", dest = " << ev->dest.port << std::endl; + // std::cout << "sender = " << ev->source.client << ", dest = " << ev->dest.port << endl; #endif //data->doInput = false; break; diff -r 4efa7429cd85 -r c10cb8782576 data/model/AggregateWaveModel.cpp --- a/data/model/AggregateWaveModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/AggregateWaveModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -29,7 +29,7 @@ i != channelSpecs.end(); ++i) { if (i->model->getSampleRate() != channelSpecs.begin()->model->getSampleRate()) { - std::cerr << "AggregateWaveModel::AggregateWaveModel: WARNING: Component models do not all have the same sample rate" << std::endl; + SVDEBUG << "AggregateWaveModel::AggregateWaveModel: WARNING: Component models do not all have the same sample rate" << endl; break; } } diff -r 4efa7429cd85 -r c10cb8782576 data/model/AlignmentModel.cpp --- a/data/model/AlignmentModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/AlignmentModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -141,7 +141,7 @@ AlignmentModel::toReference(size_t frame) const { #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::toReference(" << frame << ")" << std::endl; + SVDEBUG << "AlignmentModel::toReference(" << frame << ")" << endl; #endif if (!m_path) { if (!m_rawPath) return frame; @@ -154,7 +154,7 @@ AlignmentModel::fromReference(size_t frame) const { #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::fromReference(" << frame << ")" << std::endl; + SVDEBUG << "AlignmentModel::fromReference(" << frame << ")" << endl; #endif if (!m_reversePath) { if (!m_rawPath) return frame; @@ -194,8 +194,8 @@ m_rawPath->isReady(&completion); #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::pathCompletionChanged: completion = " - << completion << std::endl; + SVDEBUG << "AlignmentModel::pathCompletionChanged: completion = " + << completion << endl; #endif m_pathComplete = (completion == 100); @@ -242,7 +242,7 @@ } #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::constructPath: " << m_path->getPointCount() << " points, at least " << (2 * m_path->getPointCount() * (3 * sizeof(void *) + sizeof(int) + sizeof(PathPoint))) << " bytes" << std::endl; + SVDEBUG << "AlignmentModel::constructPath: " << m_path->getPointCount() << " points, at least " << (2 * m_path->getPointCount() * (3 * sizeof(void *) + sizeof(int) + sizeof(PathPoint))) << " bytes" << endl; #endif } @@ -250,15 +250,6 @@ AlignmentModel::constructReversePath() const { if (!m_reversePath) { -/*!!! 
- if (!m_rawPath) { - std::cerr << "ERROR: AlignmentModel::constructReversePath: " - << "No raw path available" << std::endl; - return; - } - m_reversePath = new PathModel - (m_rawPath->getSampleRate(), m_rawPath->getResolution(), false); -*/ if (!m_path) { std::cerr << "ERROR: AlignmentModel::constructReversePath: " << "No forward path available" << std::endl; @@ -267,24 +258,10 @@ m_reversePath = new PathModel (m_path->getSampleRate(), m_path->getResolution(), false); } else { -/*!!! - if (!m_rawPath) return; -*/ if (!m_path) return; } m_reversePath->clear(); -/*!!! - SparseTimeValueModel::PointList points = m_rawPath->getPoints(); - - for (SparseTimeValueModel::PointList::const_iterator i = points.begin(); - i != points.end(); ++i) { - long frame = i->frame; - float value = i->value; - long rframe = lrintf(value * m_aligned->getSampleRate()); - m_reversePath->addPoint(PathPoint(rframe, frame)); - } -*/ PathModel::PointList points = m_path->getPoints(); @@ -296,7 +273,7 @@ } #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::constructReversePath: " << m_reversePath->getPointCount() << " points, at least " << (2 * m_reversePath->getPointCount() * (3 * sizeof(void *) + sizeof(int) + sizeof(PathPoint))) << " bytes" << std::endl; + SVDEBUG << "AlignmentModel::constructReversePath: " << m_reversePath->getPointCount() << " points, at least " << (2 * m_reversePath->getPointCount() * (3 * sizeof(void *) + sizeof(int) + sizeof(PathPoint))) << " bytes" << endl; #endif } @@ -314,13 +291,13 @@ if (points.empty()) { #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::align: No points" << std::endl; + SVDEBUG << "AlignmentModel::align: No points" << endl; #endif return frame; } #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::align: frame " << frame << " requested" << std::endl; + SVDEBUG << "AlignmentModel::align: frame " << frame << " requested" << endl; #endif PathModel::Point point(frame); @@ -363,7 +340,7 @@ } #ifdef DEBUG_ALIGNMENT_MODEL - std::cerr << "AlignmentModel::align: resultFrame = " << resultFrame << std::endl; + SVDEBUG << "AlignmentModel::align: resultFrame = " << resultFrame << endl; #endif return resultFrame; @@ -375,7 +352,14 @@ if (m_path) m_path->aboutToDelete(); delete m_path; m_path = path; +#ifdef DEBUG_ALIGNMENT_MODEL + SVDEBUG << "AlignmentModel::setPath: path = " << m_path << endl; +#endif constructReversePath(); +#ifdef DEBUG_ALIGNMENT_MODEL + SVDEBUG << "AlignmentModel::setPath: after construction path = " + << m_path << ", rpath = " << m_reversePath << endl; +#endif } void @@ -384,7 +368,7 @@ QString extraAttributes) const { if (!m_path) { - std::cerr << "AlignmentModel::toXml: no path" << std::endl; + SVDEBUG << "AlignmentModel::toXml: no path" << endl; return; } diff -r 4efa7429cd85 -r c10cb8782576 data/model/EditableDenseThreeDimensionalModel.cpp --- a/data/model/EditableDenseThreeDimensionalModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/EditableDenseThreeDimensionalModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -492,7 +492,7 @@ // For historical reasons we read and write "resolution" as "windowSize" - std::cerr << "EditableDenseThreeDimensionalModel::toXml" << std::endl; + SVDEBUG << "EditableDenseThreeDimensionalModel::toXml" << endl; Model::toXml (out, indent, diff -r 4efa7429cd85 -r c10cb8782576 data/model/FFTModel.h --- a/data/model/FFTModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/FFTModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -177,6 +177,7 @@ size_t ymin = 0, size_t ymax = 0); virtual int getCompletion() const { 
return m_server->getFillCompletion(); } + virtual QString getError() const { return m_server->getError(); } virtual Model *clone() const; diff -r 4efa7429cd85 -r c10cb8782576 data/model/ImageModel.h --- a/data/model/ImageModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/ImageModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -181,9 +181,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/IntervalModel.h --- a/data/model/IntervalModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/IntervalModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -95,10 +95,10 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; typename I::PointList::const_iterator i = I::getPointListIteratorForRow(row); - if (i == I::m_points.end()) return false; + if (i == I::m_points.end()) return 0; typename I::EditCommand *command = new typename I::EditCommand (this, I::tr("Edit Data")); diff -r 4efa7429cd85 -r c10cb8782576 data/model/Model.cpp --- a/data/model/Model.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/Model.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -24,12 +24,12 @@ Model::~Model() { -// std::cerr << "Model::~Model(" << this << ")" << std::endl; +// SVDEBUG << "Model::~Model(" << this << ")" << endl; if (!m_aboutToDelete) { - std::cerr << "NOTE: Model::~Model(" << this << ", \"" - << objectName().toStdString() << "\"): Model deleted " - << "with no aboutToDelete notification" << std::endl; + SVDEBUG << "NOTE: Model::~Model(" << this << ", \"" + << objectName() << "\"): Model deleted " + << "with no aboutToDelete notification" << endl; } if (m_alignment) { @@ -63,7 +63,7 @@ if (m_aboutToDelete) { std::cerr << "WARNING: Model(" << this << ", \"" - << objectName().toStdString() << "\")::aboutToDelete: " + << objectName() << "\")::aboutToDelete: " << "aboutToDelete called more than once for the same model" << std::endl; } @@ -134,7 +134,7 @@ int Model::getAlignmentCompletion() const { -// std::cerr << "Model::getAlignmentCompletion" << std::endl; +// SVDEBUG << "Model::getAlignmentCompletion" << endl; if (!m_alignment) { if (m_sourceModel) return m_sourceModel->getAlignmentCompletion(); else return 100; diff -r 4efa7429cd85 -r c10cb8782576 data/model/ModelDataTableModel.cpp --- a/data/model/ModelDataTableModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/ModelDataTableModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -195,8 +195,8 @@ void ModelDataTableModel::sort(int column, Qt::SortOrder sortOrder) { -// std::cerr << "ModelDataTableModel::sort(" << column << ", " << sortOrder -// << ")" << std::endl; +// SVDEBUG << "ModelDataTableModel::sort(" << column << ", " << sortOrder +// << ")" << endl; int prevCurrent = getCurrentRow(); if (m_sortColumn != column) { clearSort(); @@ -362,7 +362,7 @@ } for (MapType::iterator i = rowMap.begin(); i != rowMap.end(); ++i) { -// std::cerr << "resortAlphabetical: " << i->second << ": " << i->first.toStdString() << std::endl; +// std::cerr << "resortAlphabetical: " << i->second << ": " << i->first << std::endl; m_rsort.push_back(i->second); } diff -r 4efa7429cd85 -r c10cb8782576 data/model/NoteModel.h --- a/data/model/NoteModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/NoteModel.h Sun Jul 01 11:53:00 2012 
+0100 @@ -56,7 +56,8 @@ { stream << QString("%1\n") - .arg(indent).arg(frame).arg(value).arg(duration).arg(level).arg(label).arg(extraAttributes); + .arg(indent).arg(frame).arg(value).arg(duration).arg(level) + .arg(XmlExportable::encodeEntities(label)).arg(extraAttributes); } QString toDelimitedDataString(QString delimiter, size_t sampleRate) const @@ -195,9 +196,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListConstIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/PowerOfSqrtTwoZoomConstraint.cpp --- a/data/model/PowerOfSqrtTwoZoomConstraint.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/PowerOfSqrtTwoZoomConstraint.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -51,7 +51,7 @@ else if (dir == RoundDown) rval = size_t(prevVal + 0.01); else if (val - blockSize < blockSize - prevVal) rval = size_t(val + 0.01); else rval = size_t(prevVal + 0.01); -// std::cerr << "returning " << rval << std::endl; +// SVDEBUG << "returning " << rval << endl; return rval; } @@ -74,7 +74,7 @@ << (power - minCachePower)); } -// std::cerr << "Testing base " << base << std::endl; +// SVDEBUG << "Testing base " << base << endl; if (base == blockSize) { result = base; diff -r 4efa7429cd85 -r c10cb8782576 data/model/RegionModel.h --- a/data/model/RegionModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/RegionModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -36,6 +36,7 @@ struct RegionRec { public: + RegionRec() : frame(0), value(0.f), duration(0) { } RegionRec(long _frame) : frame(_frame), value(0.0f), duration(0) { } RegionRec(long _frame, float _value, size_t _duration, QString _label) : frame(_frame), value(_value), duration(_duration), label(_label) { } @@ -55,7 +56,8 @@ { stream << QString("%1\n") - .arg(indent).arg(frame).arg(value).arg(duration).arg(label).arg(extraAttributes); + .arg(indent).arg(frame).arg(value).arg(duration) + .arg(XmlExportable::encodeEntities(label)).arg(extraAttributes); } QString toDelimitedDataString(QString delimiter, size_t sampleRate) const @@ -179,9 +181,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/SparseModel.h --- a/data/model/SparseModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/SparseModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -322,9 +322,9 @@ virtual Command *getSetDataCommand(int row, int column, const QVariant &value, int role) { - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/SparseOneDimensionalModel.h --- a/data/model/SparseOneDimensionalModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/SparseOneDimensionalModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -154,9 +154,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; 
PointListConstIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/SparseTimeValueModel.h --- a/data/model/SparseTimeValueModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/SparseTimeValueModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -147,9 +147,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListConstIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/TextModel.h --- a/data/model/TextModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/TextModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -146,9 +146,9 @@ (row, column, value, role); } - if (role != Qt::EditRole) return false; + if (role != Qt::EditRole) return 0; PointListIterator i = getPointListIteratorForRow(row); - if (i == m_points.end()) return false; + if (i == m_points.end()) return 0; EditCommand *command = new EditCommand(this, tr("Edit Data")); Point point(*i); diff -r 4efa7429cd85 -r c10cb8782576 data/model/WaveFileModel.cpp --- a/data/model/WaveFileModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/WaveFileModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -53,8 +53,8 @@ m_reader = AudioFileReaderFactory::createThreadingReader (m_source, targetRate); if (m_reader) { - std::cerr << "WaveFileModel::WaveFileModel: reader rate: " - << m_reader->getSampleRate() << std::endl; + SVDEBUG << "WaveFileModel::WaveFileModel: reader rate: " + << m_reader->getSampleRate() << endl; } } if (m_reader) setObjectName(m_reader->getTitle()); @@ -115,7 +115,7 @@ prevCompletion = *completion; } #ifdef DEBUG_WAVE_FILE_MODEL - std::cerr << "WaveFileModel::isReady(): ready = " << ready << ", completion = " << (completion ? *completion : -1) << std::endl; + SVDEBUG << "WaveFileModel::isReady(): ready = " << ready << ", completion = " << (completion ? 
*completion : -1) << endl; #endif return ready; } @@ -210,8 +210,8 @@ } #ifdef DEBUG_WAVE_FILE_MODEL -// std::cerr << "WaveFileModel::getValues(" << channel << ", " -// << start << ", " << end << "): calling reader" << std::endl; +// SVDEBUG << "WaveFileModel::getValues(" << channel << ", " +// << start << ", " << end << "): calling reader" << endl; #endif int channels = getChannelCount(); @@ -534,7 +534,7 @@ } #ifdef DEBUG_WAVE_FILE_MODEL - cerr << "returning " << ranges.size() << " ranges" << endl; + SVDEBUG << "returning " << ranges.size() << " ranges" << endl; #endif return; } @@ -607,7 +607,7 @@ m_fillThread->start(); #ifdef DEBUG_WAVE_FILE_MODEL - std::cerr << "WaveFileModel::fillCache: started fill thread" << std::endl; + SVDEBUG << "WaveFileModel::fillCache: started fill thread" << endl; #endif } @@ -617,7 +617,7 @@ if (m_fillThread) { size_t fillExtent = m_fillThread->getFillExtent(); #ifdef DEBUG_WAVE_FILE_MODEL - cerr << "WaveFileModel::fillTimerTimedOut: extent = " << fillExtent << endl; + SVDEBUG << "WaveFileModel::fillTimerTimedOut: extent = " << fillExtent << endl; #endif if (fillExtent > m_lastFillExtent) { emit modelChanged(m_lastFillExtent, fillExtent); @@ -625,7 +625,7 @@ } } else { #ifdef DEBUG_WAVE_FILE_MODEL - cerr << "WaveFileModel::fillTimerTimedOut: no thread" << std::endl; + SVDEBUG << "WaveFileModel::fillTimerTimedOut: no thread" << endl; #endif emit modelChanged(); } @@ -646,7 +646,7 @@ emit modelChanged(); emit ready(); #ifdef DEBUG_WAVE_FILE_MODEL - cerr << "WaveFileModel::cacheFilled" << endl; + SVDEBUG << "WaveFileModel::cacheFilled" << endl; #endif } @@ -669,7 +669,7 @@ if (updating) { while (channels == 0 && !m_model.m_exiting) { -// std::cerr << "WaveFileModel::fill: Waiting for channels..." << std::endl; +// SVDEBUG << "WaveFileModel::fill: Waiting for channels..." 
<< endl; sleep(1); channels = m_model.getChannelCount(); } @@ -690,11 +690,11 @@ updating = m_model.m_reader->isUpdating(); m_frameCount = m_model.getFrameCount(); -// std::cerr << "WaveFileModel::fill: frame = " << frame << ", count = " << m_frameCount << std::endl; +// SVDEBUG << "WaveFileModel::fill: frame = " << frame << ", count = " << m_frameCount << endl; while (frame < m_frameCount) { -// std::cerr << "WaveFileModel::fill inner loop: frame = " << frame << ", count = " << m_frameCount << ", blocksize " << readBlockSize << std::endl; +// SVDEBUG << "WaveFileModel::fill inner loop: frame = " << frame << ", count = " << m_frameCount << ", blocksize " << readBlockSize << endl; if (updating && (frame + readBlockSize > m_frameCount)) break; diff -r 4efa7429cd85 -r c10cb8782576 data/model/WaveFileModel.h --- a/data/model/WaveFileModel.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/WaveFileModel.h Sun Jul 01 11:53:00 2012 +0100 @@ -86,11 +86,6 @@ QString indent = "", QString extraAttributes = "") const; -signals: - void modelChanged(); - void modelChanged(size_t, size_t); - void completionChanged(); - protected slots: void fillTimerTimedOut(); void cacheFilled(); diff -r 4efa7429cd85 -r c10cb8782576 data/model/WritableWaveFileModel.cpp --- a/data/model/WritableWaveFileModel.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/model/WritableWaveFileModel.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -26,6 +26,7 @@ #include #include +#include //#define DEBUG_WRITABLE_WAVE_FILE_MODEL 1 @@ -52,9 +53,12 @@ } } - m_writer = new WavFileWriter(path, sampleRate, channels); + // Write directly to the target file, so that we can do + // incremental writes and concurrent reads + m_writer = new WavFileWriter(path, sampleRate, channels, + WavFileWriter::WriteToTarget); if (!m_writer->isOK()) { - std::cerr << "WritableWaveFileModel: Error in creating WAV file writer: " << m_writer->getError().toStdString() << std::endl; + std::cerr << "WritableWaveFileModel: Error in creating WAV file writer: " << m_writer->getError() << std::endl; delete m_writer; m_writer = 0; return; @@ -106,11 +110,11 @@ if (!m_writer) return false; #ifdef DEBUG_WRITABLE_WAVE_FILE_MODEL -// std::cerr << "WritableWaveFileModel::addSamples(" << count << ")" << std::endl; +// SVDEBUG << "WritableWaveFileModel::addSamples(" << count << ")" << endl; #endif if (!m_writer->writeSamples(samples, count)) { - std::cerr << "ERROR: WritableWaveFileModel::addSamples: writer failed: " << m_writer->getError().toStdString() << std::endl; + std::cerr << "ERROR: WritableWaveFileModel::addSamples: writer failed: " << m_writer->getError() << std::endl; return false; } @@ -120,12 +124,12 @@ if (m_reader && m_reader->getChannelCount() == 0) { #ifdef DEBUG_WRITABLE_WAVE_FILE_MODEL - std::cerr << "WritableWaveFileModel::addSamples(" << count << "): calling updateFrameCount (initial)" << std::endl; + SVDEBUG << "WritableWaveFileModel::addSamples(" << count << "): calling updateFrameCount (initial)" << endl; #endif m_reader->updateFrameCount(); } else if (++updateCounter == 100) { #ifdef DEBUG_WRITABLE_WAVE_FILE_MODEL - std::cerr << "WritableWaveFileModel::addSamples(" << count << "): calling updateFrameCount (periodic)" << std::endl; + SVDEBUG << "WritableWaveFileModel::addSamples(" << count << "): calling updateFrameCount (periodic)" << endl; #endif if (m_reader) m_reader->updateFrameCount(); updateCounter = 0; @@ -138,7 +142,7 @@ WritableWaveFileModel::isOK() const { bool ok = (m_writer && m_writer->isOK()); -// std::cerr << "WritableWaveFileModel::isOK(): ok = " << ok 
<< std::endl; +// SVDEBUG << "WritableWaveFileModel::isOK(): ok = " << ok << endl; return ok; } @@ -161,7 +165,7 @@ size_t WritableWaveFileModel::getFrameCount() const { -// std::cerr << "WritableWaveFileModel::getFrameCount: count = " << m_frameCount << std::endl; +// SVDEBUG << "WritableWaveFileModel::getFrameCount: count = " << m_frameCount << endl; return m_frameCount; } diff -r 4efa7429cd85 -r c10cb8782576 data/osc/OSCMessage.h --- a/data/osc/OSCMessage.h Mon Nov 29 12:45:39 2010 +0000 +++ b/data/osc/OSCMessage.h Sun Jul 01 11:53:00 2012 +0100 @@ -27,6 +27,8 @@ #include #include +#include "base/Debug.h" + class OSCMessage { public: diff -r 4efa7429cd85 -r c10cb8782576 data/osc/OSCQueue.cpp --- a/data/osc/OSCQueue.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/data/osc/OSCQueue.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -167,7 +167,7 @@ return; } std::cerr << "WARNING: OSCQueue::postMessage: OSC message queue (capacity " << m_buffer.getSize() << " is full!" << std::endl; - std::cerr << "Waiting for something to be processed" << std::endl; + SVDEBUG << "Waiting for something to be processed" << endl; #ifdef _WIN32 Sleep(1); #else @@ -178,9 +178,9 @@ OSCMessage *mp = new OSCMessage(message); m_buffer.write(&mp, 1); - std::cerr << "OSCQueue::postMessage: Posted OSC message: target " + SVDEBUG << "OSCQueue::postMessage: Posted OSC message: target " << message.getTarget() << ", target data " << message.getTargetData() - << ", method " << message.getMethod().toStdString() << std::endl; + << ", method " << message.getMethod() << endl; emit messagesAvailable(); } @@ -213,14 +213,13 @@ if (method.contains('/')) { std::cerr << "ERROR: OSCQueue::parseOSCPath: malformed path \"" - << path.toStdString() << "\" (should be target/data/method or " + << path << "\" (should be target/data/method or " << "target/method or method, where target and data " << "are numeric)" << std::endl; return false; } - std::cerr << "OSCQueue::parseOSCPath: good path \"" << path.toStdString() - << "\"" << std::endl; + SVDEBUG << "OSCQueue::parseOSCPath: good path \"" << path << "\"" << endl; return true; } diff -r 4efa7429cd85 -r c10cb8782576 install-sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/install-sh Sun Jul 01 11:53:00 2012 +0100 @@ -0,0 +1,519 @@ +#!/bin/sh +# install - install a program, script, or datafile + +scriptversion=2006-12-25.00 + +# This originates from X11R5 (mit/util/scripts/install.sh), which was +# later released in X11R6 (xc/config/util/install.sh) with the +# following copyright and license. +# +# Copyright (C) 1994 X Consortium +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- +# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# Except as contained in this notice, the name of the X Consortium shall not +# be used in advertising or otherwise to promote the sale, use or other deal- +# ings in this Software without prior written authorization from the X Consor- +# tium. +# +# +# FSF changes to this file are in the public domain. +# +# Calling this script install-sh is preferred over install.sh, to prevent +# `make' implicit rules from creating a file called install from it +# when there is no Makefile. +# +# This script is compatible with the BSD install script, but was written +# from scratch. + +nl=' +' +IFS=" "" $nl" + +# set DOITPROG to echo to test this script + +# Don't use :- since 4.3BSD and earlier shells don't like it. +doit=${DOITPROG-} +if test -z "$doit"; then + doit_exec=exec +else + doit_exec=$doit +fi + +# Put in absolute file names if you don't have them in your path; +# or use environment vars. + +chgrpprog=${CHGRPPROG-chgrp} +chmodprog=${CHMODPROG-chmod} +chownprog=${CHOWNPROG-chown} +cmpprog=${CMPPROG-cmp} +cpprog=${CPPROG-cp} +mkdirprog=${MKDIRPROG-mkdir} +mvprog=${MVPROG-mv} +rmprog=${RMPROG-rm} +stripprog=${STRIPPROG-strip} + +posix_glob='?' +initialize_posix_glob=' + test "$posix_glob" != "?" || { + if (set -f) 2>/dev/null; then + posix_glob= + else + posix_glob=: + fi + } +' + +posix_mkdir= + +# Desired mode of installed file. +mode=0755 + +chgrpcmd= +chmodcmd=$chmodprog +chowncmd= +mvcmd=$mvprog +rmcmd="$rmprog -f" +stripcmd= + +src= +dst= +dir_arg= +dst_arg= + +copy_on_change=false +no_target_directory= + +usage="\ +Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE + or: $0 [OPTION]... SRCFILES... DIRECTORY + or: $0 [OPTION]... -t DIRECTORY SRCFILES... + or: $0 [OPTION]... -d DIRECTORIES... + +In the 1st form, copy SRCFILE to DSTFILE. +In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. +In the 4th, create DIRECTORIES. + +Options: + --help display this help and exit. + --version display version info and exit. + + -c (ignored) + -C install only if different (preserve the last data modification time) + -d create directories instead of installing files. + -g GROUP $chgrpprog installed files to GROUP. + -m MODE $chmodprog installed files to MODE. + -o USER $chownprog installed files to USER. + -s $stripprog installed files. + -t DIRECTORY install into DIRECTORY. + -T report an error if DSTFILE is a directory. + +Environment variables override the default commands: + CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG + RMPROG STRIPPROG +" + +while test $# -ne 0; do + case $1 in + -c) ;; + + -C) copy_on_change=true;; + + -d) dir_arg=true;; + + -g) chgrpcmd="$chgrpprog $2" + shift;; + + --help) echo "$usage"; exit $?;; + + -m) mode=$2 + case $mode in + *' '* | *' '* | *' +'* | *'*'* | *'?'* | *'['*) + echo "$0: invalid mode: $mode" >&2 + exit 1;; + esac + shift;; + + -o) chowncmd="$chownprog $2" + shift;; + + -s) stripcmd=$stripprog;; + + -t) dst_arg=$2 + shift;; + + -T) no_target_directory=true;; + + --version) echo "$0 $scriptversion"; exit $?;; + + --) shift + break;; + + -*) echo "$0: invalid option: $1" >&2 + exit 1;; + + *) break;; + esac + shift +done + +if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then + # When -d is used, all remaining arguments are directories to create. 
+ # When -t is used, the destination is already specified. + # Otherwise, the last argument is the destination. Remove it from $@. + for arg + do + if test -n "$dst_arg"; then + # $@ is not empty: it contains at least $arg. + set fnord "$@" "$dst_arg" + shift # fnord + fi + shift # arg + dst_arg=$arg + done +fi + +if test $# -eq 0; then + if test -z "$dir_arg"; then + echo "$0: no input file specified." >&2 + exit 1 + fi + # It's OK to call `install-sh -d' without argument. + # This can happen when creating conditional directories. + exit 0 +fi + +if test -z "$dir_arg"; then + trap '(exit $?); exit' 1 2 13 15 + + # Set umask so as not to create temps with too-generous modes. + # However, 'strip' requires both read and write access to temps. + case $mode in + # Optimize common cases. + *644) cp_umask=133;; + *755) cp_umask=22;; + + *[0-7]) + if test -z "$stripcmd"; then + u_plus_rw= + else + u_plus_rw='% 200' + fi + cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; + *) + if test -z "$stripcmd"; then + u_plus_rw= + else + u_plus_rw=,u+rw + fi + cp_umask=$mode$u_plus_rw;; + esac +fi + +for src +do + # Protect names starting with `-'. + case $src in + -*) src=./$src;; + esac + + if test -n "$dir_arg"; then + dst=$src + dstdir=$dst + test -d "$dstdir" + dstdir_status=$? + else + + # Waiting for this to be detected by the "$cpprog $src $dsttmp" command + # might cause directories to be created, which would be especially bad + # if $src (and thus $dsttmp) contains '*'. + if test ! -f "$src" && test ! -d "$src"; then + echo "$0: $src does not exist." >&2 + exit 1 + fi + + if test -z "$dst_arg"; then + echo "$0: no destination specified." >&2 + exit 1 + fi + + dst=$dst_arg + # Protect names starting with `-'. + case $dst in + -*) dst=./$dst;; + esac + + # If destination is a directory, append the input filename; won't work + # if double slashes aren't ignored. + if test -d "$dst"; then + if test -n "$no_target_directory"; then + echo "$0: $dst_arg: Is a directory" >&2 + exit 1 + fi + dstdir=$dst + dst=$dstdir/`basename "$src"` + dstdir_status=0 + else + # Prefer dirname, but fall back on a substitute if dirname fails. + dstdir=` + (dirname "$dst") 2>/dev/null || + expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$dst" : 'X\(//\)[^/]' \| \ + X"$dst" : 'X\(//\)$' \| \ + X"$dst" : 'X\(/\)' \| . 2>/dev/null || + echo X"$dst" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q' + ` + + test -d "$dstdir" + dstdir_status=$? + fi + fi + + obsolete_mkdir_used=false + + if test $dstdir_status != 0; then + case $posix_mkdir in + '') + # Create intermediate dirs using mode 755 as modified by the umask. + # This is like FreeBSD 'install' as of 1997-10-28. + umask=`umask` + case $stripcmd.$umask in + # Optimize common cases. + *[2367][2367]) mkdir_umask=$umask;; + .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; + + *[0-7]) + mkdir_umask=`expr $umask + 22 \ + - $umask % 100 % 40 + $umask % 20 \ + - $umask % 10 % 4 + $umask % 2 + `;; + *) mkdir_umask=$umask,go-w;; + esac + + # With -d, create the new directory with the user-specified mode. + # Otherwise, rely on $mkdir_umask. + if test -n "$dir_arg"; then + mkdir_mode=-m$mode + else + mkdir_mode= + fi + + posix_mkdir=false + case $umask in + *[123567][0-7][0-7]) + # POSIX mkdir -p sets u+wx bits regardless of umask, which + # is incompatible with FreeBSD 'install' when (umask & 300) != 0. 
+ ;; + *) + tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ + trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0 + + if (umask $mkdir_umask && + exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1 + then + if test -z "$dir_arg" || { + # Check for POSIX incompatibilities with -m. + # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or + # other-writeable bit of parent directory when it shouldn't. + # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. + ls_ld_tmpdir=`ls -ld "$tmpdir"` + case $ls_ld_tmpdir in + d????-?r-*) different_mode=700;; + d????-?--*) different_mode=755;; + *) false;; + esac && + $mkdirprog -m$different_mode -p -- "$tmpdir" && { + ls_ld_tmpdir_1=`ls -ld "$tmpdir"` + test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" + } + } + then posix_mkdir=: + fi + rmdir "$tmpdir/d" "$tmpdir" + else + # Remove any dirs left behind by ancient mkdir implementations. + rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null + fi + trap '' 0;; + esac;; + esac + + if + $posix_mkdir && ( + umask $mkdir_umask && + $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" + ) + then : + else + + # The umask is ridiculous, or mkdir does not conform to POSIX, + # or it failed possibly due to a race condition. Create the + # directory the slow way, step by step, checking for races as we go. + + case $dstdir in + /*) prefix='/';; + -*) prefix='./';; + *) prefix='';; + esac + + eval "$initialize_posix_glob" + + oIFS=$IFS + IFS=/ + $posix_glob set -f + set fnord $dstdir + shift + $posix_glob set +f + IFS=$oIFS + + prefixes= + + for d + do + test -z "$d" && continue + + prefix=$prefix$d + if test -d "$prefix"; then + prefixes= + else + if $posix_mkdir; then + (umask=$mkdir_umask && + $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break + # Don't fail if two instances are running concurrently. + test -d "$prefix" || exit 1 + else + case $prefix in + *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; + *) qprefix=$prefix;; + esac + prefixes="$prefixes '$qprefix'" + fi + fi + prefix=$prefix/ + done + + if test -n "$prefixes"; then + # Don't fail if two instances are running concurrently. + (umask $mkdir_umask && + eval "\$doit_exec \$mkdirprog $prefixes") || + test -d "$dstdir" || exit 1 + obsolete_mkdir_used=true + fi + fi + fi + + if test -n "$dir_arg"; then + { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && + { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && + { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || + test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 + else + + # Make a couple of temp file names in the proper directory. + dsttmp=$dstdir/_inst.$$_ + rmtmp=$dstdir/_rm.$$_ + + # Trap to clean up those temp files at exit. + trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 + + # Copy the file name to the temp name. + (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && + + # and set any options; do chmod last to preserve setuid bits. + # + # If any of these fail, we abort the whole thing. If we want to + # ignore errors from any of these, just make sure not to ignore + # errors from the above "$doit $cpprog $src $dsttmp" command. + # + { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && + { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && + { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && + { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && + + # If -C, don't bother to copy if it wouldn't change the file. 
+ if $copy_on_change && + old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && + new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && + + eval "$initialize_posix_glob" && + $posix_glob set -f && + set X $old && old=:$2:$4:$5:$6 && + set X $new && new=:$2:$4:$5:$6 && + $posix_glob set +f && + + test "$old" = "$new" && + $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 + then + rm -f "$dsttmp" + else + # Rename the file to the real destination. + $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || + + # The rename failed, perhaps because mv can't rename something else + # to itself, or perhaps because mv is so ancient that it does not + # support -f. + { + # Now remove or move aside any old file at destination location. + # We try this two ways since rm can't unlink itself on some + # systems and the destination file might be busy for other + # reasons. In this case, the final cleanup might fail but the new + # file should still install successfully. + { + test ! -f "$dst" || + $doit $rmcmd -f "$dst" 2>/dev/null || + { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && + { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } + } || + { echo "$0: cannot unlink or rename $dst" >&2 + (exit 1); exit 1 + } + } && + + # Now rename the file to the real destination. + $doit $mvcmd "$dsttmp" "$dst" + } + fi || exit 1 + + trap '' 0 + fi +done + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-end: "$" +# End: diff -r 4efa7429cd85 -r c10cb8782576 plugin/DSSIPluginFactory.cpp --- a/plugin/DSSIPluginFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/DSSIPluginFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -69,7 +69,7 @@ const LADSPA_Descriptor *descriptor = ddesc->LADSPA_Plugin; if (!descriptor) continue; -// std::cerr << "DSSIPluginFactory::enumeratePlugins: Name " << (descriptor->Name ? descriptor->Name : "NONE" ) << std::endl; +// SVDEBUG << "DSSIPluginFactory::enumeratePlugins: Name " << (descriptor->Name ? 
descriptor->Name : "NONE" ) << endl; list.push_back(*i); list.push_back(descriptor->Name); @@ -159,7 +159,7 @@ if (m_libraryHandles.find(soname) == m_libraryHandles.end()) { loadLibrary(soname); if (m_libraryHandles.find(soname) == m_libraryHandles.end()) { - std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: loadLibrary failed for " << soname.toStdString() << std::endl; + std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: loadLibrary failed for " << soname << std::endl; return 0; } firstInLibrary = true; @@ -171,7 +171,7 @@ DLSYM(libraryHandle, "dssi_descriptor"); if (!fn) { - std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: No descriptor function in library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: No descriptor function in library " << soname << std::endl; return 0; } @@ -188,7 +188,7 @@ ++index; } - std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: No such plugin as " << label.toStdString() << " in library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: DSSIPluginFactory::getDSSIDescriptor: No such plugin as " << label << " in library " << soname << std::endl; return 0; } @@ -292,7 +292,7 @@ if (!libraryHandle) { std::cerr << "WARNING: DSSIPluginFactory::discoverPlugins: couldn't load plugin library " - << soname.toStdString() << " - " << DLERROR() << std::endl; + << soname << " - " << DLERROR() << std::endl; return; } @@ -300,7 +300,7 @@ DLSYM(libraryHandle, "dssi_descriptor"); if (!fn) { - std::cerr << "WARNING: DSSIPluginFactory::discoverPlugins: No descriptor function in " << soname.toStdString() << std::endl; + std::cerr << "WARNING: DSSIPluginFactory::discoverPlugins: No descriptor function in " << soname << std::endl; return; } @@ -311,7 +311,7 @@ const LADSPA_Descriptor *ladspaDescriptor = descriptor->LADSPA_Plugin; if (!ladspaDescriptor) { - std::cerr << "WARNING: DSSIPluginFactory::discoverPlugins: No LADSPA descriptor for plugin " << index << " in " << soname.toStdString() << std::endl; + std::cerr << "WARNING: DSSIPluginFactory::discoverPlugins: No LADSPA descriptor for plugin " << index << " in " << soname << std::endl; ++index; continue; } diff -r 4efa7429cd85 -r c10cb8782576 plugin/DSSIPluginInstance.cpp --- a/plugin/DSSIPluginInstance.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/DSSIPluginInstance.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -27,7 +27,11 @@ #include +#ifndef Q_OS_WIN32 #include +#else +#include +#endif //#define DEBUG_DSSI 1 //#define DEBUG_DSSI_PROCESS 1 @@ -73,8 +77,8 @@ m_haveLastEventSendTime(false) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::DSSIPluginInstance(" << identifier << ")" - << std::endl; + SVDEBUG << "DSSIPluginInstance::DSSIPluginInstance(" << identifier << ")" + << endl; #endif init(); @@ -173,7 +177,7 @@ DSSIPluginInstance::getParameter(std::string id) const { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::getParameter(" << id << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::getParameter(" << id << ")" << endl; #endif for (unsigned int i = 0; i < m_controlPortsIn.size(); ++i) { if (id == m_descriptor->LADSPA_Plugin->PortNames[m_controlPortsIn[i].first]) { @@ -182,7 +186,7 @@ #endif float v = getParameterValue(i); #ifdef DEBUG_DSSI - std::cerr << "Returning " << v << std::endl; + SVDEBUG << "Returning " << v << endl; #endif return v; } @@ -195,7 +199,7 @@ DSSIPluginInstance::setParameter(std::string id, float value) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::setParameter(" << id << 
", " << value << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::setParameter(" << id << ", " << value << ")" << endl; #endif for (unsigned int i = 0; i < m_controlPortsIn.size(); ++i) { @@ -210,7 +214,7 @@ DSSIPluginInstance::init() { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::init" << std::endl; + SVDEBUG << "DSSIPluginInstance::init" << endl; #endif // Discover ports numbers and identities @@ -254,8 +258,8 @@ } #ifdef DEBUG_DSSI else - std::cerr << "DSSIPluginInstance::DSSIPluginInstance - " - << "unrecognised port type" << std::endl; + SVDEBUG << "DSSIPluginInstance::DSSIPluginInstance - " + << "unrecognised port type" << endl; #endif } @@ -268,7 +272,7 @@ size_t latency = 0; #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::getLatency(): m_latencyPort " << m_latencyPort << ", m_run " << m_run << std::endl; + SVDEBUG << "DSSIPluginInstance::getLatency(): m_latencyPort " << m_latencyPort << ", m_run " << m_run << endl; #endif if (m_latencyPort) { @@ -284,7 +288,7 @@ } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::getLatency(): latency is " << latency << std::endl; + SVDEBUG << "DSSIPluginInstance::getLatency(): latency is " << latency << endl; #endif return latency; @@ -309,8 +313,8 @@ DSSIPluginInstance::setIdealChannelCount(size_t channels) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::setIdealChannelCount: channel count " - << channels << " (was " << m_idealChannelCount << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::setIdealChannelCount: channel count " + << channels << " (was " << m_idealChannelCount << ")" << endl; #endif if (channels == m_idealChannelCount) { @@ -397,7 +401,7 @@ DSSIPluginInstance::~DSSIPluginInstance() { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::~DSSIPluginInstance" << std::endl; + SVDEBUG << "DSSIPluginInstance::~DSSIPluginInstance" << endl; #endif if (m_threads.find(m_instanceHandle) != m_threads.end()) { @@ -451,12 +455,12 @@ void DSSIPluginInstance::instantiate(unsigned long sampleRate) { + if (!m_descriptor) return; + #ifdef DEBUG_DSSI std::cout << "DSSIPluginInstance::instantiate - plugin \"unique\" id = " << m_descriptor->LADSPA_Plugin->UniqueID << std::endl; #endif - if (!m_descriptor) return; - const LADSPA_Descriptor *descriptor = m_descriptor->LADSPA_Plugin; if (!descriptor->instantiate) { @@ -498,7 +502,7 @@ m_cachedPrograms.clear(); #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::checkProgramCache" << std::endl; + SVDEBUG << "DSSIPluginInstance::checkProgramCache" << endl; #endif if (!m_descriptor || !m_descriptor->get_program) { @@ -518,7 +522,7 @@ } #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::checkProgramCache: have " << m_cachedPrograms.size() << " programs" << std::endl; + SVDEBUG << "DSSIPluginInstance::checkProgramCache: have " << m_cachedPrograms.size() << " programs" << endl; #endif m_programCacheValid = true; @@ -528,7 +532,7 @@ DSSIPluginInstance::getPrograms() const { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::getPrograms" << std::endl; + SVDEBUG << "DSSIPluginInstance::getPrograms" << endl; #endif if (!m_descriptor) return ProgramList(); @@ -549,7 +553,7 @@ DSSIPluginInstance::getProgram(int bank, int program) const { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::getProgram(" << bank << "," << program << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::getProgram(" << bank << "," << program << ")" << endl; #endif if (!m_descriptor) return std::string(); @@ -568,7 +572,7 @@ DSSIPluginInstance::getProgram(std::string name) const { 
#ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::getProgram(" << name << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::getProgram(" << name << ")" << endl; #endif if (!m_descriptor) return 0; @@ -605,7 +609,7 @@ DSSIPluginInstance::selectProgramAux(std::string program, bool backupPortValues) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::selectProgram(" << program << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::selectProgram(" << program << ")" << endl; #endif if (!m_descriptor) return; @@ -627,7 +631,7 @@ found = true; #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::selectProgram(" << program << "): found at bank " << bankNo << ", program " << programNo << std::endl; + SVDEBUG << "DSSIPluginInstance::selectProgram(" << program << "): found at bank " << bankNo << ", program " << programNo << endl; #endif break; @@ -643,7 +647,7 @@ m_processLock.unlock(); #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::selectProgram(" << program << "): made select_program(" << bankNo << "," << programNo << ") call" << std::endl; + SVDEBUG << "DSSIPluginInstance::selectProgram(" << program << "): made select_program(" << bankNo << "," << programNo << ") call" << endl; #endif if (backupPortValues) { @@ -657,7 +661,7 @@ DSSIPluginInstance::activate() { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::activate" << std::endl; + SVDEBUG << "DSSIPluginInstance::activate" << endl; #endif if (!m_descriptor || !m_descriptor->LADSPA_Plugin->activate) return; @@ -665,14 +669,14 @@ if (m_program != "") { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::activate: restoring program " << m_program << std::endl; + SVDEBUG << "DSSIPluginInstance::activate: restoring program " << m_program << endl; #endif selectProgramAux(m_program, false); } for (size_t i = 0; i < m_backupControlPortsIn.size(); ++i) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::activate: setting port " << m_controlPortsIn[i].first << " to " << m_backupControlPortsIn[i] << std::endl; + SVDEBUG << "DSSIPluginInstance::activate: setting port " << m_controlPortsIn[i].first << " to " << m_backupControlPortsIn[i] << endl; #endif *m_controlPortsIn[i].second = m_backupControlPortsIn[i]; } @@ -683,9 +687,9 @@ { if (!m_descriptor || !m_descriptor->LADSPA_Plugin->connect_port) return; #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::connectPorts: " << m_audioPortsIn.size() + SVDEBUG << "DSSIPluginInstance::connectPorts: " << m_audioPortsIn.size() << " audio ports in, " << m_audioPortsOut.size() << " out, " - << m_outputBufferCount << " output buffers" << std::endl; + << m_outputBufferCount << " output buffers" << endl; #endif assert(sizeof(LADSPA_Data) == sizeof(float)); @@ -722,7 +726,7 @@ *m_controlPortsIn[i].second = defaultValue; m_backupControlPortsIn[i] = defaultValue; #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::connectPorts: set control port " << i << " to default value " << defaultValue << std::endl; + SVDEBUG << "DSSIPluginInstance::connectPorts: set control port " << i << " to default value " << defaultValue << endl; #endif } } @@ -745,7 +749,7 @@ DSSIPluginInstance::setParameterValue(unsigned int parameter, float value) { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::setParameterValue(" << parameter << ") to " << value << std::endl; + SVDEBUG << "DSSIPluginInstance::setParameterValue(" << parameter << ") to " << value << endl; #endif if (parameter >= m_controlPortsIn.size()) return; @@ -769,7 +773,7 @@ DSSIPluginInstance::setPortValueFromController(unsigned int port, int cv) { 
#ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::setPortValueFromController(" << port << ") to " << cv << std::endl; + SVDEBUG << "DSSIPluginInstance::setPortValueFromController(" << port << ") to " << cv << endl; #endif const LADSPA_Descriptor *p = m_descriptor->LADSPA_Plugin; @@ -815,7 +819,7 @@ DSSIPluginInstance::getParameterValue(unsigned int parameter) const { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::getParameterValue(" << parameter << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::getParameterValue(" << parameter << ")" << endl; #endif if (parameter >= m_controlPortsIn.size()) return 0.0; return (*m_controlPortsIn[parameter].second); @@ -865,7 +869,7 @@ #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::configure(" << key << "," << value << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::configure(" << key << "," << value << ")" << endl; #endif char *message = m_descriptor->configure(m_instanceHandle, @@ -904,7 +908,7 @@ const void *e) { #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::sendEvent: last was " << m_lastEventSendTime << " (valid " << m_haveLastEventSendTime << "), this is " << eventTime << std::endl; + SVDEBUG << "DSSIPluginInstance::sendEvent: last was " << m_lastEventSendTime << " (valid " << m_haveLastEventSendTime << "), this is " << eventTime << endl; #endif // The process mechanism only works correctly if the events are @@ -922,7 +926,7 @@ snd_seq_event_t *event = (snd_seq_event_t *)e; #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::sendEvent at " << eventTime << std::endl; + SVDEBUG << "DSSIPluginInstance::sendEvent at " << eventTime << endl; #endif snd_seq_event_t ev(*event); @@ -951,7 +955,7 @@ int controller = ev->data.control.param; #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::handleController " << controller << std::endl; + SVDEBUG << "DSSIPluginInstance::handleController " << controller << endl; #endif if (controller == 0) { // bank select MSB @@ -1016,13 +1020,13 @@ } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::run(" << blockTime << ")" << std::endl; + SVDEBUG << "DSSIPluginInstance::run(" << blockTime << ")" << endl; #endif #ifdef DEBUG_DSSI_PROCESS if (m_eventBuffer.getReadSpace() > 0) { - std::cerr << "DSSIPluginInstance::run: event buffer has " - << m_eventBuffer.getReadSpace() << " event(s) in it" << std::endl; + SVDEBUG << "DSSIPluginInstance::run: event buffer has " + << m_eventBuffer.getReadSpace() << " event(s) in it" << endl; } #endif @@ -1040,8 +1044,8 @@ } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::run: evTime " << evTime << ", blockTime " << blockTime << ", frameOffset " << frameOffset - << ", blockSize " << m_blockSize << std::endl; + SVDEBUG << "DSSIPluginInstance::run: evTime " << evTime << ", blockTime " << blockTime << ", frameOffset " << frameOffset + << ", blockSize " << m_blockSize << endl; std::cerr << "Type: " << int(ev->type) << ", pitch: " << int(ev->data.note.note) << ", velocity: " << int(ev->data.note.velocity) << std::endl; #endif @@ -1075,20 +1079,20 @@ int bank = m_pending.lsb + 128 * m_pending.msb; #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::run: making select_program(" << bank << "," << program << ") call" << std::endl; + SVDEBUG << "DSSIPluginInstance::run: making select_program(" << bank << "," << program << ") call" << endl; #endif m_pending.lsb = m_pending.msb = m_pending.program = -1; m_descriptor->select_program(m_instanceHandle, bank, program); #ifdef DEBUG_DSSI - std::cerr << 
"DSSIPluginInstance::run: made select_program(" << bank << "," << program << ") call" << std::endl; + SVDEBUG << "DSSIPluginInstance::run: made select_program(" << bank << "," << program << ") call" << endl; #endif } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::run: running with " << evCount << " events" - << std::endl; + SVDEBUG << "DSSIPluginInstance::run: running with " << evCount << " events" + << endl; #endif m_descriptor->run_synth(m_instanceHandle, count, @@ -1149,7 +1153,7 @@ PluginSet &s = m_groupMap[m_identifier]; #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped(" << blockTime << "): this is " << this << "; " << s.size() << " elements in m_groupMap[" << m_identifier << "]" << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped(" << blockTime << "): this is " << this << "; " << s.size() << " elements in m_groupMap[" << m_identifier << "]" << endl; #endif if (m_lastRunTime != blockTime) { @@ -1157,7 +1161,7 @@ DSSIPluginInstance *instance = *i; if (instance != this && instance->m_lastRunTime == blockTime) { #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped(" << blockTime << "): plugin " << instance << " has already been run" << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped(" << blockTime << "): plugin " << instance << " has already been run" << endl; #endif needRun = false; } @@ -1166,13 +1170,13 @@ if (!needRun) { #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped(" << blockTime << "): already run, returning" << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped(" << blockTime << "): already run, returning" << endl; #endif return; } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped(" << blockTime << "): I'm the first, running" << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped(" << blockTime << "): I'm the first, running" << endl; #endif size_t index = 0; @@ -1190,7 +1194,7 @@ instances[index] = instance->m_instanceHandle; #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped(" << blockTime << "): running " << instance << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped(" << blockTime << "): running " << instance << endl; #endif if (instance->m_pending.program >= 0 && @@ -1216,8 +1220,8 @@ } #ifdef DEBUG_DSSI_PROCESS - std::cerr << "DSSIPluginInstance::runGrouped: evTime " << evTime << ", frameOffset " << frameOffset - << ", block size " << m_blockSize << std::endl; + SVDEBUG << "DSSIPluginInstance::runGrouped: evTime " << evTime << ", frameOffset " << frameOffset + << ", block size " << m_blockSize << endl; #endif if (frameOffset >= int(m_blockSize)) break; @@ -1255,7 +1259,7 @@ { // This is called from a non-RT context (during instantiate) - std::cerr << "DSSIPluginInstance::requestMidiSend" << std::endl; + SVDEBUG << "DSSIPluginInstance::requestMidiSend" << endl; return 1; } @@ -1266,7 +1270,7 @@ { // This is likely to be called from an RT context - std::cerr << "DSSIPluginInstance::midiSend" << std::endl; + SVDEBUG << "DSSIPluginInstance::midiSend" << endl; } void @@ -1292,7 +1296,7 @@ DSSIPluginInstance::deactivate() { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::deactivate " << m_identifier << std::endl; + SVDEBUG << "DSSIPluginInstance::deactivate " << m_identifier << endl; #endif if (!m_descriptor || !m_descriptor->LADSPA_Plugin->deactivate) return; @@ -1302,7 +1306,7 @@ m_descriptor->LADSPA_Plugin->deactivate(m_instanceHandle); #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::deactivate " << 
m_identifier << " done" << std::endl; + SVDEBUG << "DSSIPluginInstance::deactivate " << m_identifier << " done" << endl; #endif m_bufferScavenger.scavenge(); @@ -1312,7 +1316,7 @@ DSSIPluginInstance::cleanup() { #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::cleanup " << m_identifier << std::endl; + SVDEBUG << "DSSIPluginInstance::cleanup " << m_identifier << endl; #endif if (!m_descriptor) return; @@ -1327,7 +1331,7 @@ m_descriptor->LADSPA_Plugin->cleanup(m_instanceHandle); m_instanceHandle = 0; #ifdef DEBUG_DSSI - std::cerr << "DSSIPluginInstance::cleanup " << m_identifier << " done" << std::endl; + SVDEBUG << "DSSIPluginInstance::cleanup " << m_identifier << " done" << endl; #endif } diff -r 4efa7429cd85 -r c10cb8782576 plugin/FeatureExtractionPluginFactory.cpp --- a/plugin/FeatureExtractionPluginFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/FeatureExtractionPluginFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -58,8 +58,7 @@ { if (pluginType == "vamp") { if (!_nativeInstance) { -// std::cerr << "FeatureExtractionPluginFactory::instance(" << pluginType.toStdString() -// << "): creating new FeatureExtractionPluginFactory" << std::endl; +// SVDEBUG << "FeatureExtractionPluginFactory::instance(" << pluginType// << "): creating new FeatureExtractionPluginFactory" << endl; _nativeInstance = new FeatureExtractionPluginFactory(); } return _nativeInstance; @@ -96,7 +95,7 @@ if (factory) { std::vector tmp = factory->getPluginIdentifiers(); for (size_t i = 0; i < tmp.size(); ++i) { -// std::cerr << "identifier: " << tmp[i].toStdString() << std::endl; +// std::cerr << "identifier: " << tmp[i] << std::endl; rv.push_back(tmp[i]); } } @@ -118,7 +117,7 @@ for (std::vector::iterator i = path.begin(); i != path.end(); ++i) { #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: scanning directory " << i->toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: scanning directory " << i-<< endl; #endif QDir pluginDir(*i, PLUGIN_GLOB, @@ -130,33 +129,33 @@ QString soname = pluginDir.filePath(pluginDir[j]); #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: trying potential library " << soname.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: trying potential library " << soname << endl; #endif void *libraryHandle = DLOPEN(soname, RTLD_LAZY | RTLD_LOCAL); if (!libraryHandle) { - std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to load library " << soname.toStdString() << ": " << DLERROR() << std::endl; + std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to load library " << soname << ": " << DLERROR() << std::endl; continue; } #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: It's a library all right, checking for descriptor" << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: It's a library all right, checking for descriptor" << endl; #endif VampGetPluginDescriptorFunction fn = (VampGetPluginDescriptorFunction) DLSYM(libraryHandle, "vampGetPluginDescriptor"); if (!fn) { - std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: No descriptor function in " << soname.toStdString() << std::endl; + std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: No descriptor function in " << soname << 
std::endl; if (DLCLOSE(libraryHandle) != 0) { - std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to unload library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to unload library " << soname << std::endl; } continue; } #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: Vamp descriptor found" << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: Vamp descriptor found" << endl; #endif const VampPluginDescriptor *descriptor = 0; @@ -174,7 +173,7 @@ << descriptor->identifier << "\" at indices " << known[descriptor->identifier] << " and " << index << std::endl; - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: Avoiding this library (obsolete API?)" << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: Avoiding this library (obsolete API?)" << endl; ok = false; break; } else { @@ -194,14 +193,14 @@ ("vamp", soname, descriptor->identifier); rv.push_back(id); #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::getPluginIdentifiers: Found plugin id " << id.toStdString() << " at index " << index << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::getPluginIdentifiers: Found plugin id " << id << " at index " << index << endl; #endif ++index; } } if (DLCLOSE(libraryHandle) != 0) { - std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to unload library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: FeatureExtractionPluginFactory::getPluginIdentifiers: Failed to unload library " << soname << std::endl; } } } @@ -217,9 +216,9 @@ QString file = ""; #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile(\"" - << soname.toStdString() << "\", \"" << inDir.toStdString() << "\")" - << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile(\"" + << soname << "\", \"" << inDir << "\")" + << endl; #endif if (inDir != "") { @@ -234,8 +233,8 @@ if (QFileInfo(file).exists() && QFileInfo(file).isFile()) { #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile: " - << "found trivially at " << file.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile: " + << "found trivially at " << file << endl; #endif return file; @@ -246,8 +245,8 @@ if (QFileInfo(file).baseName() == QFileInfo(soname).baseName()) { #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile: " - << "found \"" << soname.toStdString() << "\" at " << file.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile: " + << "found \"" << soname << "\" at " << file << endl; #endif return file; @@ -255,8 +254,8 @@ } #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile (with dir): " - << "not found" << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile (with dir): " + << "not found" << endl; #endif return ""; @@ -267,8 +266,8 @@ if (fi.isAbsolute() && fi.exists() && fi.isFile()) { #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile: " - << "found trivially at " << soname.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile: " + << "found 
trivially at " << soname << endl; #endif return soname; } @@ -288,8 +287,8 @@ } #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::findPluginFile: " - << "not found" << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::findPluginFile: " + << "not found" << endl; #endif return ""; @@ -311,20 +310,20 @@ QString type, soname, label; PluginIdentifier::parseIdentifier(identifier, type, soname, label); if (type != "vamp") { - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Wrong factory for plugin type " << type.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::instantiatePlugin: Wrong factory for plugin type " << type << endl; return 0; } QString found = findPluginFile(soname); if (found == "") { - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to find library file " << soname.toStdString() << std::endl; + std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to find library file " << soname << std::endl; return 0; } else if (found != soname) { #ifdef DEBUG_PLUGIN_SCAN_AND_INSTANTIATE - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Given library name was " << soname.toStdString() << ", found at " << found.toStdString() << std::endl; - std::cerr << soname.toStdString() << " -> " << found.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::instantiatePlugin: Given library name was " << soname << ", found at " << found << endl; + std::cerr << soname << " -> " << found << std::endl; #endif } @@ -334,7 +333,7 @@ void *libraryHandle = DLOPEN(soname, RTLD_LAZY | RTLD_LOCAL); if (!libraryHandle) { - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to load library " << soname.toStdString() << ": " << DLERROR() << std::endl; + std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to load library " << soname << ": " << DLERROR() << std::endl; return 0; } @@ -342,7 +341,7 @@ DLSYM(libraryHandle, "vampGetPluginDescriptor"); if (!fn) { - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: No descriptor function in " << soname.toStdString() << std::endl; + SVDEBUG << "FeatureExtractionPluginFactory::instantiatePlugin: No descriptor function in " << soname << endl; goto done; } @@ -352,7 +351,7 @@ } if (!descriptor) { - std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to find plugin \"" << label.toStdString() << "\" in library " << soname.toStdString() << std::endl; + std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Failed to find plugin \"" << label << "\" in library " << soname << std::endl; goto done; } @@ -363,18 +362,18 @@ rv = new PluginDeletionNotifyAdapter(plugin, this); } -// std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Constructed Vamp plugin, rv is " << rv << std::endl; +// SVDEBUG << "FeatureExtractionPluginFactory::instantiatePlugin: Constructed Vamp plugin, rv is " << rv << endl; //!!! 
need to dlclose() when plugins from a given library are unloaded done: if (!rv) { if (DLCLOSE(libraryHandle) != 0) { - std::cerr << "WARNING: FeatureExtractionPluginFactory::instantiatePlugin: Failed to unload library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: FeatureExtractionPluginFactory::instantiatePlugin: Failed to unload library " << soname << std::endl; } } -// std::cerr << "FeatureExtractionPluginFactory::instantiatePlugin: Instantiated plugin " << label.toStdString() << " from library " << soname.toStdString() << ": descriptor " << descriptor << ", rv "<< rv << ", label " << rv->getName() << ", outputs " << rv->getOutputDescriptors().size() << std::endl; +// SVDEBUG << "FeatureExtractionPluginFactory::instantiatePlugin: Instantiated plugin " << label << " from library " << soname << ": descriptor " << descriptor << ", rv "<< rv << ", label " << rv->getName() << ", outputs " << rv->getOutputDescriptors().size() << endl; return rv; } @@ -384,7 +383,7 @@ { void *handle = m_handleMap[plugin]; if (handle) { -// std::cerr << "unloading library " << handle << " for plugin " << plugin << std::endl; +// SVDEBUG << "unloading library " << handle << " for plugin " << plugin << endl; DLCLOSE(handle); } m_handleMap.erase(plugin); @@ -416,12 +415,12 @@ QDir dir(path[i], "*.cat"); -// std::cerr << "LADSPAPluginFactory::generateFallbackCategories: directory " << path[i].toStdString() << " has " << dir.count() << " .cat files" << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: directory " << path[i] << " has " << dir.count() << " .cat files" << endl; for (unsigned int j = 0; j < dir.count(); ++j) { QFile file(path[i] + "/" + dir[j]); -// std::cerr << "LADSPAPluginFactory::generateFallbackCategories: about to open " << (path[i].toStdString() + "/" + dir[j].toStdString()) << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: about to open " << (path[i]+ "/" + dir[j]) << endl; if (file.open(QIODevice::ReadOnly)) { // std::cerr << "...opened" << std::endl; @@ -430,12 +429,12 @@ while (!stream.atEnd()) { line = stream.readLine(); -// std::cerr << "line is: \"" << line.toStdString() << "\"" << std::endl; +// std::cerr << "line is: \"" << line << "\"" << std::endl; QString id = PluginIdentifier::canonicalise (line.section("::", 0, 0)); QString cat = line.section("::", 1, 1); m_taxonomy[id] = cat; -// std::cerr << "FeatureExtractionPluginFactory: set id \"" << id.toStdString() << "\" to cat \"" << cat.toStdString() << "\"" << std::endl; +// std::cerr << "FeatureExtractionPluginFactory: set id \"" << id << "\" to cat \"" << cat << "\"" << std::endl; } } } diff -r 4efa7429cd85 -r c10cb8782576 plugin/FeatureExtractionPluginFactory.h --- a/plugin/FeatureExtractionPluginFactory.h Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/FeatureExtractionPluginFactory.h Sun Jul 01 11:53:00 2012 +0100 @@ -22,6 +22,8 @@ #include +#include "base/Debug.h" + class FeatureExtractionPluginFactory { public: diff -r 4efa7429cd85 -r c10cb8782576 plugin/LADSPAPluginFactory.cpp --- a/plugin/LADSPAPluginFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/LADSPAPluginFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -227,7 +227,7 @@ else logmax = log10f(maximum); } -// std::cerr << "LADSPAPluginFactory::getPortDefault: hint = " << d << std::endl; +// SVDEBUG << "LADSPAPluginFactory::getPortDefault: hint = " << d << endl; if (!LADSPA_IS_HINT_HAS_DEFAULT(d)) { @@ -349,8 +349,8 @@ m_instances.insert(instance); #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << 
"LADSPAPluginFactory::instantiatePlugin(" - << identifier.toStdString() << ": now have " << m_instances.size() << " instances" << std::endl; + SVDEBUG << "LADSPAPluginFactory::instantiatePlugin(" + << identifier << ": now have " << m_instances.size() << " instances" << endl; #endif return instance; @@ -384,7 +384,7 @@ PluginIdentifier::parseIdentifier((*ii)->getPluginIdentifier(), itype, isoname, ilabel); if (isoname == soname) { #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "LADSPAPluginFactory::releasePlugin: dll " << soname.toStdString() << " is still in use for plugin " << ilabel.toStdString() << std::endl; + SVDEBUG << "LADSPAPluginFactory::releasePlugin: dll " << soname << " is still in use for plugin " << ilabel << endl; #endif stillInUse = true; break; @@ -394,15 +394,15 @@ if (!stillInUse) { if (soname != PluginIdentifier::BUILTIN_PLUGIN_SONAME) { #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "LADSPAPluginFactory::releasePlugin: dll " << soname.toStdString() << " no longer in use, unloading" << std::endl; + SVDEBUG << "LADSPAPluginFactory::releasePlugin: dll " << soname << " no longer in use, unloading" << endl; #endif unloadLibrary(soname); } } #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "LADSPAPluginFactory::releasePlugin(" - << identifier.toStdString() << ": now have " << m_instances.size() << " instances" << std::endl; + SVDEBUG << "LADSPAPluginFactory::releasePlugin(" + << identifier << ": now have " << m_instances.size() << " instances" << endl; #endif } @@ -415,7 +415,7 @@ if (m_libraryHandles.find(soname) == m_libraryHandles.end()) { loadLibrary(soname); if (m_libraryHandles.find(soname) == m_libraryHandles.end()) { - std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: loadLibrary failed for " << soname.toStdString() << std::endl; + std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: loadLibrary failed for " << soname << std::endl; return 0; } } @@ -426,7 +426,7 @@ DLSYM(libraryHandle, "ladspa_descriptor"); if (!fn) { - std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: No descriptor function in library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: No descriptor function in library " << soname << std::endl; return 0; } @@ -438,7 +438,7 @@ ++index; } - std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: No such plugin as " << label.toStdString() << " in library " << soname.toStdString() << std::endl; + std::cerr << "WARNING: LADSPAPluginFactory::getLADSPADescriptor: No such plugin as " << label << " in library " << soname << std::endl; return 0; } @@ -449,13 +449,13 @@ void *libraryHandle = DLOPEN(soName, RTLD_NOW); if (libraryHandle) { m_libraryHandles[soName] = libraryHandle; - std::cerr << "LADSPAPluginFactory::loadLibrary: Loaded library \"" << soName.toStdString() << "\"" << std::endl; + SVDEBUG << "LADSPAPluginFactory::loadLibrary: Loaded library \"" << soName << "\"" << endl; return; } if (QFileInfo(soName).exists()) { DLERROR(); - std::cerr << "LADSPAPluginFactory::loadLibrary: Library \"" << soName.toStdString() << "\" exists, but failed to load it" << std::endl; + std::cerr << "LADSPAPluginFactory::loadLibrary: Library \"" << soName << "\" exists, but failed to load it" << std::endl; return; } @@ -468,7 +468,7 @@ i != pathList.end(); ++i) { #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "Looking at: " << (*i).toStdString() << std::endl; + SVDEBUG << "Looking at: " << (*i) << endl; #endif QDir dir(*i, PLUGIN_GLOB, @@ -477,7 +477,7 @@ if 
(QFileInfo(dir.filePath(fileName)).exists()) { #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "Loading: " << fileName.toStdString() << std::endl; + std::cerr << "Loading: " << fileName << std::endl; #endif libraryHandle = DLOPEN(dir.filePath(fileName), RTLD_NOW); if (libraryHandle) { @@ -490,7 +490,7 @@ QString file = dir.filePath(dir[j]); if (QFileInfo(file).baseName() == base) { #ifdef DEBUG_LADSPA_PLUGIN_FACTORY - std::cerr << "Loading: " << file.toStdString() << std::endl; + std::cerr << "Loading: " << file << std::endl; #endif libraryHandle = DLOPEN(file, RTLD_NOW); if (libraryHandle) { @@ -501,7 +501,7 @@ } } - std::cerr << "LADSPAPluginFactory::loadLibrary: Failed to locate plugin library \"" << soName.toStdString() << "\"" << std::endl; + std::cerr << "LADSPAPluginFactory::loadLibrary: Failed to locate plugin library \"" << soName << "\"" << std::endl; } void @@ -509,7 +509,7 @@ { LibraryHandleMap::iterator li = m_libraryHandles.find(soName); if (li != m_libraryHandles.end()) { -// std::cerr << "unloading " << soname.toStdString() << std::endl; +// SVDEBUG << "unloading " << soname << endl; DLCLOSE(m_libraryHandles[soName]); m_libraryHandles.erase(li); } @@ -633,13 +633,13 @@ std::vector pathList = getPluginPath(); -// std::cerr << "LADSPAPluginFactory::discoverPlugins - " +// SVDEBUG << "LADSPAPluginFactory::discoverPlugins - " // << "discovering plugins; path is "; // for (std::vector::iterator i = pathList.begin(); // i != pathList.end(); ++i) { -// std::cerr << "[" << i->toStdString() << "] "; +// SVDEBUG << "[" << i-<< "] "; // } -// std::cerr << std::endl; +// SVDEBUG << endl; #ifdef HAVE_LRDF // read the description files @@ -684,7 +684,7 @@ if (!libraryHandle) { std::cerr << "WARNING: LADSPAPluginFactory::discoverPlugins: couldn't load plugin library " - << soname.toStdString() << " - " << DLERROR() << std::endl; + << soname << " - " << DLERROR() << std::endl; return; } @@ -692,7 +692,7 @@ DLSYM(libraryHandle, "ladspa_descriptor"); if (!fn) { - std::cerr << "WARNING: LADSPAPluginFactory::discoverPlugins: No descriptor function in " << soname.toStdString() << std::endl; + std::cerr << "WARNING: LADSPAPluginFactory::discoverPlugins: No descriptor function in " << soname << std::endl; return; } @@ -722,8 +722,8 @@ if (m_lrdfTaxonomy[descriptor->UniqueID] != "") { m_taxonomy[identifier] = m_lrdfTaxonomy[descriptor->UniqueID]; -// std::cerr << "set id \"" << identifier.toStdString() << "\" to cat \"" << m_taxonomy[identifier].toStdString() << "\" from LRDF" << std::endl; -// std::cout << identifier.toStdString() << "::" << m_taxonomy[identifier].toStdString() << std::endl; +// std::cerr << "set id \"" << identifier << "\" to cat \"" << m_taxonomy[identifier] << "\" from LRDF" << std::endl; +// std::cout << identifier << "::" << m_taxonomy[identifier] << std::endl; } QString category = m_taxonomy[identifier]; @@ -818,22 +818,22 @@ path.push_back(p); p.replace("/lib/", "/share/"); path.push_back(p); -// std::cerr << "LADSPAPluginFactory::generateFallbackCategories: path element " << p.toStdString() << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: path element " << p << endl; } path.push_back(pluginPath[i]); -// std::cerr << "LADSPAPluginFactory::generateFallbackCategories: path element " << pluginPath[i].toStdString() << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: path element " << pluginPath[i] << endl; } for (size_t i = 0; i < path.size(); ++i) { QDir dir(path[i], "*.cat"); -// std::cerr << 
"LADSPAPluginFactory::generateFallbackCategories: directory " << path[i].toStdString() << " has " << dir.count() << " .cat files" << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: directory " << path[i] << " has " << dir.count() << " .cat files" << endl; for (unsigned int j = 0; j < dir.count(); ++j) { QFile file(path[i] + "/" + dir[j]); -// std::cerr << "LADSPAPluginFactory::generateFallbackCategories: about to open " << (path[i].toStdString() + "/" + dir[j].toStdString()) << std::endl; +// SVDEBUG << "LADSPAPluginFactory::generateFallbackCategories: about to open " << (path[i]+ "/" + dir[j]) << endl; if (file.open(QIODevice::ReadOnly)) { // std::cerr << "...opened" << std::endl; @@ -842,12 +842,12 @@ while (!stream.atEnd()) { line = stream.readLine(); -// std::cerr << "line is: \"" << line.toStdString() << "\"" << std::endl; +// std::cerr << "line is: \"" << line << "\"" << std::endl; QString id = PluginIdentifier::canonicalise (line.section("::", 0, 0)); QString cat = line.section("::", 1, 1); m_taxonomy[id] = cat; -// std::cerr << "set id \"" << id.toStdString() << "\" to cat \"" << cat.toStdString() << "\"" << std::endl; +// std::cerr << "set id \"" << id << "\" to cat \"" << cat << "\"" << std::endl; } } } diff -r 4efa7429cd85 -r c10cb8782576 plugin/LADSPAPluginInstance.cpp --- a/plugin/LADSPAPluginInstance.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/LADSPAPluginInstance.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -207,8 +207,8 @@ for (unsigned int i = 0; i < m_controlPortsIn.size(); ++i) { if (id == m_descriptor->PortNames[m_controlPortsIn[i].first]) { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::setParameter: Found id " - << id << " at control port " << i << std::endl; + SVDEBUG << "LADSPAPluginInstance::setParameter: Found id " + << id << " at control port " << i << endl; #endif setParameterValue(i, value); break; @@ -220,8 +220,8 @@ LADSPAPluginInstance::init(int idealChannelCount) { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::init(" << idealChannelCount << "): plugin has " - << m_descriptor->PortCount << " ports" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init(" << idealChannelCount << "): plugin has " + << m_descriptor->PortCount << " ports" << endl; #endif // Discover ports numbers and identities @@ -232,12 +232,12 @@ if (LADSPA_IS_PORT_INPUT(m_descriptor->PortDescriptors[i])) { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::init: port " << i << " is audio in" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init: port " << i << " is audio in" << endl; #endif m_audioPortsIn.push_back(i); } else { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::init: port " << i << " is audio out" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init: port " << i << " is audio out" << endl; #endif m_audioPortsOut.push_back(i); } @@ -247,7 +247,7 @@ if (LADSPA_IS_PORT_INPUT(m_descriptor->PortDescriptors[i])) { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::init: port " << i << " is control in" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init: port " << i << " is control in" << endl; #endif LADSPA_Data *data = new LADSPA_Data(0.0); m_controlPortsIn.push_back( @@ -256,7 +256,7 @@ } else { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::init: port " << i << " is control out" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init: port " << i << " is control out" << endl; #endif LADSPA_Data *data = new LADSPA_Data(0.0); m_controlPortsOut.push_back( @@ -273,8 +273,8 @@ } #ifdef DEBUG_LADSPA 
else - std::cerr << "LADSPAPluginInstance::init - " - << "unrecognised port type" << std::endl; + SVDEBUG << "LADSPAPluginInstance::init - " + << "unrecognised port type" << endl; #endif } @@ -341,7 +341,7 @@ LADSPAPluginInstance::~LADSPAPluginInstance() { #ifdef DEBUG_LADSPA - std::cerr << "LADSPAPluginInstance::~LADSPAPluginInstance" << std::endl; + SVDEBUG << "LADSPAPluginInstance::~LADSPAPluginInstance" << endl; #endif if (m_instanceHandles.size() != 0) { // "isOK()" @@ -379,11 +379,12 @@ void LADSPAPluginInstance::instantiate(unsigned long sampleRate) { + if (!m_descriptor) return; + #ifdef DEBUG_LADSPA std::cout << "LADSPAPluginInstance::instantiate - plugin unique id = " << m_descriptor->UniqueID << std::endl; #endif - if (!m_descriptor) return; if (!m_descriptor->instantiate) { std::cerr << "Bad plugin: plugin id " << m_descriptor->UniqueID diff -r 4efa7429cd85 -r c10cb8782576 plugin/PluginXml.cpp --- a/plugin/PluginXml.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/PluginXml.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -81,9 +81,9 @@ for (Vamp::PluginBase::ParameterList::const_iterator i = parameters.begin(); i != parameters.end(); ++i) { -// std::cerr << "PluginXml::toXml: parameter name \"" +// SVDEBUG << "PluginXml::toXml: parameter name \"" // << i->name.c_str() << "\" has value " -// << m_plugin->getParameter(i->name) << std::endl; +// << m_plugin->getParameter(i->name) << endl; stream << QString("param-%1=\"%2\" ") .arg(stripInvalidParameterNameCharacters(QString(i->identifier.c_str()))) @@ -119,7 +119,7 @@ if (ATTRIBUTE != "" && ATTRIBUTE != ACCESSOR().c_str()) { \ std::cerr << "WARNING: PluginXml::setParameters: Plugin " \ << #ATTRIBUTE << " does not match (attributes have \"" \ - << ATTRIBUTE.toStdString() << "\", my " \ + << ATTRIBUTE << "\", my " \ << #ATTRIBUTE << " is \"" << ACCESSOR() << "\")" << std::endl; \ } @@ -174,18 +174,18 @@ (QString(i->identifier.c_str()))); if (attrs.value(pname) == "") { -// std::cerr << "PluginXml::setParameters: no parameter \"" << i->name << "\" (attribute \"" << name.toStdString() << "\")" << std::endl; +// SVDEBUG << "PluginXml::setParameters: no parameter \"" << i->name << "\" (attribute \"" << name << "\")" << endl; continue; } bool ok; float value = attrs.value(pname).trimmed().toFloat(&ok); if (ok) { -// std::cerr << "PluginXml::setParameters: setting parameter \"" -// << i->identifier << "\" to value " << value << std::endl; +// SVDEBUG << "PluginXml::setParameters: setting parameter \"" +// << i->identifier << "\" to value " << value << endl; m_plugin->setParameter(i->identifier, value); } else { - std::cerr << "WARNING: PluginXml::setParameters: Invalid value \"" << attrs.value(pname).toStdString() << "\" for parameter \"" << i->identifier << "\" (attribute \"" << pname.toStdString() << "\")" << std::endl; + std::cerr << "WARNING: PluginXml::setParameters: Invalid value \"" << attrs.value(pname) << "\" for parameter \"" << i->identifier << "\" (attribute \"" << pname << "\")" << std::endl; } } } @@ -199,13 +199,13 @@ int errorLine; int errorColumn; -// std::cerr << "PluginXml::setParametersFromXml: XML is \"" -// << xml.toLocal8Bit().data() << "\"" << std::endl; +// SVDEBUG << "PluginXml::setParametersFromXml: XML is \"" +// << xml.toLocal8Bit().data() << "\"" << endl; if (!doc.setContent(xml, false, &error, &errorLine, &errorColumn)) { - std::cerr << "PluginXml::setParametersFromXml: Error in parsing XML: " << error.toStdString() << " at line " << errorLine << ", column " << errorColumn << std::endl; + std::cerr << 
"PluginXml::setParametersFromXml: Error in parsing XML: " << error << " at line " << errorLine << ", column " << errorColumn << std::endl; std::cerr << "Input follows:" << std::endl; - std::cerr << xml.toStdString() << std::endl; + std::cerr << xml << std::endl; std::cerr << "Input ends." << std::endl; return; } @@ -217,8 +217,7 @@ for (unsigned int i = 0; i < attrNodes.length(); ++i) { QDomAttr attr = attrNodes.item(i).toAttr(); if (attr.isNull()) continue; -// std::cerr << "PluginXml::setParametersFromXml: Adding attribute \"" << attr.name().toStdString() -// << "\" with value \"" << attr.value().toStdString() << "\"" << std::endl; +// SVDEBUG << "PluginXml::setParametersFromXml: Adding attribute \"" << attr.name()// << "\" with value \"" << attr.value() << "\"" << endl; attrs.append(attr.name(), "", "", attr.value()); } diff -r 4efa7429cd85 -r c10cb8782576 plugin/RealTimePluginFactory.cpp --- a/plugin/RealTimePluginFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/RealTimePluginFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -44,16 +44,14 @@ { if (pluginType == "ladspa") { if (!_ladspaInstance) { -// std::cerr << "RealTimePluginFactory::instance(" << pluginType.toStdString() -// << "): creating new LADSPAPluginFactory" << std::endl; +// SVDEBUG << "RealTimePluginFactory::instance(" << pluginType// << "): creating new LADSPAPluginFactory" << endl; _ladspaInstance = new LADSPAPluginFactory(); _ladspaInstance->discoverPlugins(); } return _ladspaInstance; } else if (pluginType == "dssi") { if (!_dssiInstance) { -// std::cerr << "RealTimePluginFactory::instance(" << pluginType.toStdString() -// << "): creating new DSSIPluginFactory" << std::endl; +// SVDEBUG << "RealTimePluginFactory::instance(" << pluginType// << "): creating new DSSIPluginFactory" << endl; _dssiInstance = new DSSIPluginFactory(); _dssiInstance->discoverPlugins(); } diff -r 4efa7429cd85 -r c10cb8782576 plugin/RealTimePluginFactory.h --- a/plugin/RealTimePluginFactory.h Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/RealTimePluginFactory.h Sun Jul 01 11:53:00 2012 +0100 @@ -25,6 +25,8 @@ #include #include +#include "base/Debug.h" + class RealTimePluginInstance; class RealTimePluginDescriptor diff -r 4efa7429cd85 -r c10cb8782576 plugin/RealTimePluginInstance.cpp --- a/plugin/RealTimePluginInstance.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/RealTimePluginInstance.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -28,10 +28,10 @@ RealTimePluginInstance::~RealTimePluginInstance() { -// std::cerr << "RealTimePluginInstance::~RealTimePluginInstance" << std::endl; +// SVDEBUG << "RealTimePluginInstance::~RealTimePluginInstance" << endl; if (m_factory) { -// std::cerr << "Asking factory to release " << m_identifier.toStdString() << std::endl; +// SVDEBUG << "Asking factory to release " << m_identifier << endl; m_factory->releasePlugin(this, m_identifier); } diff -r 4efa7429cd85 -r c10cb8782576 plugin/plugin.pro --- a/plugin/plugin.pro Mon Nov 29 12:45:39 2010 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -TEMPLATE = lib - -SV_UNIT_PACKAGES = vamp vamp-hostsdk lrdf raptor -load(../prf/sv.prf) - -CONFIG += sv staticlib qt thread warn_on stl rtti exceptions -QT += xml -QT -= gui - -TARGET = svplugin - -# Doesn't work with this library, which contains C99 as well as C++ -PRECOMPILED_HEADER = - -DEPENDPATH += . .. api plugins api/alsa api/alsa/sound -INCLUDEPATH += . .. 
api api/alsa plugins api/alsa/sound -OBJECTS_DIR = tmp_obj -MOC_DIR = tmp_moc - -# Input -HEADERS += DSSIPluginFactory.h \ - DSSIPluginInstance.h \ - FeatureExtractionPluginFactory.h \ - LADSPAPluginFactory.h \ - LADSPAPluginInstance.h \ - PluginIdentifier.h \ - PluginXml.h \ - RealTimePluginFactory.h \ - RealTimePluginInstance.h \ - api/dssi.h \ - api/ladspa.h \ - plugins/SamplePlayer.h \ - api/alsa/asoundef.h \ - api/alsa/asoundlib.h \ - api/alsa/seq.h \ - api/alsa/seq_event.h \ - api/alsa/seq_midi_event.h \ - api/alsa/sound/asequencer.h -SOURCES += DSSIPluginFactory.cpp \ - DSSIPluginInstance.cpp \ - FeatureExtractionPluginFactory.cpp \ - LADSPAPluginFactory.cpp \ - LADSPAPluginInstance.cpp \ - PluginIdentifier.cpp \ - PluginXml.cpp \ - RealTimePluginFactory.cpp \ - RealTimePluginInstance.cpp \ - api/dssi_alsa_compat.c \ - plugins/SamplePlayer.cpp - diff -r 4efa7429cd85 -r c10cb8782576 plugin/plugins/SamplePlayer.cpp --- a/plugin/plugins/SamplePlayer.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/plugins/SamplePlayer.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -20,7 +20,8 @@ #include "SamplePlayer.h" #include "system/System.h" -#include +#include "../api/dssi.h" + #include #include @@ -151,14 +152,14 @@ SamplePlayer::instantiate(const LADSPA_Descriptor *, unsigned long rate) { if (!hostDescriptor || !hostDescriptor->request_non_rt_thread) { - std::cerr << "SamplePlayer::instantiate: Host does not provide request_non_rt_thread, not instantiating" << std::endl; + SVDEBUG << "SamplePlayer::instantiate: Host does not provide request_non_rt_thread, not instantiating" << endl; return 0; } SamplePlayer *player = new SamplePlayer(rate); if (hostDescriptor->request_non_rt_thread(player, workThreadCallback)) { - std::cerr << "SamplePlayer::instantiate: Host rejected request_non_rt_thread call, not instantiating" << std::endl; + SVDEBUG << "SamplePlayer::instantiate: Host rejected request_non_rt_thread call, not instantiating" << endl; delete player; return 0; } @@ -320,7 +321,7 @@ if (player->m_pendingProgramChange >= 0) { #ifdef DEBUG_SAMPLE_PLAYER - std::cerr << "SamplePlayer::workThreadCallback: pending program change " << player->m_pendingProgramChange << std::endl; + SVDEBUG << "SamplePlayer::workThreadCallback: pending program change " << player->m_pendingProgramChange << endl; #endif player->m_mutex.lock(); @@ -363,8 +364,8 @@ m_samples.clear(); #ifdef DEBUG_SAMPLE_PLAYER - std::cerr << "SamplePlayer::searchSamples: Directory is \"" - << m_sampleDir.toLocal8Bit().data() << "\"" << std::endl; + SVDEBUG << "SamplePlayer::searchSamples: Directory is \"" + << m_sampleDir.toLocal8Bit().data() << "\"" << endl; #endif QDir dir(m_sampleDir, "*.wav"); diff -r 4efa7429cd85 -r c10cb8782576 plugin/plugins/SamplePlayer.h --- a/plugin/plugins/SamplePlayer.h Mon Nov 29 12:45:39 2010 +0000 +++ b/plugin/plugins/SamplePlayer.h Sun Jul 01 11:53:00 2012 +0100 @@ -18,8 +18,9 @@ #define DSSI_API_LEVEL 2 -#include -#include +#include "../api/ladspa.h" +#include "../api/dssi.h" + #include #include diff -r 4efa7429cd85 -r c10cb8782576 rdf/PluginRDFDescription.cpp --- a/rdf/PluginRDFDescription.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/PluginRDFDescription.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. 
This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -16,16 +16,24 @@ #include "PluginRDFDescription.h" #include "PluginRDFIndexer.h" -#include "SimpleSPARQLQuery.h" #include "base/Profiler.h" #include "plugin/PluginIdentifier.h" +#include + #include using std::cerr; using std::endl; +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; + PluginRDFDescription::PluginRDFDescription(QString pluginId) : m_pluginId(pluginId), m_haveDescription(false) @@ -34,7 +42,7 @@ m_pluginUri = indexer->getURIForPluginId(pluginId); if (m_pluginUri == "") { cerr << "PluginRDFDescription: WARNING: No RDF description available for plugin ID \"" - << pluginId.toStdString() << "\"" << endl; + << pluginId << "\"" << endl; } else { // All the data we need should be in our RDF model already: // if it's not there, we don't know where to find it anyway @@ -172,93 +180,52 @@ { Profiler profiler("PluginRDFDescription::index"); - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + Uri plugin(m_pluginUri); - QString queryTemplate = - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " PREFIX dc: " - " SELECT ?%3 " - " WHERE { " - " <%1> %2 ?%3 . " - " }") - .arg(m_pluginUri); + Node n = index->complete + (Triple(plugin, index->expand("vamp:name"), Node())); - SimpleSPARQLQuery::Value v; - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("vamp:name").arg("name"), "name"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginName = v.value; + if (n.type == Node::Literal && n.value != "") { + m_pluginName = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("dc:description").arg("description"), "description"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginDescription = v.value; + n = index->complete + (Triple(plugin, index->expand("dc:description"), Node())); + + if (n.type == Node::Literal && n.value != "") { + m_pluginDescription = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?name " - " WHERE { " - " <%1> foaf:maker ?maker . " - " ?maker foaf:name ?name . " - " }") - .arg(m_pluginUri), - "name"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_pluginMaker = v.value; + n = index->complete + (Triple(plugin, index->expand("foaf:maker"), Node())); + + if (n.type == Node::URI || n.type == Node::Blank) { + n = index->complete(Triple(n, index->expand("foaf:name"), Node())); + if (n.type == Node::Literal && n.value != "") { + m_pluginMaker = n.value; + } } // If we have a more-information URL for this plugin, then we take - // that. Otherwise, a more-information URL for the plugin - // library would do nicely. Failing that, we could perhaps use - // any foaf:page URL at all that appears in the file -- but - // perhaps that would be unwise + // that. Otherwise, a more-information URL for the plugin library + // would do nicely. - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?page " - " WHERE { " - " <%1> foaf:page ?page . 
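From this file onwards the patch swaps SimpleSPARQLQuery text queries for direct lookups on a Dataquay BasicStore: a single-valued property is read with complete(Triple(subject, predicate, Node())) and then checked for node type and a non-empty value. A rough, stand-alone illustration of that idiom follows; the dataquay header path is an assumption, while the calls themselves are the ones used in the hunks above.

// Sketch: fetch one literal property of a plugin URI from a Dataquay store.
#include <dataquay/BasicStore.h>   // assumed header path for the library
#include <QString>

using Dataquay::BasicStore;
using Dataquay::Node;
using Dataquay::Triple;
using Dataquay::Uri;

QString literalProperty(const BasicStore *index, Uri subject, QString predicate)
{
    // complete() fills in the one free slot of the triple, if a matching triple exists
    Node n = index->complete(Triple(subject, index->expand(predicate), Node()));
    if (n.type == Node::Literal && n.value != "") return n.value;
    return "";
}

// e.g. QString name = literalProperty(index, Uri(pluginUri), "vamp:name");

The same shape recurs below for dc:description, foaf:maker and foaf:page.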
" - " }") - .arg(m_pluginUri), - "page"); + n = index->complete + (Triple(plugin, index->expand("foaf:page"), Node())); - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { + if (n.type == Node::URI && n.value != "") { + m_pluginInfoURL = n.value; + } - m_pluginInfoURL = v.value; + n = index->complete + (Triple(Node(), index->expand("vamp:available_plugin"), plugin)); - } else { - - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString( - " PREFIX vamp: " - " PREFIX foaf: " - " SELECT ?page " - " WHERE { " - " ?library vamp:available_plugin <%1> ; " - " a vamp:PluginLibrary ; " - " foaf:page ?page . " - " }") - .arg(m_pluginUri), - "page"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - - m_pluginInfoURL = v.value; + if (n.value != "") { + n = index->complete(Triple(n, index->expand("foaf:page"), Node())); + if (n.type == Node::URI && n.value != "") { + m_pluginInfoURL = n.value; } } @@ -270,57 +237,43 @@ { Profiler profiler("PluginRDFDescription::indexOutputs"); - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + Uri plugin(m_pluginUri); - SimpleSPARQLQuery query - (m, - QString - ( - " PREFIX vamp: " + Nodes outputs = index->match + (Triple(plugin, index->expand("vamp:output"), Node())).objects(); - " SELECT ?output ?output_id ?output_type ?unit " - - " WHERE { " - - " <%1> vamp:output ?output . " - - " ?output vamp:identifier ?output_id ; " - " a ?output_type . " - - " OPTIONAL { " - " ?output vamp:unit ?unit " - " } . " - - " } " - ) - .arg(m_pluginUri)); - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - cerr << "ERROR: PluginRDFDescription::index: ERROR: Failed to query outputs for <" - << m_pluginUri.toStdString() << ">: " - << query.getErrorString().toStdString() << endl; + if (outputs.empty()) { + cerr << "ERROR: PluginRDFDescription::indexURL: NOTE: No outputs defined for <" + << m_pluginUri << ">" << endl; return false; } - if (results.empty()) { - cerr << "ERROR: PluginRDFDescription::indexURL: NOTE: No outputs defined for <" - << m_pluginUri.toStdString() << ">" << endl; - return false; - } + foreach (Node output, outputs) { - // Note that an output may appear more than once, if it inherits - // more than one type (e.g. DenseOutput and QuantizedOutput). 
So - // these results must accumulate + if ((output.type != Node::URI && output.type != Node::Blank) || + output.value == "") { + cerr << "ERROR: PluginRDFDescription::indexURL: No valid URI for output " << output << " of plugin <" << m_pluginUri << ">" << endl; + return false; + } + + Node n = index->complete(Triple(output, index->expand("vamp:identifier"), Node())); + if (n.type != Node::Literal || n.value == "") { + cerr << "ERROR: PluginRDFDescription::indexURL: No vamp:identifier for output <" << output << ">" << endl; + return false; + } + QString outputId = n.value; - for (int i = 0; i < results.size(); ++i) { + m_outputUriMap[outputId] = output.value; - QString outputUri = results[i]["output"].value; - QString outputId = results[i]["output_id"].value; - QString outputType = results[i]["output_type"].value; + n = index->complete(Triple(output, Uri("a"), Node())); + QString outputType; + if (n.type == Node::URI) outputType = n.value; - m_outputUriMap[outputId] = outputUri; + n = index->complete(Triple(output, index->expand("vamp:unit"), Node())); + QString outputUnit; + if (n.type == Node::Literal) outputUnit = n.value; if (outputType.contains("DenseOutput")) { m_outputDispositions[outputId] = OutputDense; @@ -331,55 +284,32 @@ } else { m_outputDispositions[outputId] = OutputDispositionUnknown; } +// cerr << "output " << output << " -> id " << outputId << ", type " << outputType << ", unit " +// << outputUnit << ", disposition " << m_outputDispositions[outputId] << endl; - if (results[i]["unit"].type == SimpleSPARQLQuery::LiteralValue) { - - QString unit = results[i]["unit"].value; - - if (unit != "") { - m_outputUnitMap[outputId] = unit; - } + if (outputUnit != "") { + m_outputUnitMap[outputId] = outputUnit; } - SimpleSPARQLQuery::Value v; - - v = SimpleSPARQLQuery::singleResultQuery - (m, - QString(" PREFIX vamp: " - " PREFIX dc: " - " SELECT ?title " - " WHERE { <%2> dc:title ?title } ") - .arg(outputUri), "title"); - - if (v.type == SimpleSPARQLQuery::LiteralValue && v.value != "") { - m_outputNames[outputId] = v.value; + n = index->complete(Triple(output, index->expand("dc:title"), Node())); + if (n.type == Node::Literal && n.value != "") { + m_outputNames[outputId] = n.value; } - QString queryTemplate = - QString(" PREFIX vamp: " - " SELECT ?%3 " - " WHERE { <%2> vamp:computes_%3 ?%3 } ") - .arg(outputUri); - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("event_type"), "event_type"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputEventTypeURIMap[outputId] = v.value; + n = index->complete(Triple(output, index->expand("vamp:computes_event_type"), Node())); +// cerr << output << " -> computes_event_type " << n << endl; + if (n.type == Node::URI && n.value != "") { + m_outputEventTypeURIMap[outputId] = n.value; } - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("feature"), "feature"); + n = index->complete(Triple(output, index->expand("vamp:computes_feature"), Node())); + if (n.type == Node::URI && n.value != "") { + m_outputFeatureAttributeURIMap[outputId] = n.value; + } - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputFeatureAttributeURIMap[outputId] = v.value; - } - - v = SimpleSPARQLQuery::singleResultQuery - (m, queryTemplate.arg("signal_type"), "signal_type"); - - if (v.type == SimpleSPARQLQuery::URIValue && v.value != "") { - m_outputSignalTypeURIMap[outputId] = v.value; + n = index->complete(Triple(output, index->expand("vamp:computes_signal_type"), Node())); + if (n.type == 
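indexOutputs() above first gathers every object of a vamp:output triple with match(...).objects(), then completes one triple per attribute of each output. A minimal sketch of that enumerate-then-complete pattern (header path assumed; the predicates are those used in the hunk):

// Sketch: list a plugin's outputs and read one attribute of each.
#include <dataquay/BasicStore.h>   // assumed header path
#include <QString>
#include <iostream>

using namespace Dataquay;

void listOutputIdentifiers(const BasicStore *index, Uri plugin)
{
    // all ?output with <plugin> vamp:output ?output
    Nodes outputs = index->match
        (Triple(plugin, index->expand("vamp:output"), Node())).objects();

    foreach (Node output, outputs) {
        if (output.type != Node::URI && output.type != Node::Blank) continue;
        Node id = index->complete
            (Triple(output, index->expand("vamp:identifier"), Node()));
        if (id.type == Node::Literal && id.value != "") {
            std::cout << "output: " << id.value.toStdString() << std::endl;
        }
    }
}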
Node::URI && n.value != "") { + m_outputSignalTypeURIMap[outputId] = n.value; } } diff -r 4efa7429cd85 -r c10cb8782576 rdf/PluginRDFDescription.h --- a/rdf/PluginRDFDescription.h Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/PluginRDFDescription.h Sun Jul 01 11:53:00 2012 +0100 @@ -20,6 +20,8 @@ #include #include +#include "base/Debug.h" + class PluginRDFDescription { public: diff -r 4efa7429cd85 -r c10cb8782576 rdf/PluginRDFIndexer.cpp --- a/rdf/PluginRDFIndexer.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/PluginRDFIndexer.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -15,8 +15,6 @@ #include "PluginRDFIndexer.h" -#include "SimpleSPARQLQuery.h" - #include "data/fileio/CachedFile.h" #include "data/fileio/FileSource.h" #include "data/fileio/PlaylistFileReader.h" @@ -26,6 +24,9 @@ #include +#include +#include + #include #include #include @@ -40,12 +41,18 @@ using std::string; using Vamp::PluginHostAdapter; +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::RDFException; +using Dataquay::RDFDuplicateImportException; + PluginRDFIndexer * PluginRDFIndexer::m_instance = 0; -bool -PluginRDFIndexer::m_prefixesLoaded = false; - PluginRDFIndexer * PluginRDFIndexer::getInstance() { @@ -53,11 +60,21 @@ return m_instance; } -PluginRDFIndexer::PluginRDFIndexer() +PluginRDFIndexer::PluginRDFIndexer() : + m_index(new Dataquay::BasicStore) { + m_index->addPrefix("vamp", Uri("http://purl.org/ontology/vamp/")); + m_index->addPrefix("foaf", Uri("http://xmlns.com/foaf/0.1/")); + m_index->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); indexInstalledURLs(); } +const BasicStore * +PluginRDFIndexer::getIndex() +{ + return m_index; +} + PluginRDFIndexer::~PluginRDFIndexer() { QMutexLocker locker(&m_mutex); @@ -68,19 +85,23 @@ { vector paths = PluginHostAdapter::getPluginPath(); +// std::cerr << "\nPluginRDFIndexer::indexInstalledURLs: pid is " << getpid() << std::endl; + QStringList filters; + filters << "*.ttl"; + filters << "*.TTL"; filters << "*.n3"; filters << "*.N3"; filters << "*.rdf"; filters << "*.RDF"; - // Search each Vamp plugin path for a .rdf file that either has + // Search each Vamp plugin path for an RDF file that either has // name "soname", "soname:label" or "soname/label" plus RDF - // extension. Use that order of preference, and prefer n3 over - // rdf extension. + // extension. Use that order of preference, and prefer ttl over + // n3 over rdf extension. 
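The indexer now owns a Dataquay BasicStore and registers the vamp, foaf and dc prefixes before importing anything, so that later lookups can use expand("vamp:Plugin") and friends; the real constructor then goes on to scan the installed plugin paths. Construction alone looks roughly like this (header path assumed; prefix URIs as registered in the hunk above):

// Sketch: build a store with the prefixes the indexer registers.
#include <dataquay/BasicStore.h>   // assumed header path

using Dataquay::BasicStore;
using Dataquay::Uri;

BasicStore *makePluginIndex()
{
    BasicStore *index = new BasicStore;
    index->addPrefix("vamp", Uri("http://purl.org/ontology/vamp/"));
    index->addPrefix("foaf", Uri("http://xmlns.com/foaf/0.1/"));
    index->addPrefix("dc",   Uri("http://purl.org/dc/elements/1.1/"));
    // expand("vamp:name") etc. now resolve against these prefixes
    return index;
}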
for (vector::const_iterator i = paths.begin(); i != paths.end(); ++i) { - + QDir dir(i->c_str()); if (!dir.exists()) continue; @@ -89,6 +110,7 @@ for (QStringList::const_iterator j = entries.begin(); j != entries.end(); ++j) { + QFileInfo fi(dir.filePath(*j)); pullFile(fi.absoluteFilePath()); } @@ -98,6 +120,7 @@ for (QStringList::const_iterator j = subdirs.begin(); j != subdirs.end(); ++j) { + QDir subdir(dir.filePath(*j)); if (subdir.exists()) { entries = subdir.entryList @@ -117,7 +140,7 @@ bool PluginRDFIndexer::indexConfiguredURLs() { - std::cerr << "PluginRDFIndexer::indexConfiguredURLs" << std::endl; + SVDEBUG << "PluginRDFIndexer::indexConfiguredURLs" << endl; QSettings settings; settings.beginGroup("RDF"); @@ -129,8 +152,8 @@ QString index = indices[i]; - std::cerr << "PluginRDFIndexer::indexConfiguredURLs: index url is " - << index.toStdString() << std::endl; + SVDEBUG << "PluginRDFIndexer::indexConfiguredURLs: index url is " + << index << endl; CachedFile cf(index); if (!cf.isOK()) continue; @@ -143,8 +166,8 @@ PlaylistFileReader::Playlist list = reader.load(); for (PlaylistFileReader::Playlist::const_iterator j = list.begin(); j != list.end(); ++j) { - std::cerr << "PluginRDFIndexer::indexConfiguredURLs: url is " - << j->toStdString() << std::endl; + SVDEBUG << "PluginRDFIndexer::indexConfiguredURLs: url is " + << j->toStdString() << endl; pullURL(*j); } } @@ -186,7 +209,7 @@ // Because we may want to refer to this document again, we // cache it locally if it turns out to exist. - cerr << "PluginRDFIndexer::getIdForPluginURI: NOTE: Failed to find a local RDF document describing plugin <" << uri.toStdString() << ">: attempting to retrieve one remotely by guesswork" << endl; + cerr << "PluginRDFIndexer::getIdForPluginURI: NOTE: Failed to find a local RDF document describing plugin <" << uri << ">: attempting to retrieve one remotely by guesswork" << endl; QString baseUrl = QUrl(uri).toString(QUrl::RemoveFragment); @@ -239,13 +262,11 @@ { Profiler profiler("PluginRDFIndexer::indexURL"); - loadPrefixes(); - // std::cerr << "PluginRDFIndexer::indexURL(" << urlString.toStdString() << ")" << std::endl; QMutexLocker locker(&m_mutex); - QString localString = urlString; + QUrl local = urlString; if (FileSource::isRemote(urlString) && FileSource::canHandleScheme(urlString)) { @@ -255,91 +276,82 @@ return false; } - localString = QUrl::fromLocalFile(cf.getLocalFilename()).toString(); + local = QUrl::fromLocalFile(cf.getLocalFilename()); + + } else if (urlString.startsWith("file:")) { + + local = QUrl(urlString); + + } else { + + local = QUrl::fromLocalFile(urlString); } - return SimpleSPARQLQuery::addSourceToModel(localString); + try { + m_index->import(local, BasicStore::ImportFailOnDuplicates); + } catch (RDFDuplicateImportException &e) { + cerr << e.what() << endl; + cerr << "PluginRDFIndexer::pullURL: Document at " << urlString + << " duplicates triples found in earlier loaded document -- skipping it" << endl; + return false; + } catch (RDFException &e) { + cerr << e.what() << endl; + cerr << "PluginRDFIndexer::pullURL: Failed to import document from " + << urlString << ": " << e.what() << endl; + return false; + } + return true; } bool PluginRDFIndexer::reindex() { - SimpleSPARQLQuery::QueryType m = SimpleSPARQLQuery::QueryFromModel; - - SimpleSPARQLQuery query - (m, - QString - ( - " PREFIX vamp: " - - " SELECT ?plugin ?library ?plugin_id " - - " WHERE { " - " ?plugin a vamp:Plugin . " - " ?plugin vamp:identifier ?plugin_id . 
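pullURL() now hands the resolved document URL straight to BasicStore::import() and treats a duplicate-triples import as a skip rather than a hard failure. A trimmed sketch of that error handling (header paths are assumptions; the import flag and exception types are the ones used in the hunk):

// Sketch: import one RDF document into the index, skipping exact duplicates.
#include <dataquay/BasicStore.h>     // assumed header paths
#include <dataquay/RDFException.h>
#include <QUrl>
#include <iostream>

using namespace Dataquay;

bool importDocument(BasicStore *index, QUrl url)
{
    try {
        index->import(url, BasicStore::ImportFailOnDuplicates);
    } catch (RDFDuplicateImportException &e) {
        std::cerr << "skipping " << url.toString().toStdString()
                  << ": duplicates an earlier document (" << e.what() << ")" << std::endl;
        return false;
    } catch (RDFException &e) {
        std::cerr << "failed to import " << url.toString().toStdString()
                  << ": " << e.what() << std::endl;
        return false;
    }
    return true;
}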
" - - " OPTIONAL { " - " ?library vamp:available_plugin ?plugin " - " } " - " } " - )); - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - cerr << "ERROR: PluginRDFIndexer::reindex: ERROR: Failed to query plugins from model: " - << query.getErrorString().toStdString() << endl; - return false; - } - - if (results.empty()) { - cerr << "PluginRDFIndexer::reindex: NOTE: no vamp:Plugin resources found in indexed documents" << endl; - return false; - } + Triples tt = m_index->match + (Triple(Node(), Uri("a"), m_index->expand("vamp:Plugin"))); + Nodes plugins = tt.subjects(); bool foundSomething = false; bool addedSomething = false; - for (SimpleSPARQLQuery::ResultList::iterator i = results.begin(); - i != results.end(); ++i) { - - QString pluginUri = (*i)["plugin"].value; - QString soUri = (*i)["library"].value; - QString identifier = (*i)["plugin_id"].value; - - if (identifier == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No vamp:identifier for plugin <" - << pluginUri.toStdString() << ">" - << endl; + foreach (Node plugin, plugins) { + + if (plugin.type != Node::URI) { + cerr << "PluginRDFIndexer::reindex: Plugin has no URI: node is " + << plugin << endl; continue; } - if (soUri == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No implementation library for plugin <" - << pluginUri.toStdString() << ">" - << endl; + + Node idn = m_index->complete + (Triple(plugin, m_index->expand("vamp:identifier"), Node())); + + if (idn.type != Node::Literal) { + cerr << "PluginRDFIndexer::reindex: Plugin " << plugin + << " lacks vamp:identifier literal" << endl; continue; } - QString sonameQuery = - QString( - " PREFIX vamp: " - " SELECT ?library_id " - " WHERE { " - " <%1> vamp:identifier ?library_id " - " } " - ) - .arg(soUri); + Node libn = m_index->complete + (Triple(Node(), m_index->expand("vamp:available_plugin"), plugin)); - SimpleSPARQLQuery::Value sonameValue = - SimpleSPARQLQuery::singleResultQuery(m, sonameQuery, "library_id"); - QString soname = sonameValue.value; - if (soname == "") { - cerr << "PluginRDFIndexer::reindex: NOTE: No identifier for library <" - << soUri.toStdString() << ">" - << endl; + if (libn.type != Node::URI) { + cerr << "PluginRDFIndexer::reindex: Plugin " << plugin + << " is not vamp:available_plugin in any library" << endl; continue; } + Node son = m_index->complete + (Triple(libn, m_index->expand("vamp:identifier"), Node())); + + if (son.type != Node::Literal) { + cerr << "PluginRDFIndexer::reindex: Library " << libn + << " lacks vamp:identifier for soname" << endl; + continue; + } + + QString pluginUri = plugin.value; + QString identifier = idn.value; + QString soname = son.value; + QString pluginId = PluginIdentifier::createIdentifier ("vamp", soname, identifier); @@ -356,9 +368,9 @@ if (pluginUri != "") { if (m_uriToIdMap.find(pluginUri) != m_uriToIdMap.end()) { cerr << "PluginRDFIndexer::reindex: WARNING: Found multiple plugins with the same URI:" << endl; - cerr << " 1. Plugin id \"" << m_uriToIdMap[pluginUri].toStdString() << "\"" << endl; - cerr << " 2. Plugin id \"" << pluginId.toStdString() << "\"" << endl; - cerr << "both claim URI <" << pluginUri.toStdString() << ">" << endl; + cerr << " 1. Plugin id \"" << m_uriToIdMap[pluginUri] << "\"" << endl; + cerr << " 2. Plugin id \"" << pluginId << "\"" << endl; + cerr << "both claim URI <" << pluginUri << ">" << endl; } else { m_uriToIdMap[pluginUri] = pluginId; } @@ -371,23 +383,3 @@ return addedSomething; } - -void -PluginRDFIndexer::loadPrefixes() -{ - return; -//!!! 
- if (m_prefixesLoaded) return; - const char *prefixes[] = { - "http://purl.org/ontology/vamp/" - }; - for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { - CachedFile cf(prefixes[i], 0, "application/rdf+xml"); - if (!cf.isOK()) continue; - SimpleSPARQLQuery::addSourceToModel - (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); - } - m_prefixesLoaded = true; -} - - diff -r 4efa7429cd85 -r c10cb8782576 rdf/PluginRDFIndexer.h --- a/rdf/PluginRDFIndexer.h Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/PluginRDFIndexer.h Sun Jul 01 11:53:00 2012 +0100 @@ -22,6 +22,10 @@ #include #include +namespace Dataquay { + class BasicStore; +} + class PluginRDFIndexer { public: @@ -48,6 +52,8 @@ QStringList getIndexedPluginIds(); + const Dataquay::BasicStore *getIndex(); + ~PluginRDFIndexer(); protected: @@ -64,8 +70,8 @@ bool pullURL(QString urlString); bool reindex(); - static void loadPrefixes(); - static bool m_prefixesLoaded; + Dataquay::BasicStore *m_index; + static PluginRDFIndexer *m_instance; }; diff -r 4efa7429cd85 -r c10cb8782576 rdf/RDFExporter.cpp --- a/rdf/RDFExporter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/RDFExporter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -172,6 +172,6 @@ QString RDFExporter::getSupportedExtensions() { - return "*.n3 *.ttl"; + return "*.ttl *.n3"; } diff -r 4efa7429cd85 -r c10cb8782576 rdf/RDFFeatureWriter.cpp --- a/rdf/RDFFeatureWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/RDFFeatureWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -112,7 +112,7 @@ RDFFeatureWriter::setTrackMetadata(QString trackId, TrackMetadata metadata) { -// cerr << "setTrackMetadata: title = " << metadata.title.toStdString() << ", maker = " << metadata.maker.toStdString() << endl; +// cerr << "setTrackMetadata: title = " << metadata.title << ", maker = " << metadata.maker << endl; m_metadata[trackId] = metadata; } @@ -142,10 +142,10 @@ if (m_rdfDescriptions[pluginId].haveDescription()) { cerr << "NOTE: Have RDF description for plugin ID \"" - << pluginId.toStdString() << "\"" << endl; + << pluginId << "\"" << endl; } else { cerr << "NOTE: No RDF description for plugin ID \"" - << pluginId.toStdString() << "\"" << endl; + << pluginId << "\"" << endl; if (!m_network) { cerr << " Consider using the --rdf-network option to retrieve plugin descriptions" << endl; cerr << " from the network where possible." << endl; @@ -174,7 +174,8 @@ m_startedStreamTransforms[stream].end()) { m_startedStreamTransforms[stream].insert(transform); writeLocalFeatureTypes - (stream, transform, output, m_rdfDescriptions[pluginId]); + (stream, transform, output, m_rdfDescriptions[pluginId], + summaryType); } if (m_singleFileName != "" || m_stdout) { @@ -262,7 +263,7 @@ // dirty grubby low-rent way of doing that. This function is // called by FileFeatureWriter::getOutputFile when in append mode. 
-// std::cerr << "reviewFileForAppending(" << filename.toStdString() << ")" << std::endl; +// std::cerr << "reviewFileForAppending(" << filename << ")" << std::endl; QFile file(filename); @@ -292,7 +293,7 @@ RDFFeatureWriter::writeSignalDescription(QTextStream *sptr, QString trackId) { -// std::cerr << "RDFFeatureWriter::writeSignalDescription" << std::endl; +// SVDEBUG << "RDFFeatureWriter::writeSignalDescription" << endl; QTextStream &stream = *sptr; @@ -348,7 +349,7 @@ (m_metadata.find(trackId) != m_metadata.end())); // cerr << "wantTrack = " << wantTrack << " (userSpecifiedTrack = " -// << userSpecifiedTrack << ", m_userMakerUri = " << m_userMakerUri.toStdString() << ", have metadata = " << (m_metadata.find(trackId) != m_metadata.end()) << ")" << endl; +// << userSpecifiedTrack << ", m_userMakerUri = " << m_userMakerUri << ", have metadata = " << (m_metadata.find(trackId) != m_metadata.end()) << ")" << endl; if (wantTrack) { // We only write a Track at all if we have some title/artist @@ -390,14 +391,15 @@ << " tl:onTimeLine " << timelineURI << "\n ] .\n\n"; - stream << timelineURI << " a tl:Timeline .\n"; + stream << timelineURI << " a tl:Timeline .\n\n"; } void RDFFeatureWriter::writeLocalFeatureTypes(QTextStream *sptr, const Transform &transform, const Plugin::OutputDescriptor &od, - PluginRDFDescription &desc) + PluginRDFDescription &desc, + std::string summaryType) { QString outputId = od.identifier.c_str(); QTextStream &stream = *sptr; @@ -412,7 +414,8 @@ //!!! bin names, extents and so on can be written out using e.g. vamp:bin_names ( "a" "b" "c" ) - if (desc.getOutputDisposition(outputId) == + if (summaryType == "" && + desc.getOutputDisposition(outputId) == PluginRDFDescription::OutputDense) { // no feature events, so may need signal type but won't need @@ -520,7 +523,7 @@ PluginRDFDescription &desc, QString timelineURI) { -// std::cerr << "RDFFeatureWriter::writeSparseRDF: have " << featureList.size() << " features" << std::endl; +// SVDEBUG << "RDFFeatureWriter::writeSparseRDF: have " << featureList.size() << " features" << endl; if (featureList.empty()) return; QTextStream &stream = *sptr; @@ -621,7 +624,7 @@ QString featureUri = desc.getOutputFeatureAttributeURI(outputId); if (featureUri == "") { - cerr << "RDFFeatureWriter::writeTrackLevelRDF: ERROR: No feature URI available -- this function should not have been called!" << endl; + SVDEBUG << "RDFFeatureWriter::writeTrackLevelRDF: ERROR: No feature URI available -- this function should not have been called!" << endl; return; } @@ -765,14 +768,14 @@ void RDFFeatureWriter::finish() { -// cerr << "RDFFeatureWriter::finish()" << endl; +// SVDEBUG << "RDFFeatureWriter::finish()" << endl; // close any open dense feature literals for (map::iterator i = m_openDenseFeatures.begin(); i != m_openDenseFeatures.end(); ++i) { -// cerr << "closing a stream" << endl; +// SVDEBUG << "closing a stream" << endl; StreamBuffer &b = i->second; *(b.first) << b.second << "\" ." 
<< endl; } diff -r 4efa7429cd85 -r c10cb8782576 rdf/RDFFeatureWriter.h --- a/rdf/RDFFeatureWriter.h Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/RDFFeatureWriter.h Sun Jul 01 11:53:00 2012 +0100 @@ -77,7 +77,8 @@ void writeLocalFeatureTypes(QTextStream *, const Transform &, const Vamp::Plugin::OutputDescriptor &, - PluginRDFDescription &); + PluginRDFDescription &, + std::string summaryType); void writeSparseRDF(QTextStream *stream, const Transform &transform, diff -r 4efa7429cd85 -r c10cb8782576 rdf/RDFImporter.cpp --- a/rdf/RDFImporter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/RDFImporter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -21,8 +21,6 @@ #include #include -#include "SimpleSPARQLQuery.h" - #include "base/ProgressReporter.h" #include "base/RealTime.h" @@ -38,6 +36,17 @@ #include "data/fileio/CachedFile.h" #include "data/fileio/FileFinder.h" +#include +#include + +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::PropertyObject; + using std::cerr; using std::endl; @@ -55,6 +64,9 @@ std::vector getDataModels(ProgressReporter *); protected: + BasicStore *m_store; + Uri expand(QString s) { return m_store->expand(s); } + QString m_uristring; QString m_errorString; std::map m_audioModelMap; @@ -62,9 +74,6 @@ std::map > m_labelValueMap; - static bool m_prefixesLoaded; - static void loadPrefixes(ProgressReporter *reporter); - void getDataModelsAudio(std::vector &, ProgressReporter *); void getDataModelsSparse(std::vector &, ProgressReporter *); void getDataModelsDense(std::vector &, ProgressReporter *); @@ -78,8 +87,6 @@ void fillModel(Model *, long, long, bool, std::vector &, QString); }; -bool RDFImporterImpl::m_prefixesLoaded = false; - QString RDFImporter::getKnownExtensions() { @@ -121,14 +128,35 @@ } RDFImporterImpl::RDFImporterImpl(QString uri, int sampleRate) : + m_store(new BasicStore), m_uristring(uri), m_sampleRate(sampleRate) { + //!!! retrieve data if remote... 
then + + m_store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + m_store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + m_store->addPrefix("dc", Uri("http://purl.org/dc/elements/1.1/")); + m_store->addPrefix("tl", Uri("http://purl.org/NET/c4dm/timeline.owl#")); + m_store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + m_store->addPrefix("rdfs", Uri("http://www.w3.org/2000/01/rdf-schema#")); + + try { + QUrl url; + if (uri.startsWith("file:")) { + url = QUrl(uri); + } else { + url = QUrl::fromLocalFile(uri); + } + m_store->import(url, BasicStore::ImportIgnoreDuplicates); + } catch (std::exception &e) { + m_errorString = e.what(); + } } RDFImporterImpl::~RDFImporterImpl() { - SimpleSPARQLQuery::closeSingleSource(m_uristring); + delete m_store; } bool @@ -146,15 +174,13 @@ std::vector RDFImporterImpl::getDataModels(ProgressReporter *reporter) { - loadPrefixes(reporter); - std::vector models; getDataModelsAudio(models, reporter); if (m_sampleRate == 0) { m_errorString = QString("Invalid audio data model (is audio file format supported?)"); - std::cerr << m_errorString.toStdString() << std::endl; + std::cerr << m_errorString << std::endl; return models; } @@ -185,48 +211,31 @@ RDFImporterImpl::getDataModelsAudio(std::vector &models, ProgressReporter *reporter) { - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?source a mo:AudioFile . " - " ?signal a mo:Signal . " - " ?source mo:encodes ?signal } " - ) - .arg(m_uristring)); + Nodes sigs = m_store->match + (Triple(Node(), Uri("a"), expand("mo:Signal"))).subjects(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sig, sigs) { + + Node file = m_store->complete(Triple(Node(), expand("mo:encodes"), sig)); + if (file == Node()) { + file = m_store->complete(Triple(sig, expand("mo:available_as"), Node())); + } + if (file == Node()) { + std::cerr << "RDFImporterImpl::getDataModelsAudio: ERROR: No source for signal " << sig << std::endl; + continue; + } - if (results.empty()) { + QString signal = sig.value; + QString source = file.value; - SimpleSPARQLQuery query2 - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " SELECT ?signal ?source FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?source } " - ) - .arg(m_uristring)); - - results = query.execute(); - } - - for (int i = 0; i < results.size(); ++i) { - - QString signal = results[i]["signal"].value; - QString source = results[i]["source"].value; - - std::cerr << "NOTE: Seeking signal source \"" << source.toStdString() - << "\"..." << std::endl; + SVDEBUG << "NOTE: Seeking signal source \"" << source + << "\"..." << endl; FileSource *fs = new FileSource(source, reporter); if (fs->isAvailable()) { - std::cerr << "NOTE: Source is available: Local filename is \"" - << fs->getLocalFilename().toStdString() - << "\"..." << std::endl; + SVDEBUG << "NOTE: Source is available: Local filename is \"" + << fs->getLocalFilename() + << "\"..." << endl; } #ifdef NO_SV_GUI @@ -237,8 +246,8 @@ } #else if (!fs->isAvailable()) { - std::cerr << "NOTE: Signal source \"" << source.toStdString() - << "\" is not available, using file finder..." << std::endl; + SVDEBUG << "NOTE: Signal source \"" << source + << "\" is not available, using file finder..." 
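getDataModelsAudio() walks the store directly: each subject typed mo:Signal is a candidate signal, and its audio source is found either as the subject of an mo:encodes triple pointing at the signal or, failing that, as the object of the signal's mo:available_as. A sketch of that two-direction lookup (header path assumed; predicates as above):

// Sketch: for each mo:Signal, find the URL of the file that carries it.
#include <dataquay/BasicStore.h>   // assumed header path
#include <QString>
#include <QStringList>

using namespace Dataquay;

QStringList findSignalSources(const BasicStore *store)
{
    QStringList sources;
    Nodes sigs = store->match
        (Triple(Node(), Uri("a"), store->expand("mo:Signal"))).subjects();

    foreach (Node sig, sigs) {
        // preferred: ?file mo:encodes ?signal
        Node file = store->complete(Triple(Node(), store->expand("mo:encodes"), sig));
        if (file == Node()) {
            // fallback: ?signal mo:available_as ?file
            file = store->complete(Triple(sig, store->expand("mo:available_as"), Node()));
        }
        if (file != Node()) sources.push_back(file.value);
    }
    return sources;
}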
<< endl; FileFinder *ff = FileFinder::getInstance(); if (ff) { QString path = ff->find(FileFinder::AudioFile, @@ -265,7 +274,7 @@ fs->waitForData(); WaveFileModel *newModel = new WaveFileModel(*fs, m_sampleRate); if (newModel->isOK()) { - std::cerr << "Successfully created wave file model from source at \"" << source.toStdString() << "\"" << std::endl; + std::cerr << "Successfully created wave file model from source at \"" << source << "\"" << std::endl; models.push_back(newModel); m_audioModelMap[signal] = newModel; if (m_sampleRate == 0) { @@ -287,44 +296,21 @@ reporter->setMessage(RDFImporter::tr("Importing dense signal data from RDF...")); } - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?feature ?feature_signal_type ?value " - " FROM <%1> " - - " WHERE { " - - " ?signal af:signal_feature ?feature . " - - " ?feature a ?feature_signal_type ; " - " af:value ?value . " - - " } " - ) - .arg(m_uristring)); + Nodes sigFeatures = m_store->match + (Triple(Node(), expand("af:signal_feature"), Node())).objects(); - SimpleSPARQLQuery::ResultList results = query.execute(); + foreach (Node sf, sigFeatures) { - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } + if (sf.type != Node::URI && sf.type != Node::Blank) continue; + + Node t = m_store->complete(Triple(sf, expand("a"), Node())); + Node v = m_store->complete(Triple(sf, expand("af:value"), Node())); - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - for (int i = 0; i < results.size(); ++i) { - - QString feature = results[i]["feature"].value; - QString type = results[i]["feature_signal_type"].value; - QString value = results[i]["value"].value; + QString feature = sf.value; + QString type = t.value; + QString value = v.value; + + if (type == "" || value == "") continue; int sampleRate = 0; int windowLength = 0; @@ -410,41 +396,25 @@ QString featureUri, QString featureTypeUri) { - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . 
" - " } " - ).arg(m_uristring); - - SimpleSPARQLQuery::Value v; + Node n = m_store->complete + (Triple(Uri(featureUri), expand("dc:title"), Node())); - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureUri), - "title"); - - if (v.value != "") { - std::cerr << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << v.value.toStdString() << "\"" << std::endl; - m->setObjectName(v.value); + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } - v = SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - titleQuery.arg(featureTypeUri), - "title"); - - if (v.value != "") { - std::cerr << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << v.value.toStdString() << "\"" << std::endl; - m->setObjectName(v.value); + n = m_store->complete + (Triple(Uri(featureTypeUri), expand("dc:title"), Node())); + + if (n.type == Node::Literal && n.value != "") { + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: Title (from signal type) \"" << n.value << "\"" << endl; + m->setObjectName(n.value); return; } - std::cerr << "RDFImporterImpl::getDenseModelTitle: No title available for feature <" << featureUri.toStdString() << ">" << std::endl; + SVDEBUG << "RDFImporterImpl::getDenseModelTitle: No title available for feature <" << featureUri << ">" << endl; } void @@ -452,91 +422,61 @@ int &sampleRate, int &windowLength, int &hopSize, int &width, int &height) { - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + Node dim = m_store->complete + (Triple(Uri(featureUri), expand("af:dimensions"), Node())); - QString dimensionsQuery - ( - " PREFIX mo: " - " PREFIX af: " - - " SELECT ?dimensions " - " FROM <%1> " + cerr << "Dimensions = \"" << dim.value << "\"" << endl; - " WHERE { " - - " <%2> af:dimensions ?dimensions . " - - " } " - ); - - SimpleSPARQLQuery::Value dimensionsValue = - SimpleSPARQLQuery::singleResultQuery - (s, dimensionsQuery.arg(m_uristring).arg(featureUri), "dimensions"); - - cerr << "Dimensions = \"" << dimensionsValue.value.toStdString() << "\"" - << endl; - - if (dimensionsValue.value != "") { - QStringList dl = dimensionsValue.value.split(" "); - if (dl.empty()) dl.push_back(dimensionsValue.value); + if (dim.type == Node::Literal && dim.value != "") { + QStringList dl = dim.value.split(" "); + if (dl.empty()) dl.push_back(dim.value); if (dl.size() > 0) height = dl[0].toInt(); if (dl.size() > 1) width = dl[1].toInt(); } + + // Looking for rate, hop, window from: + // + // ?feature mo:time ?time . + // ?time a tl:Interval . + // ?time tl:onTimeLine ?timeline . + // ?map tl:rangeTimeLine ?timeline . + // ?map tl:sampleRate ?rate . + // ?map tl:hopSize ?hop . + // ?map tl:windowLength ?window . - QString queryTemplate - ( - " PREFIX mo: " - " PREFIX af: " - " PREFIX tl: " + Node interval = m_store->complete(Triple(Uri(featureUri), expand("mo:time"), Node())); - " SELECT ?%3 " - " FROM <%1> " - - " WHERE { " - - " <%2> mo:time ?time . " - - " ?time a tl:Interval ; " - " tl:onTimeLine ?timeline . " - - " ?map tl:rangeTimeLine ?timeline . " - - " ?map tl:%3 ?%3 . 
" - - " } " - ); - - // Another laborious workaround for rasqal's failure to handle - // multiple optionals properly - - SimpleSPARQLQuery::Value srValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("sampleRate"), - "sampleRate"); - if (srValue.value != "") { - sampleRate = srValue.value.toInt(); + if (!m_store->contains(Triple(interval, expand("a"), expand("tl:Interval")))) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Feature time node " + << interval << " is not a tl:Interval" << endl; + return; } - SimpleSPARQLQuery::Value hopValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("hopSize"), - "hopSize"); - if (srValue.value != "") { - hopSize = hopValue.value.toInt(); + Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); + + if (tl == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: Interval node " + << interval << " lacks tl:onTimeLine property" << endl; + return; } - SimpleSPARQLQuery::Value winValue = - SimpleSPARQLQuery::singleResultQuery(s, - queryTemplate - .arg(m_uristring).arg(featureUri) - .arg("windowLength"), - "windowLength"); - if (winValue.value != "") { - windowLength = winValue.value.toInt(); + Node map = m_store->complete(Triple(Node(), expand("tl:rangeTimeLine"), tl)); + + if (map == Node()) { + cerr << "RDFImporterImpl::getDenseFeatureProperties: No map for " + << "timeline node " << tl << endl; + } + + PropertyObject po(m_store, "tl:", map); + + if (po.hasProperty("sampleRate")) { + sampleRate = po.getProperty("sampleRate").toInt(); + } + if (po.hasProperty("hopSize")) { + hopSize = po.getProperty("hopSize").toInt(); + } + if (po.hasProperty("windowLength")) { + windowLength = po.getProperty("windowLength").toInt(); } cerr << "sr = " << sampleRate << ", hop = " << hopSize << ", win = " << windowLength << endl; @@ -550,328 +490,210 @@ reporter->setMessage(RDFImporter::tr("Importing event data from RDF...")); } - SimpleSPARQLQuery::QueryType s = SimpleSPARQLQuery::QueryFromSingleSource; + /* + This function is only used for sparse data (for dense data we + would be in getDataModelsDense instead). - // Our query is intended to retrieve every thing that has a time, - // and every feature type and value associated with a thing that - // has a time. + Our query is intended to retrieve every thing that has a time, + and every feature type and value associated with a thing that + has a time. - // We will then need to refine this big bag of results into a set - // of data models. + We will then need to refine this big bag of results into a set + of data models. - // Results that have different source signals should go into - // different models. + Results that have different source signals should go into + different models. - // Results that have different feature types should go into - // different models. + Results that have different feature types should go into + different models. + */ - // Results that are sparse should go into different models from - // those that are dense (we need to examine the timestamps to - // establish this -- if the timestamps are regular, the results - // are dense -- so we can't do it as we go along, only after - // collecting all results). 
- - // Timed things that have features associated with them should not - // appear directly in any model -- their features should appear - // instead -- and these should be different models from those used - // for timed things that do not have features. - - // As we load the results, we'll push them into a partially - // structured container that maps from source signal (URI as - // string) -> feature type (likewise) -> time -> list of values. - // If the source signal or feature type is unavailable, the empty - // string will do. - - QString prefixes = QString( - " PREFIX event: " - " PREFIX tl: " - " PREFIX mo: " - " PREFIX af: " - " PREFIX rdfs: " - ); - - QString queryString = prefixes + QString( - - " SELECT ?signal ?timed_thing ?timeline ?event_type ?value" - " FROM <%1>" - - " WHERE {" - - " ?signal a mo:Signal ." - - " ?signal mo:time ?interval ." - " ?interval tl:onTimeLine ?timeline ." - " ?time tl:onTimeLine ?timeline ." - " ?timed_thing event:time ?time ." - " ?timed_thing a ?event_type ." - - " OPTIONAL {" - " ?timed_thing af:feature ?value" - " }" - " }" - - ).arg(m_uristring); - - //!!! NB we're using rather old terminology for these things, apparently: - // beginsAt -> start - // onTimeLine -> timeline - - QString timeQueryString = prefixes + QString( - - " SELECT ?time FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:at ?time . " - " } " - - ).arg(m_uristring); - - QString rangeQueryString = prefixes + QString( - - " SELECT ?time ?duration FROM <%1> " - " WHERE { " - " <%2> event:time ?t . " - " ?t tl:beginsAt ?time . " - " ?t tl:duration ?duration . " - " } " - - ).arg(m_uristring); - - QString labelQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> rdfs:label ?label . " - " } " - - ).arg(m_uristring); - - QString textQueryString = prefixes + QString( - - " SELECT ?label FROM <%1> " - " WHERE { " - " <%2> af:text ?label . " - " } " - - ).arg(m_uristring); - - SimpleSPARQLQuery query(s, queryString); - query.setProgressReporter(reporter); - -// cerr << "Query will be: " << queryString.toStdString() << endl; - - SimpleSPARQLQuery::ResultList results = query.execute(); - - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return; - } - - if (query.wasCancelled()) { - m_errorString = "Query cancelled"; - return; - } - - /* - This function is now only used for sparse data (for dense data - we would be in getDataModelsDense instead). - - For sparse data, the determining factors in deciding what model - to use are: Do the features have values? and Do the features - have duration? - - We can run through the results and check off whether we find - values and duration for each of the source+type keys, and then - run through the source+type keys pushing each of the results - into a suitable model. - - Unfortunately, at this point we do not yet have any actual - timing data (time/duration) -- just the time URI. - - What we _could_ do is to create one of each type of model at the - start, for each of the source+type keys, and then push each - feature into the relevant model depending on what we find out - about it. Then return only non-empty models. - */ + Nodes sigs = m_store->match + (Triple(Node(), expand("a"), expand("mo:Signal"))).subjects(); // Map from timeline uri to event type to dimensionality to // presence of duration to model ptr. Whee! 
std::map > > > modelMap; - for (int i = 0; i < results.size(); ++i) { + foreach (Node sig, sigs) { + + Node interval = m_store->complete(Triple(sig, expand("mo:time"), Node())); + if (interval == Node()) continue; - if (i % 4 == 0) { - if (reporter) reporter->setProgress(i/4); - } + Node tl = m_store->complete(Triple(interval, expand("tl:onTimeLine"), Node())); + if (tl == Node()) continue; - QString source = results[i]["signal"].value; - QString timeline = results[i]["timeline"].value; - QString type = results[i]["event_type"].value; - QString thinguri = results[i]["timed_thing"].value; + Nodes times = m_store->match(Triple(Node(), expand("tl:onTimeLine"), tl)).subjects(); - RealTime time; - RealTime duration; + foreach (Node tn, times) { + + Nodes timedThings = m_store->match(Triple(Node(), expand("event:time"), tn)).subjects(); - bool haveTime = false; - bool haveDuration = false; + foreach (Node thing, timedThings) { + + Node typ = m_store->complete(Triple(thing, expand("a"), Node())); + if (typ == Node()) continue; - QString label = ""; - bool text = (type.contains("Text") || type.contains("text")); // Ha, ha + Node valu = m_store->complete(Triple(thing, expand("af:feature"), Node())); - if (text) { - label = SimpleSPARQLQuery::singleResultQuery - (s, textQueryString.arg(thinguri), "label").value; - } + QString source = sig.value; + QString timeline = tl.value; + QString type = typ.value; + QString thinguri = thing.value; - if (label == "") { - label = SimpleSPARQLQuery::singleResultQuery - (s, labelQueryString.arg(thinguri), "label").value; - } + /* + For sparse data, the determining factors in deciding + what model to use are: Do the features have values? + and Do the features have duration? - SimpleSPARQLQuery rangeQuery(s, rangeQueryString.arg(thinguri)); - SimpleSPARQLQuery::ResultList rangeResults = rangeQuery.execute(); - if (!rangeResults.empty()) { -// std::cerr << rangeResults.size() << " range results" << std::endl; - time = RealTime::fromXsdDuration - (rangeResults[0]["time"].value.toStdString()); - duration = RealTime::fromXsdDuration - (rangeResults[0]["duration"].value.toStdString()); -// std::cerr << "duration string " << rangeResults[0]["duration"].value.toStdString() << std::endl; - haveTime = true; - haveDuration = true; - } else { - QString timestring = SimpleSPARQLQuery::singleResultQuery - (s, timeQueryString.arg(thinguri), "time").value; -// std::cerr << "timestring = " << timestring.toStdString() << std::endl; - if (timestring != "") { - time = RealTime::fromXsdDuration(timestring.toStdString()); - haveTime = true; - } - } + We can run through the results and check off whether + we find values and duration for each of the + source+type keys, and then run through the + source+type keys pushing each of the results into a + suitable model. - QString valuestring = results[i]["value"].value; - std::vector values; + Unfortunately, at this point we do not yet have any + actual timing data (time/duration) -- just the time + URI. - if (valuestring != "") { - QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); - for (int j = 0; j < vsl.size(); ++j) { - bool success = false; - float v = vsl[j].toFloat(&success); - if (success) values.push_back(v); - } - } + What we _could_ do is to create one of each type of + model at the start, for each of the source+type + keys, and then push each feature into the relevant + model depending on what we find out about it. Then + return only non-empty models. 
+ */ - int dimensions = 1; - if (values.size() == 1) dimensions = 2; - else if (values.size() > 1) dimensions = 3; + QString label = ""; + bool text = (type.contains("Text") || type.contains("text")); // Ha, ha + bool note = (type.contains("Note") || type.contains("note")); // Guffaw - Model *model = 0; + if (text) { + label = m_store->complete(Triple(thing, expand("af:text"), Node())).value; + } + + if (label == "") { + label = m_store->complete(Triple(thing, expand("rdfs:label"), Node())).value; + } - if (modelMap[timeline][type][dimensions].find(haveDuration) == - modelMap[timeline][type][dimensions].end()) { + RealTime time; + RealTime duration; + + bool haveTime = false; + bool haveDuration = false; + + Node at = m_store->complete(Triple(tn, expand("tl:at"), Node())); + + if (at != Node()) { + time = RealTime::fromXsdDuration(at.value.toStdString()); + haveTime = true; + } else { + //!!! NB we're using rather old terminology for these things, apparently: + // beginsAt -> start + // onTimeLine -> timeline + + Node start = m_store->complete(Triple(tn, expand("tl:beginsAt"), Node())); + Node dur = m_store->complete(Triple(tn, expand("tl:duration"), Node())); + if (start != Node() && dur != Node()) { + time = RealTime::fromXsdDuration + (start.value.toStdString()); + duration = RealTime::fromXsdDuration + (dur.value.toStdString()); + haveTime = haveDuration = true; + } + } + + QString valuestring = valu.value; + std::vector values; + + if (valuestring != "") { + QStringList vsl = valuestring.split(" ", QString::SkipEmptyParts); + for (int j = 0; j < vsl.size(); ++j) { + bool success = false; + float v = vsl[j].toFloat(&success); + if (success) values.push_back(v); + } + } + + int dimensions = 1; + if (values.size() == 1) dimensions = 2; + else if (values.size() > 1) dimensions = 3; + + Model *model = 0; + + if (modelMap[timeline][type][dimensions].find(haveDuration) == + modelMap[timeline][type][dimensions].end()) { /* - std::cerr << "Creating new model: source = " << source.toStdString() - << ", type = " << type.toStdString() << ", dimensions = " + SVDEBUG << "Creating new model: source = " << source << ", type = " << type << ", dimensions = " << dimensions << ", haveDuration = " << haveDuration << ", time = " << time << ", duration = " << duration - << std::endl; + << endl; */ - if (!haveDuration) { + if (!haveDuration) { - if (dimensions == 1) { + if (dimensions == 1) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + } + } else if (dimensions == 2) { + if (text) { + model = new TextModel(m_sampleRate, 1, false); + } else { + model = new SparseTimeValueModel(m_sampleRate, 1, false); + } + } else { + // We don't have a three-dimensional sparse model, + // so use a note model. We do have some logic (in + // extractStructure below) for guessing whether + // this should after all have been a dense model, + // but it's hard to apply it because we don't have + // all the necessary timing data yet... hmm + model = new NoteModel(m_sampleRate, 1, false); + } - if (text) { - - model = new TextModel(m_sampleRate, 1, false); + } else { // haveDuration - } else { - - model = new SparseOneDimensionalModel(m_sampleRate, 1, false); + if (note || (dimensions > 2)) { + model = new NoteModel(m_sampleRate, 1, false); + } else { + // If our units are frequency or midi pitch, we + // should be using a note model... 
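For each timed thing the rewritten sparse importer first tries a point time, tl:at on the time node, and only then a tl:beginsAt plus tl:duration pair, converting both through RealTime::fromXsdDuration. A small sketch of that decision (dataquay header path assumed; RealTime is the helper already used in the hunk):

// Sketch: derive time and optional duration from an event's time node.
#include <dataquay/BasicStore.h>   // assumed header path
#include "base/RealTime.h"         // RealTime::fromXsdDuration, as used in the patch

using namespace Dataquay;

bool timeOfNode(BasicStore *store, Node timeNode,
                RealTime &time, RealTime &duration, bool &haveDuration)
{
    haveDuration = false;

    Node at = store->complete(Triple(timeNode, store->expand("tl:at"), Node()));
    if (at != Node()) {
        time = RealTime::fromXsdDuration(at.value.toStdString());    // point event
        return true;
    }

    Node start = store->complete(Triple(timeNode, store->expand("tl:beginsAt"), Node()));
    Node dur   = store->complete(Triple(timeNode, store->expand("tl:duration"), Node()));
    if (start != Node() && dur != Node()) {
        time         = RealTime::fromXsdDuration(start.value.toStdString());
        duration     = RealTime::fromXsdDuration(dur.value.toStdString());
        haveDuration = true;                                          // interval event
        return true;
    }
    return false;   // no usable timing information
}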
hm + model = new RegionModel(m_sampleRate, 1, false); + } } - } else if (dimensions == 2) { + model->setRDFTypeURI(type); - if (text) { - - model = new TextModel(m_sampleRate, 1, false); - - } else { - - model = new SparseTimeValueModel(m_sampleRate, 1, false); + if (m_audioModelMap.find(source) != m_audioModelMap.end()) { + std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; + model->setSourceModel(m_audioModelMap[source]); } - } else { + QString title = m_store->complete + (Triple(typ, expand("dc:title"), Node())).value; + if (title == "") { + // take it from the end of the event type + title = type; + title.replace(QRegExp("^.*[/#]"), ""); + } + model->setObjectName(title); - // We don't have a three-dimensional sparse model, - // so use a note model. We do have some logic (in - // extractStructure below) for guessing whether - // this should after all have been a dense model, - // but it's hard to apply it because we don't have - // all the necessary timing data yet... hmm - - model = new NoteModel(m_sampleRate, 1, false); + modelMap[timeline][type][dimensions][haveDuration] = model; + models.push_back(model); } - } else { // haveDuration + model = modelMap[timeline][type][dimensions][haveDuration]; - if (dimensions == 1 || dimensions == 2) { - - // If our units are frequency or midi pitch, we - // should be using a note model... hm - - model = new RegionModel(m_sampleRate, 1, false); - - } else { - - // We don't have a three-dimensional sparse model, - // so use a note model. We do have some logic (in - // extractStructure below) for guessing whether - // this should after all have been a dense model, - // but it's hard to apply it because we don't have - // all the necessary timing data yet... hmm - - model = new NoteModel(m_sampleRate, 1, false); + if (model) { + long ftime = RealTime::realTime2Frame(time, m_sampleRate); + long fduration = RealTime::realTime2Frame(duration, m_sampleRate); + fillModel(model, ftime, fduration, haveDuration, values, label); } } - - model->setRDFTypeURI(type); - - if (m_audioModelMap.find(source) != m_audioModelMap.end()) { - std::cerr << "source model for " << model << " is " << m_audioModelMap[source] << std::endl; - model->setSourceModel(m_audioModelMap[source]); - } - - QString titleQuery = QString - ( - " PREFIX dc: " - " SELECT ?title " - " FROM <%1> " - " WHERE { " - " <%2> dc:title ?title . 
" - " } " - ).arg(m_uristring).arg(type); - QString title = SimpleSPARQLQuery::singleResultQuery - (s, titleQuery, "title").value; - if (title == "") { - // take it from the end of the event type - title = type; - title.replace(QRegExp("^.*[/#]"), ""); - } - model->setObjectName(title); - - modelMap[timeline][type][dimensions][haveDuration] = model; - models.push_back(model); - } - - model = modelMap[timeline][type][dimensions][haveDuration]; - - if (model) { - long ftime = RealTime::realTime2Frame(time, m_sampleRate); - long fduration = RealTime::realTime2Frame(duration, m_sampleRate); - fillModel(model, ftime, fduration, haveDuration, values, label); } } } @@ -884,7 +706,7 @@ std::vector &values, QString label) { -// std::cerr << "RDFImporterImpl::fillModel: adding point at frame " << ftime << std::endl; +// SVDEBUG << "RDFImporterImpl::fillModel: adding point at frame " << ftime << endl; SparseOneDimensionalModel *sodm = dynamic_cast(model); @@ -985,33 +807,34 @@ { bool haveAudio = false; bool haveAnnotations = false; + bool haveRDF = false; - // This query is not expected to return any values, but if it - // executes successfully (leaving no error in the error string) - // then we know we have RDF - SimpleSPARQLQuery q(SimpleSPARQLQuery::QueryFromSingleSource, - QString(" SELECT ?x FROM <%1> WHERE { ?x } ") - .arg(url)); - - SimpleSPARQLQuery::ResultList r = q.execute(); - if (!q.isOK()) { - SimpleSPARQLQuery::closeSingleSource(url); + BasicStore *store = 0; + + // This is not expected to return anything useful, but if it does + // anything at all then we know we have RDF + try { + //!!! non-local document? + store = BasicStore::load(QUrl(url)); + Triple t = store->matchOnce(Triple()); + if (t != Triple()) haveRDF = true; + } catch (std::exception &e) { + // nothing; haveRDF will be false so the next bit catches it + } + + if (!haveRDF) { + delete store; return NotRDF; } + store->addPrefix("mo", Uri("http://purl.org/ontology/mo/")); + store->addPrefix("event", Uri("http://purl.org/NET/c4dm/event.owl#")); + store->addPrefix("af", Uri("http://purl.org/ontology/af/")); + // "MO-conformant" structure for audio files - SimpleSPARQLQuery::Value value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?url a mo:AudioFile } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + Node n = store->complete(Triple(Node(), Uri("a"), store->expand("mo:AudioFile"))); + if (n != Node() && n.type == Node::URI) { haveAudio = true; @@ -1021,59 +844,37 @@ // (which is not properly in conformance with the Music // Ontology) - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX mo: " - " SELECT ?url FROM <%1> " - " WHERE { ?signal a mo:Signal ; mo:available_as ?url } " - ).arg(url), - "url"); - - if (value.type == SimpleSPARQLQuery::URIValue) { - haveAudio = true; + Nodes sigs = store->match(Triple(Node(), Uri("a"), store->expand("mo:Signal"))).subjects(); + foreach (Node sig, sigs) { + Node aa = store->complete(Triple(sig, store->expand("mo:available_as"), Node())); + if (aa != Node()) { + haveAudio = true; + break; + } } } - std::cerr << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " - << haveAudio << std::endl; + SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAudio = " + << haveAudio << endl; - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" 
PREFIX event: " - " SELECT ?thing FROM <%1> " - " WHERE { ?thing event:time ?time } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + // can't call complete() with two Nothing nodes + n = store->matchOnce(Triple(Node(), store->expand("event:time"), Node())).c; + if (n != Node()) { haveAnnotations = true; } if (!haveAnnotations) { - - value = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromSingleSource, - QString - (" PREFIX af: " - " SELECT ?thing FROM <%1> " - " WHERE { ?signal af:signal_feature ?thing } " - ).arg(url), - "thing"); - - if (value.type == SimpleSPARQLQuery::URIValue) { + // can't call complete() with two Nothing nodes + n = store->matchOnce(Triple(Node(), store->expand("af:signal_feature"), Node())).c; + if (n != Node()) { haveAnnotations = true; } } - std::cerr << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " - << haveAnnotations << std::endl; + SVDEBUG << "NOTE: RDFImporter::identifyDocumentType: haveAnnotations = " + << haveAnnotations << endl; - SimpleSPARQLQuery::closeSingleSource(url); + delete store; if (haveAudio) { if (haveAnnotations) { @@ -1092,25 +893,3 @@ return OtherRDFDocument; } -void -RDFImporterImpl::loadPrefixes(ProgressReporter *reporter) -{ - return; -//!!! - if (m_prefixesLoaded) return; - const char *prefixes[] = { - "http://purl.org/NET/c4dm/event.owl", - "http://purl.org/NET/c4dm/timeline.owl", - "http://purl.org/ontology/mo/", - "http://purl.org/ontology/af/", - "http://www.w3.org/2000/01/rdf-schema", - "http://purl.org/dc/elements/1.1/", - }; - for (size_t i = 0; i < sizeof(prefixes)/sizeof(prefixes[0]); ++i) { - CachedFile cf(prefixes[i], reporter, "application/rdf+xml"); - if (!cf.isOK()) continue; - SimpleSPARQLQuery::addSourceToModel - (QUrl::fromLocalFile(cf.getLocalFilename()).toString()); - } - m_prefixesLoaded = true; -} diff -r 4efa7429cd85 -r c10cb8782576 rdf/RDFTransformFactory.cpp --- a/rdf/RDFTransformFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/RDFTransformFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -4,7 +4,7 @@ Sonic Visualiser An audio file viewer and annotation editor. Centre for Digital Music, Queen Mary, University of London. - This file copyright 2008 QMUL. + This file copyright 2008-2012 QMUL. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -24,7 +24,6 @@ #include #include -#include "SimpleSPARQLQuery.h" #include "PluginRDFIndexer.h" #include "PluginRDFDescription.h" #include "base/ProgressReporter.h" @@ -32,10 +31,19 @@ #include "transform/TransformFactory.h" +#include +#include + using std::cerr; using std::endl; -typedef const unsigned char *STR; // redland's expected string type +using Dataquay::Uri; +using Dataquay::Node; +using Dataquay::Nodes; +using Dataquay::Triple; +using Dataquay::Triples; +using Dataquay::BasicStore; +using Dataquay::PropertyObject; class RDFTransformFactoryImpl @@ -53,6 +61,7 @@ static QString writeTransformToRDF(const Transform &, QString); protected: + BasicStore *m_store; QString m_urlString; QString m_errorString; bool m_isRDF; @@ -108,14 +117,27 @@ } RDFTransformFactoryImpl::RDFTransformFactoryImpl(QString url) : + m_store(new BasicStore), m_urlString(url), m_isRDF(false) { + //!!! retrieve data if remote... 
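The identifyDocumentType() changes above replace the old "run a trivial SPARQL query and see if it errors" test with a Dataquay probe: load the document into a local BasicStore and check whether any triple at all comes back. A condensed, standalone form of that probe might look like the following sketch. The helper name and the header path are assumptions; the calls themselves (BasicStore::load, matchOnce, Triple) are the ones used in the patch, and like the code above this assumes the URL is something the store can load directly.

#include <dataquay/BasicStore.h>   // assumed header location
#include <QString>
#include <QUrl>
#include <exception>

using Dataquay::BasicStore;
using Dataquay::Triple;

static bool looksLikeRDF(QString url)
{
    BasicStore *store = 0;
    bool haveRDF = false;
    try {
        store = BasicStore::load(QUrl(url));
        // matchOnce with an all-wildcard Triple returns some triple if the
        // parse produced anything at all
        if (store->matchOnce(Triple()) != Triple()) haveRDF = true;
    } catch (std::exception &) {
        // parse or retrieval failure: treat as not RDF
    }
    delete store;
    return haveRDF;
}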
then + m_store->addPrefix("vamp", Uri("http://purl.org/ontology/vamp/")); + try { + QUrl qurl; + if (url.startsWith("file:")) { + qurl = QUrl(url); + } else { + qurl = QUrl::fromLocalFile(url); + } + m_store->import(qurl, BasicStore::ImportIgnoreDuplicates); + m_isRDF = true; + } catch (...) { } } RDFTransformFactoryImpl::~RDFTransformFactoryImpl() { - SimpleSPARQLQuery::closeSingleSource(m_urlString); + delete m_store; } bool @@ -143,60 +165,31 @@ std::map uriTransformMap; - QString query = - " PREFIX vamp: " - - " SELECT ?transform ?plugin " - - " FROM <%2> " - - " WHERE { " - " ?transform a vamp:Transform ; " - " vamp:plugin ?plugin . " - " } "; - - SimpleSPARQLQuery transformsQuery - (SimpleSPARQLQuery::QueryFromSingleSource, query.arg(m_urlString)); - - SimpleSPARQLQuery::ResultList transformResults = transformsQuery.execute(); - - if (!transformsQuery.isOK()) { - m_errorString = transformsQuery.getErrorString(); - return transforms; - } - - m_isRDF = true; - - if (transformResults.empty()) { - cerr << "RDFTransformFactory: NOTE: No RDF/TTL transform descriptions found in document at <" << m_urlString.toStdString() << ">" << endl; - return transforms; - } - - // There are various queries we need to make that might include - // data from either the transform RDF or the model accumulated - // from plugin descriptions. For example, the transform RDF may - // specify the output's true URI, or it might have a blank node or - // some other URI with the appropriate vamp:identifier included in - // the file. To cover both cases, we need to add the file itself - // into the model and always query the model using the transform - // URI rather than querying the file itself subsequently. - - SimpleSPARQLQuery::addSourceToModel(m_urlString); + Nodes tnodes = m_store->match + (Triple(Node(), Uri("a"), m_store->expand("vamp:Transform"))).subjects(); PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); - for (int i = 0; i < transformResults.size(); ++i) { + foreach (Node tnode, tnodes) { - SimpleSPARQLQuery::KeyValueMap &result = transformResults[i]; + Node pnode = m_store->complete + (Triple(tnode, m_store->expand("vamp:plugin"), Node())); - QString transformUri = result["transform"].value; - QString pluginUri = result["plugin"].value; + if (pnode == Node()) { + cerr << "RDFTransformFactory: WARNING: No vamp:plugin for " + << "vamp:Transform node " << tnode + << ", skipping this transform" << endl; + continue; + } + + QString transformUri = tnode.value; + QString pluginUri = pnode.value; QString pluginId = indexer->getIdForPluginURI(pluginUri); if (pluginId == "") { cerr << "RDFTransformFactory: WARNING: Unknown plugin <" - << pluginUri.toStdString() << "> for transform <" - << transformUri.toStdString() << ">, skipping this transform" + << pluginUri << "> for transform <" + << transformUri << ">, skipping this transform" << endl; continue; } @@ -214,12 +207,7 @@ uriTransformMap[transformUri] = transform; - // We have to do this a very long way round, to work around - // rasqal's current inability to handle correctly more than one - // OPTIONAL graph in a query - static const char *optionals[] = { - "output", "program", "summary_type", "step_size", @@ -234,63 +222,40 @@ QString optional = optionals[j]; - QString queryTemplate = - " PREFIX vamp: " - - " SELECT ?%1 " - - " WHERE { " - " <%2> vamp:%1 ?%1 " - " } "; - - SimpleSPARQLQuery query - (SimpleSPARQLQuery::QueryFromModel, - queryTemplate.arg(optional).arg(transformUri)); - - SimpleSPARQLQuery::ResultList results = query.execute(); + 
Node onode = m_store->complete + (Triple(Uri(transformUri), + m_store->expand(QString("vamp:") + optional), Node())); - if (!query.isOK()) { - m_errorString = query.getErrorString(); - return transforms; - } + if (onode.type != Node::Literal) continue; - if (results.empty()) continue; - - for (int k = 0; k < results.size(); ++k) { - - const SimpleSPARQLQuery::Value &v = results[k][optional]; - - if (v.type == SimpleSPARQLQuery::LiteralValue) { - - if (optional == "program") { - transform.setProgram(v.value); - } else if (optional == "summary_type") { - transform.setSummaryType - (transform.stringToSummaryType(v.value)); - } else if (optional == "step_size") { - transform.setStepSize(v.value.toUInt()); - } else if (optional == "block_size") { - transform.setBlockSize(v.value.toUInt()); - } else if (optional == "window_type") { - cerr << "NOTE: can't handle window type yet (value is \"" - << v.value.toStdString() << "\")" << endl; - } else if (optional == "sample_rate") { - transform.setSampleRate(v.value.toFloat()); - } else if (optional == "start") { - transform.setStartTime - (RealTime::fromXsdDuration(v.value.toStdString())); - } else if (optional == "duration") { - transform.setDuration - (RealTime::fromXsdDuration(v.value.toStdString())); - } else { - cerr << "RDFTransformFactory: ERROR: Inconsistent optionals lists (unexpected optional \"" << optional.toStdString() << "\"" << endl; - } - } + if (optional == "program") { + transform.setProgram(onode.value); + } else if (optional == "summary_type") { + transform.setSummaryType + (transform.stringToSummaryType(onode.value)); + } else if (optional == "step_size") { + transform.setStepSize(onode.value.toUInt()); + } else if (optional == "block_size") { + transform.setBlockSize(onode.value.toUInt()); + } else if (optional == "window_type") { + transform.setWindowType + (Window::getTypeForName + (onode.value.toLower().toStdString())); + } else if (optional == "sample_rate") { + transform.setSampleRate(onode.value.toFloat()); + } else if (optional == "start") { + transform.setStartTime + (RealTime::fromXsdDuration(onode.value.toStdString())); + } else if (optional == "duration") { + transform.setDuration + (RealTime::fromXsdDuration(onode.value.toStdString())); + } else { + cerr << "RDFTransformFactory: ERROR: Inconsistent optionals lists (unexpected optional \"" << optional << "\"" << endl; } } cerr << "RDFTransformFactory: NOTE: Transform is: " << endl; - cerr << transform.toXmlString().toStdString() << endl; + cerr << transform.toXmlString() << endl; transforms.push_back(transform); } @@ -302,33 +267,36 @@ RDFTransformFactoryImpl::setOutput(Transform &transform, QString transformUri) { - SimpleSPARQLQuery::Value outputValue = - SimpleSPARQLQuery::singleResultQuery - (SimpleSPARQLQuery::QueryFromModel, - QString - ( - " PREFIX vamp: " - - " SELECT ?output_id " + Node outputNode = m_store->complete + (Triple(Uri(transformUri), m_store->expand("vamp:output"), Node())); + + if (outputNode == Node()) return true; - " WHERE { " - " <%1> vamp:output ?output . 
" - " ?output vamp:identifier ?output_id " - " } " - ) - .arg(transformUri), - "output_id"); - - if (outputValue.type == SimpleSPARQLQuery::NoValue) { - return true; + if (outputNode.type != Node::URI && outputNode.type != Node::Blank) { + m_errorString = QString("vamp:output for output of transform <%1> is not a URI or blank node").arg(transformUri); + return false; } - - if (outputValue.type != SimpleSPARQLQuery::LiteralValue) { + + // Now, outputNode might be the subject of a triple within m_store + // that tells us the vamp:identifier, or it might be the subject + // of a triple within the indexer that tells us it + + Node identNode = m_store->complete + (Triple(outputNode, m_store->expand("vamp:identifier"), Node())); + + if (identNode == Node()) { + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + identNode = index->complete + (Triple(outputNode, index->expand("vamp:identifier"), Node())); + } + + if (identNode == Node() || identNode.type != Node::Literal) { m_errorString = QString("No vamp:identifier found for output of transform <%1>, or vamp:identifier is not a literal").arg(transformUri); return false; } - transform.setOutput(outputValue.value); + transform.setOutput(identNode.value); return true; } @@ -338,43 +306,48 @@ RDFTransformFactoryImpl::setParameters(Transform &transform, QString transformUri) { - SimpleSPARQLQuery paramQuery - (SimpleSPARQLQuery::QueryFromModel, - QString - ( - " PREFIX vamp: " - - " SELECT ?param_id ?param_value " - - " WHERE { " - " <%1> vamp:parameter_binding ?binding . " - " ?binding vamp:parameter ?param ; " - " vamp:value ?param_value . " - " ?param vamp:identifier ?param_id " - " } " - ) - .arg(transformUri)); + Nodes bindings = m_store->match + (Triple(Uri(transformUri), m_store->expand("vamp:parameter_binding"), Node())).objects(); - SimpleSPARQLQuery::ResultList paramResults = paramQuery.execute(); - - if (!paramQuery.isOK()) { - m_errorString = paramQuery.getErrorString(); - return false; - } - - if (paramQuery.wasCancelled()) { - m_errorString = "Query cancelled"; - return false; - } - - for (int j = 0; j < paramResults.size(); ++j) { + foreach (Node binding, bindings) { + + Node paramNode = m_store->complete + (Triple(binding, m_store->expand("vamp:parameter"), Node())); + + if (paramNode == Node()) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:parameter for binding " << binding << endl; + continue; + } + + Node valueNode = m_store->complete + (Triple(binding, m_store->expand("vamp:value"), Node())); + + if (paramNode == Node()) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:value for binding " << binding << endl; + continue; + } - QString paramId = paramResults[j]["param_id"].value; - QString paramValue = paramResults[j]["param_value"].value; + // As with output above, paramNode might be the subject of a + // triple within m_store that tells us the vamp:identifier, or + // it might be the subject of a triple within the indexer that + // tells us it + + Node idNode = m_store->complete + (Triple(paramNode, m_store->expand("vamp:identifier"), Node())); + + if (idNode == Node()) { + PluginRDFIndexer *indexer = PluginRDFIndexer::getInstance(); + const BasicStore *index = indexer->getIndex(); + idNode = index->complete + (Triple(paramNode, index->expand("vamp:identifier"), Node())); + } + + if (idNode == Node() || idNode.type != Node::Literal) { + cerr << "RDFTransformFactoryImpl::setParameters: No vamp:identifier for parameter " << paramNode << endl; + 
continue; + } - if (paramId == "" || paramValue == "") continue; - - transform.setParameter(paramId, paramValue.toFloat()); + transform.setParameter(idNode.value, valueNode.value.toFloat()); } return true; @@ -398,7 +371,7 @@ s << uri << " a vamp:Transform ;" << endl; s << " vamp:plugin <" << QUrl(pluginUri).toEncoded().data() << "> ;" << endl; } else { - std::cerr << "WARNING: RDFTransformFactory::writeTransformToRDF: No plugin URI available for plugin id \"" << pluginId.toStdString() << "\", writing synthetic plugin and library resources" << std::endl; + std::cerr << "WARNING: RDFTransformFactory::writeTransformToRDF: No plugin URI available for plugin id \"" << pluginId << "\", writing synthetic plugin and library resources" << std::endl; QString type, soname, label; PluginIdentifier::parseIdentifier(pluginId, type, soname, label); s << uri << "_plugin a vamp:Plugin ;" << endl; @@ -415,7 +388,7 @@ QString outputUri = description.getOutputUri(outputId); if (transform.getOutput() != "" && outputUri == "") { - std::cerr << "WARNING: RDFTransformFactory::writeTransformToRDF: No output URI available for transform output id \"" << transform.getOutput().toStdString() << "\", writing a synthetic output resource" << std::endl; + std::cerr << "WARNING: RDFTransformFactory::writeTransformToRDF: No output URI available for transform output id \"" << transform.getOutput() << "\", writing a synthetic output resource" << std::endl; } if (transform.getStepSize() != 0) { diff -r 4efa7429cd85 -r c10cb8782576 rdf/SimpleSPARQLQuery.cpp --- a/rdf/SimpleSPARQLQuery.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/SimpleSPARQLQuery.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -23,7 +23,12 @@ #include -#include +// Rather than including -- for some reason redland.h +// includes , while the rasqal header actually gets +// installed as which breaks the inclusion all over +// the place unless a very clever include path is set +#include +#include //#define DEBUG_SIMPLE_SPARQL_QUERY 1 @@ -157,7 +162,7 @@ } QString err; if (!loadUri(model, fromUri, err)) { - std::cerr << "SimpleSPARQLQuery: ERROR: Failed to parse into new model: " << err.toStdString() << std::endl; + std::cerr << "SimpleSPARQLQuery: ERROR: Failed to parse into new model: " << err << std::endl; librdf_free_model(model); librdf_free_storage(storage); m_ownModelUris[fromUri] = 0; @@ -172,17 +177,17 @@ WredlandWorldWrapper::freeModel(QString forUri) { #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "SimpleSPARQLQuery::freeModel: Model uri = \"" << forUri.toStdString() << "\"" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::freeModel: Model uri = \"" << forUri << "\"" << endl; #endif QMutexLocker locker(&m_mutex); if (forUri == "") { - std::cerr << "SimpleSPARQLQuery::freeModel: ERROR: Can't free default model" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::freeModel: ERROR: Can't free default model" << endl; return; } if (m_ownModelUris.find(forUri) == m_ownModelUris.end()) { #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "SimpleSPARQLQuery::freeModel: NOTE: Unknown or already-freed model (uri = \"" << forUri.toStdString() << "\")" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::freeModel: NOTE: Unknown or already-freed model (uri = \"" << forUri << "\")" << endl; #endif return; } @@ -215,7 +220,7 @@ } #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "About to parse \"" << uri.toStdString() << "\"" << std::endl; + std::cerr << "About to parse \"" << uri << "\"" << std::endl; #endif Profiler p("SimpleSPARQLQuery: Parse URI into LIBRDF model"); @@ -373,7 +378,7 @@ 
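Both setOutput() and setParameters() in the RDFTransformFactory hunks above resolve a vamp:identifier in two steps: first against the transform document's own store, then, if that yields nothing, against the plugin description model held by PluginRDFIndexer. (In passing, the second guard in the parameter loop appears to re-test paramNode where valueNode was presumably intended, so a missing vamp:value would be reported as a missing vamp:parameter.) A hypothetical helper capturing the shared lookup, using only calls that appear in the patch:

#include "rdf/PluginRDFIndexer.h"

using Dataquay::BasicStore;
using Dataquay::Node;
using Dataquay::Triple;

// Hypothetical helper: resolve vamp:identifier for an output or parameter
// node, falling back to the plugin index when the transform document does
// not carry it.  Caller checks id.type == Node::Literal before using id.value.
static Node identifierFor(const BasicStore *doc, Node subject)
{
    Node id = doc->complete(
        Triple(subject, doc->expand("vamp:identifier"), Node()));

    if (id == Node()) {
        const BasicStore *index = PluginRDFIndexer::getInstance()->getIndex();
        id = index->complete(
            Triple(subject, index->expand("vamp:identifier"), Node()));
    }

    return id;
}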
#ifdef DEBUG_SIMPLE_SPARQL_QUERY if (m_errorString != "") { std::cerr << "SimpleSPARQLQuery::execute: error returned: \"" - << m_errorString.toStdString() << "\"" << std::endl; + << m_errorString << "\"" << std::endl; } #endif } @@ -382,7 +387,7 @@ SimpleSPARQLQuery::Impl::executeDirectParser() { #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "SimpleSPARQLQuery::executeDirectParser: Query is: \"" << m_query.toStdString() << "\"" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::executeDirectParser: Query is: \"" << m_query << "\"" << endl; #endif ResultList list; @@ -393,13 +398,13 @@ QString fromUri; if (fromRE.indexIn(m_query) < 0) { - std::cerr << "SimpleSPARQLQuery::executeDirectParser: Query contains no FROM clause, nothing to parse from" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::executeDirectParser: Query contains no FROM clause, nothing to parse from" << endl; return list; } else { fromUri = fromRE.cap(1); #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "SimpleSPARQLQuery::executeDirectParser: FROM URI is <" - << fromUri.toStdString() << ">" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::executeDirectParser: FROM URI is <" + << fromUri << ">" << endl; #endif } @@ -410,7 +415,7 @@ SimpleSPARQLQuery::Impl::executeDatastore() { #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << "SimpleSPARQLQuery::executeDatastore: Query is: \"" << m_query.toStdString() << "\"" << std::endl; + SVDEBUG << "SimpleSPARQLQuery::executeDatastore: Query is: \"" << m_query << "\"" << endl; #endif ResultList list; @@ -431,7 +436,7 @@ if (counter.find(m_query) == counter.end()) counter[m_query] = 1; else ++counter[m_query]; std::cerr << "Counter for this query: " << counter[m_query] << std::endl; - std::cerr << "Base URI is: \"" << modelUri.toStdString() << "\"" << std::endl; + std::cerr << "Base URI is: \"" << modelUri << "\"" << std::endl; #endif { @@ -492,7 +497,7 @@ if (!node) { #ifdef DEBUG_SIMPLE_SPARQL_QUERY - std::cerr << i << ". " << key.toStdString() << " -> (nil)" << std::endl; + std::cerr << i << ". " << key << " -> (nil)" << std::endl; #endif resultmap[key] = Value(); continue; @@ -537,7 +542,7 @@ } #ifdef DEBUG_SIMPLE_SPARQL_QUERY - cerr << i << ". " << key.toStdString() << " -> " << text.toStdString() << " (type " << type << ")" << endl; + cerr << i << ". 
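Throughout this and the surrounding files, logging of QString values switches from std::cerr plus .toStdString() to the SVDEBUG stream provided by base/Debug.h (added to svcore.pro later in this patch). The real Debug.h is not shown in the changeset; the stand-in below is purely hypothetical and exists only to illustrate why QStrings can now be streamed without conversion.

#include <QString>
#include <QTextStream>
#include <cstdio>

// Purely hypothetical stand-in for the kind of stream base/Debug.h provides;
// the class name is invented and the real implementation may differ entirely.
class SketchDebugStream {
public:
    SketchDebugStream() : m_out(stderr) { }
    ~SketchDebugStream() { m_out << "\n"; }
    SketchDebugStream &operator<<(const QString &s) { m_out << s; return *this; }
    SketchDebugStream &operator<<(const char *s)    { m_out << s; return *this; }
    SketchDebugStream &operator<<(long n)           { m_out << n; return *this; }
private:
    QTextStream m_out;  // accepts QString directly, unlike std::ostream
};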
" << key << " -> " << text << " (type " << type << ")" << endl; #endif resultmap[key] = Value(type, text); @@ -570,7 +575,7 @@ librdf_free_query(query); #ifdef DEBUG_SIMPLE_SPARQL_QUERY - cerr << "SimpleSPARQLQuery::executeDatastore: All results retrieved (" << resultCount << " of them)" << endl; + SVDEBUG << "SimpleSPARQLQuery::executeDatastore: All results retrieved (" << resultCount << " of them)" << endl; #endif return list; @@ -593,7 +598,7 @@ } if (!m_redland->loadUriIntoDefaultModel(sourceUri, err)) { - std::cerr << "SimpleSPARQLQuery::addSourceToModel: Failed to add source URI \"" << sourceUri.toStdString() << ": " << err.toStdString() << std::endl; + std::cerr << "SimpleSPARQLQuery::addSourceToModel: Failed to add source URI \"" << sourceUri << ": " << err << std::endl; return false; } return true; @@ -615,7 +620,7 @@ ResultList results = q.execute(); if (!q.isOK()) { cerr << "SimpleSPARQLQuery::singleResultQuery: ERROR: " - << q.getErrorString().toStdString() << endl; + << q.getErrorString() << endl; return Value(); } if (results.empty()) { diff -r 4efa7429cd85 -r c10cb8782576 rdf/SimpleSPARQLQuery.h --- a/rdf/SimpleSPARQLQuery.h Mon Nov 29 12:45:39 2010 +0000 +++ b/rdf/SimpleSPARQLQuery.h Sun Jul 01 11:53:00 2012 +0100 @@ -16,10 +16,14 @@ #ifndef _SIMPLE_SPARQL_QUERY_H_ #define _SIMPLE_SPARQL_QUERY_H_ +#ifdef NOT_DEFINED + #include #include #include +#include "base/Debug.h" + class ProgressReporter; class SimpleSPARQLQuery @@ -109,3 +113,5 @@ }; #endif + +#endif diff -r 4efa7429cd85 -r c10cb8782576 rdf/rdf.pro --- a/rdf/rdf.pro Mon Nov 29 12:45:39 2010 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -TEMPLATE = lib - -SV_UNIT_PACKAGES = redland rasqal raptor vamp-hostsdk -load(../prf/sv.prf) - -CONFIG += sv staticlib qt thread warn_on stl rtti exceptions - -TARGET = svrdf - -DEPENDPATH += . .. -INCLUDEPATH += . .. 
-OBJECTS_DIR = tmp_obj -MOC_DIR = tmp_moc - -# Input -HEADERS += PluginRDFDescription.h \ - PluginRDFIndexer.h \ - RDFExporter.h \ - RDFFeatureWriter.h \ - RDFImporter.h \ - RDFTransformFactory.h \ - SimpleSPARQLQuery.h -SOURCES += PluginRDFDescription.cpp \ - PluginRDFIndexer.cpp \ - RDFExporter.cpp \ - RDFFeatureWriter.cpp \ - RDFImporter.cpp \ - RDFTransformFactory.cpp \ - SimpleSPARQLQuery.cpp - diff -r 4efa7429cd85 -r c10cb8782576 svcore.pro --- a/svcore.pro Mon Nov 29 12:45:39 2010 +0000 +++ b/svcore.pro Sun Jul 01 11:53:00 2012 +0100 @@ -27,6 +27,7 @@ base/AudioPlaySource.h \ base/Clipboard.h \ base/Command.h \ + base/Debug.h \ base/Exceptions.h \ base/LogRange.h \ base/Pitch.h \ @@ -43,6 +44,7 @@ base/RecentFiles.h \ base/Resampler.h \ base/ResizeableBitset.h \ + base/ResourceFinder.h \ base/RingBuffer.h \ base/Scavenger.h \ base/Selection.h \ @@ -50,6 +52,7 @@ base/StorageAdviser.h \ base/StringBits.h \ base/TempDirectory.h \ + base/TempWriteFile.h \ base/TextMatcher.h \ base/Thread.h \ base/UnitDatabase.h \ @@ -60,6 +63,7 @@ SOURCES += base/AudioLevel.cpp \ base/Clipboard.cpp \ base/Command.cpp \ + base/Debug.cpp \ base/Exceptions.cpp \ base/LogRange.cpp \ base/Pitch.cpp \ @@ -74,11 +78,13 @@ base/RealTime.cpp \ base/RecentFiles.cpp \ base/Resampler.cpp \ + base/ResourceFinder.cpp \ base/Selection.cpp \ base/Serialiser.cpp \ base/StorageAdviser.cpp \ base/StringBits.cpp \ base/TempDirectory.cpp \ + base/TempWriteFile.cpp \ base/TextMatcher.cpp \ base/Thread.cpp \ base/UnitDatabase.cpp \ @@ -226,20 +232,18 @@ plugin/api/dssi_alsa_compat.c \ plugin/plugins/SamplePlayer.cpp -HEADERS += rdf/PluginRDFDescription.h \ - rdf/PluginRDFIndexer.h \ +HEADERS += rdf/PluginRDFIndexer.h \ + rdf/PluginRDFDescription.h \ rdf/RDFExporter.h \ rdf/RDFFeatureWriter.h \ rdf/RDFImporter.h \ - rdf/RDFTransformFactory.h \ - rdf/SimpleSPARQLQuery.h -SOURCES += rdf/PluginRDFDescription.cpp \ - rdf/PluginRDFIndexer.cpp \ + rdf/RDFTransformFactory.h +SOURCES += rdf/PluginRDFIndexer.cpp \ + rdf/PluginRDFDescription.cpp \ rdf/RDFExporter.cpp \ rdf/RDFFeatureWriter.cpp \ rdf/RDFImporter.cpp \ - rdf/RDFTransformFactory.cpp \ - rdf/SimpleSPARQLQuery.cpp + rdf/RDFTransformFactory.cpp HEADERS += system/Init.h \ system/System.h diff -r 4efa7429cd85 -r c10cb8782576 system/Init.cpp --- a/system/Init.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/system/Init.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -15,29 +15,14 @@ #include -#ifdef Q_WS_X11 -#include -#include -#include -#include - -static int handle_x11_error(Display *dpy, XErrorEvent *err) -{ - char errstr[256]; - XGetErrorText(dpy, err->error_code, errstr, 256); - if (err->error_code != BadWindow) { - std::cerr << "Sonic Visualiser: X Error: " - << errstr << " " << int(err->error_code) - << "\nin major opcode: " - << int(err->request_code) << std::endl; - } - return 0; -} -#endif +#include #ifdef Q_WS_WIN32 #include + +// required for SetDllDirectory +#define _WIN32_WINNT 0x0502 #include // Set default file open mode to binary @@ -46,6 +31,7 @@ void redirectStderr() { +#ifdef NO_PROBABLY_NOT HANDLE stderrHandle = GetStdHandle(STD_ERROR_HANDLE); if (!stderrHandle) return; @@ -64,18 +50,19 @@ setvbuf(stderr, NULL, _IONBF, 0); } } +#endif } #endif extern void svSystemSpecificInitialisation() { -#ifdef Q_WS_X11 - XSetErrorHandler(handle_x11_error); -#endif - #ifdef Q_WS_WIN32 redirectStderr(); + + // Remove the CWD from the DLL search path, just in case + SetDllDirectory(L""); + putenv("PATH="); #else #endif } diff -r 4efa7429cd85 -r c10cb8782576 system/System.cpp --- 
a/system/System.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/system/System.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -219,7 +219,7 @@ if (elements.size() > 2) unit = elements[2]; int size = elements[1].toInt(); // std::cerr << "have size \"" << size << "\", unit \"" -// << unit.toStdString() << "\"" << std::endl; +// << unit << "\"" << std::endl; if (unit.toLower() == "gb") size = size * 1024; else if (unit.toLower() == "mb") size = size; else if (unit.toLower() == "kb") size = size / 1024; diff -r 4efa7429cd85 -r c10cb8782576 system/System.h --- a/system/System.h Mon Nov 29 12:45:39 2010 +0000 +++ b/system/System.h Sun Jul 01 11:53:00 2012 +0100 @@ -16,6 +16,8 @@ #ifndef _SYSTEM_H_ #define _SYSTEM_H_ +#include "base/Debug.h" + #ifdef _WIN32 #include @@ -59,8 +61,8 @@ int gettimeofday(struct timeval *p, void *tz); } -#define ISNAN isnan -#define ISINF isinf +#define ISNAN _isnan +#define ISINF _isinf #else diff -r 4efa7429cd85 -r c10cb8782576 system/system.pro --- a/system/system.pro Mon Nov 29 12:45:39 2010 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -TEMPLATE = lib - -SV_UNIT_PACKAGES = -load(../prf/sv.prf) - -CONFIG += sv staticlib qt thread warn_on stl rtti exceptions - -QT -= gui - -TARGET = svsystem - -DEPENDPATH += . -INCLUDEPATH += . - -# Input -HEADERS += Init.h System.h -SOURCES += Init.cpp System.cpp diff -r 4efa7429cd85 -r c10cb8782576 transform/CSVFeatureWriter.cpp --- a/transform/CSVFeatureWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/CSVFeatureWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -31,7 +31,8 @@ FileFeatureWriter(SupportOneFilePerTrackTransform | SupportOneFileTotal, "csv"), - m_separator(",") + m_separator(","), + m_sampleTiming(false) { } @@ -49,6 +50,11 @@ p.description = "Column separator for output. 
Default is \",\" (comma)."; p.hasArg = true; pl.push_back(p); + + p.name = "sample-timing"; + p.description = "Show timings as sample frame counts instead of in seconds."; + p.hasArg = false; + pl.push_back(p); return pl; } @@ -58,12 +64,14 @@ { FileFeatureWriter::setParameters(params); - cerr << "CSVFeatureWriter::setParameters" << endl; + SVDEBUG << "CSVFeatureWriter::setParameters" << endl; for (map::iterator i = params.begin(); i != params.end(); ++i) { cerr << i->first << " -> " << i->second << endl; if (i->first == "separator") { m_separator = i->second.c_str(); + } else if (i->first == "sample-timing") { + m_sampleTiming = true; } } } @@ -96,15 +104,29 @@ } } - QString timestamp = features[i].timestamp.toString().c_str(); - timestamp.replace(QRegExp("^ +"), ""); - stream << timestamp; + if (m_sampleTiming) { - if (features[i].hasDuration) { - QString duration = features[i].duration.toString().c_str(); - duration.replace(QRegExp("^ +"), ""); - stream << m_separator << duration; - } + stream << Vamp::RealTime::realTime2Frame + (features[i].timestamp, transform.getSampleRate()); + + if (features[i].hasDuration) { + stream << m_separator; + stream << Vamp::RealTime::realTime2Frame + (features[i].duration, transform.getSampleRate()); + } + + } else { + + QString timestamp = features[i].timestamp.toString().c_str(); + timestamp.replace(QRegExp("^ +"), ""); + stream << timestamp; + + if (features[i].hasDuration) { + QString duration = features[i].duration.toString().c_str(); + duration.replace(QRegExp("^ +"), ""); + stream << m_separator << duration; + } + } if (summaryType != "") { stream << m_separator << summaryType.c_str(); diff -r 4efa7429cd85 -r c10cb8782576 transform/CSVFeatureWriter.h --- a/transform/CSVFeatureWriter.h Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/CSVFeatureWriter.h Sun Jul 01 11:53:00 2012 +0100 @@ -53,6 +53,7 @@ private: QString m_separator; + bool m_sampleTiming; QString m_prevPrintedTrackId; }; diff -r 4efa7429cd85 -r c10cb8782576 transform/FeatureExtractionModelTransformer.cpp --- a/transform/FeatureExtractionModelTransformer.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/FeatureExtractionModelTransformer.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -43,7 +43,7 @@ m_descriptor(0), m_outputFeatureNo(0) { -// std::cerr << "FeatureExtractionModelTransformer::FeatureExtractionModelTransformer: plugin " << pluginId.toStdString() << ", outputName " << m_transform.getOutput().toStdString() << std::endl; +// SVDEBUG << "FeatureExtractionModelTransformer::FeatureExtractionModelTransformer: plugin " << pluginId << ", outputName " << m_transform.getOutput() << endl; QString pluginId = transform.getPluginIdentifier(); @@ -86,9 +86,9 @@ return; } - std::cerr << "Initialising feature extraction plugin with channels = " + SVDEBUG << "Initialising feature extraction plugin with channels = " << channelCount << ", step = " << m_transform.getStepSize() - << ", block = " << m_transform.getBlockSize() << std::endl; + << ", block = " << m_transform.getBlockSize() << endl; if (!m_plugin->initialise(channelCount, m_transform.getStepSize(), @@ -152,7 +152,7 @@ } for (size_t i = 0; i < outputs.size(); ++i) { -// std::cerr << "comparing output " << i << " name \"" << outputs[i].identifier << "\" with expected \"" << m_transform.getOutput().toStdString() << "\"" << std::endl; +// SVDEBUG << "comparing output " << i << " name \"" << outputs[i].identifier << "\" with expected \"" << m_transform.getOutput() << "\"" << endl; if (m_transform.getOutput() == "" || outputs[i].identifier == 
m_transform.getOutput().toStdString()) { m_outputFeatureNo = i; @@ -176,9 +176,8 @@ { DenseTimeValueModel *input = getConformingInput(); -// std::cerr << "FeatureExtractionModelTransformer: output sample type " -// << m_descriptor->sampleType << std::endl; - +// std::cerr << "FeatureExtractionModelTransformer::createOutputModel: sample type " << m_descriptor->sampleType << ", rate " << m_descriptor->sampleRate << std::endl; + PluginRDFDescription description(m_transform.getPluginIdentifier()); QString outputId = m_transform.getOutput(); @@ -201,7 +200,7 @@ size_t modelRate = input->getSampleRate(); size_t modelResolution = 1; - + switch (m_descriptor->sampleType) { case Vamp::Plugin::OutputDescriptor::VariableSampleRate: @@ -378,7 +377,7 @@ FeatureExtractionModelTransformer::~FeatureExtractionModelTransformer() { -// std::cerr << "FeatureExtractionModelTransformer::~FeatureExtractionModelTransformer()" << std::endl; +// SVDEBUG << "FeatureExtractionModelTransformer::~FeatureExtractionModelTransformer()" << endl; delete m_plugin; delete m_descriptor; } @@ -386,12 +385,12 @@ DenseTimeValueModel * FeatureExtractionModelTransformer::getConformingInput() { -// std::cerr << "FeatureExtractionModelTransformer::getConformingInput: input model is " << getInputModel() << std::endl; +// SVDEBUG << "FeatureExtractionModelTransformer::getConformingInput: input model is " << getInputModel() << endl; DenseTimeValueModel *dtvm = dynamic_cast(getInputModel()); if (!dtvm) { - std::cerr << "FeatureExtractionModelTransformer::getConformingInput: WARNING: Input model is not conformable to DenseTimeValueModel" << std::endl; + SVDEBUG << "FeatureExtractionModelTransformer::getConformingInput: WARNING: Input model is not conformable to DenseTimeValueModel" << endl; } return dtvm; } @@ -405,7 +404,7 @@ if (!m_output) return; while (!input->isReady() && !m_abandoned) { - std::cerr << "FeatureExtractionModelTransformer::run: Waiting for input model to be ready..." << std::endl; + SVDEBUG << "FeatureExtractionModelTransformer::run: Waiting for input model to be ready..." 
<< endl; usleep(500000); } if (m_abandoned) return; @@ -487,6 +486,8 @@ imaginaries = new float[blockSize/2 + 1]; } + QString error = ""; + while (!m_abandoned) { if (frequencyDomain) { @@ -497,9 +498,9 @@ contextStart + contextDuration) break; } -// std::cerr << "FeatureExtractionModelTransformer::run: blockFrame " +// SVDEBUG << "FeatureExtractionModelTransformer::run: blockFrame " // << blockFrame << ", endFrame " << endFrame << ", blockSize " -// << blockSize << std::endl; +// << blockSize << endl; long completion = (((blockFrame - contextStart) / stepSize) * 99) / @@ -515,6 +516,12 @@ buffers[ch][i*2] = reals[i]; buffers[ch][i*2+1] = imaginaries[i]; } + error = fftModels[ch]->getError(); + if (error != "") { + std::cerr << "FeatureExtractionModelTransformer::run: Abandoning, error is " << error << std::endl; + m_abandoned = true; + m_message = error; + } } } else { getFrames(channelCount, blockFrame, blockSize, buffers); @@ -628,8 +635,11 @@ { size_t inputRate = m_input.getModel()->getSampleRate(); -// std::cerr << "FeatureExtractionModelTransformer::addFeature(" -// << blockFrame << ")" << std::endl; +// std::cerr << "FeatureExtractionModelTransformer::addFeature: blockFrame = " +// << blockFrame << ", hasTimestamp = " << feature.hasTimestamp +// << ", timestamp = " << feature.timestamp << ", hasDuration = " +// << feature.hasDuration << ", duration = " << feature.duration +// << std::endl; int binCount = 1; if (m_descriptor->hasFixedBinCount) { @@ -769,7 +779,7 @@ model->setColumn(frame / model->getResolution(), values); } else { - std::cerr << "FeatureExtractionModelTransformer::addFeature: Unknown output model type!" << std::endl; + SVDEBUG << "FeatureExtractionModelTransformer::addFeature: Unknown output model type!" << endl; } } @@ -781,8 +791,8 @@ binCount = m_descriptor->binCount; } -// std::cerr << "FeatureExtractionModelTransformer::setCompletion(" -// << completion << ")" << std::endl; +// SVDEBUG << "FeatureExtractionModelTransformer::setCompletion(" +// << completion << ")" << endl; if (isOutput()) { diff -r 4efa7429cd85 -r c10cb8782576 transform/FeatureWriter.h --- a/transform/FeatureWriter.h Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/FeatureWriter.h Sun Jul 01 11:53:00 2012 +0100 @@ -85,6 +85,16 @@ const Vamp::Plugin::FeatureList &features, std::string summaryType = "") = 0; + /** + * Throw FailedToOpenOutputStream if we can already tell that we + * will be unable to write to the output file. This is called to + * test the output stream before processing begins. The writer + * may legitimately succeed here but still fail later -- this is + * really an optimisation to ensure that easy-to-recognise failure + * cases fail early. 
+ */ + virtual void testOutputFile(QString trackId, TransformId transformId) { } + virtual void flush() { } // whatever the last stream was virtual void finish() = 0; diff -r 4efa7429cd85 -r c10cb8782576 transform/FileFeatureWriter.cpp --- a/transform/FileFeatureWriter.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/FileFeatureWriter.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -46,7 +46,7 @@ } else if (m_support & SupportOneFileTotal) { m_singleFileName = QString("output.%1").arg(m_extension); } else { - cerr << "FileFeatureWriter::FileFeatureWriter: ERROR: Invalid support specification " << support << endl; + SVDEBUG << "FileFeatureWriter::FileFeatureWriter: ERROR: Invalid support specification " << support << endl; } } } @@ -59,9 +59,11 @@ m_streams.erase(m_streams.begin()); } while (!m_files.empty()) { - cerr << "FileFeatureWriter::~FileFeatureWriter: NOTE: Closing feature file \"" - << m_files.begin()->second->fileName().toStdString() << "\"" << endl; - delete m_files.begin()->second; + if (m_files.begin()->second) { + SVDEBUG << "FileFeatureWriter::~FileFeatureWriter: NOTE: Closing feature file \"" + << m_files.begin()->second->fileName() << "\"" << endl; + delete m_files.begin()->second; + } m_files.erase(m_files.begin()); } } @@ -88,7 +90,11 @@ if (m_support & SupportOneFileTotal) { if (m_support & ~SupportOneFileTotal) { // not only option p.name = "one-file"; - p.description = "Write all transform results for all input files into the single named output file."; + if (m_support & SupportOneFilePerTrack) { + p.description = "Write all transform results for all input files into the single named output file. (The default is to create one output file per input audio file, and write all transform results for that input into it.)"; + } else { + p.description = "Write all transform results for all input files into the single named output file. 
(The default is to create a separate output file for each combination of input audio file and transform.)"; + } p.hasArg = true; pl.push_back(p); } @@ -122,7 +128,7 @@ if (m_support & SupportOneFilePerTrackTransform && m_support & SupportOneFilePerTrack) { if (m_singleFileName != "") { - cerr << "FileFeatureWriter::setParameters: WARNING: Both one-file and many-files parameters provided, ignoring many-files" << endl; + SVDEBUG << "FileFeatureWriter::setParameters: WARNING: Both one-file and many-files parameters provided, ignoring many-files" << endl; } else { m_manyFiles = true; } @@ -130,17 +136,22 @@ } else if (i->first == "one-file") { if (m_support & SupportOneFileTotal) { if (m_support & ~SupportOneFileTotal) { // not only option - if (m_manyFiles) { - cerr << "FileFeatureWriter::setParameters: WARNING: Both many-files and one-file parameters provided, ignoring one-file" << endl; - } else { + // No, we cannot do this test because m_manyFiles + // may be on by default (for any FileFeatureWriter + // that supports OneFilePerTrackTransform but not + // OneFilePerTrack), so we need to be able to + // override it +// if (m_manyFiles) { +// SVDEBUG << "FileFeatureWriter::setParameters: WARNING: Both many-files and one-file parameters provided, ignoring one-file" << endl; +// } else { m_singleFileName = i->second.c_str(); - } +// } } } } else if (i->first == "stdout") { if (m_support & SupportOneFileTotal) { if (m_singleFileName != "") { - cerr << "FileFeatureWriter::setParameters: WARNING: Both stdout and one-file provided, ignoring stdout" << endl; + SVDEBUG << "FileFeatureWriter::setParameters: WARNING: Both stdout and one-file provided, ignoring stdout" << endl; } else { m_stdout = true; } @@ -153,13 +164,14 @@ } } -QString FileFeatureWriter::getOutputFilename(QString trackId, - TransformId transformId) +QString +FileFeatureWriter::getOutputFilename(QString trackId, + TransformId transformId) { if (m_singleFileName != "") { if (QFileInfo(m_singleFileName).exists() && !(m_force || m_append)) { - cerr << endl << "FileFeatureWriter: ERROR: Specified output file \"" << m_singleFileName.toStdString() << "\" exists and neither --" << getWriterTag().toStdString() << "-force nor --" << getWriterTag().toStdString() << "-append flag is specified -- not overwriting" << endl; - cerr << "NOTE: To find out how to fix this problem, read the help for the --" << getWriterTag().toStdString() << "-force" << endl << "and --" << getWriterTag().toStdString() << "-append options" << endl; + cerr << endl << "FileFeatureWriter: ERROR: Specified output file \"" << m_singleFileName << "\" exists and neither --" << getWriterTag() << "-force nor --" << getWriterTag() << "-append flag is specified -- not overwriting" << endl; + SVDEBUG << "NOTE: To find out how to fix this problem, read the help for the --" << getWriterTag() << "-force" << endl << "and --" << getWriterTag() << "-append options" << endl; return ""; } return m_singleFileName; @@ -181,14 +193,14 @@ infilename = scheme + ":" + infilename; // DOS drive! 
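Both branches of getOutputFilename() above apply the same overwrite rule: an existing output file is only reused when the writer's -force (overwrite) or -append option was given. Condensed into a hypothetical helper for reference:

#include <QFileInfo>
#include <QString>

// Hypothetical condensation of the guard used in getOutputFilename() above.
static bool mayWriteTo(QString filename, bool force, bool append)
{
    if (QFileInfo(filename).exists() && !(force || append)) {
        return false;  // caller prints the --<tag>-force / --<tag>-append hint
    }
    return true;
}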
} - cerr << "trackId = " << trackId.toStdString() << ", url = " << url.toString().toStdString() << ", infilename = " - << infilename.toStdString() << ", basename = " << basename.toStdString() << ", m_baseDir = " << m_baseDir.toStdString() << endl; +// cerr << "trackId = " << trackId << ", url = " << url.toString() << ", infilename = " +// << infilename << ", basename = " << basename << ", m_baseDir = " << m_baseDir << endl; if (m_baseDir != "") dirname = QFileInfo(m_baseDir).absoluteFilePath(); else if (local) dirname = QFileInfo(infilename).absolutePath(); else dirname = QDir::currentPath(); - cerr << "dirname = " << dirname.toStdString() << endl; +// cerr << "dirname = " << dirname << endl; QString filename; @@ -203,17 +215,35 @@ filename = QDir(dirname).filePath(filename); if (QFileInfo(filename).exists() && !(m_force || m_append)) { - cerr << endl << "FileFeatureWriter: ERROR: Output file \"" << filename.toStdString() << "\" exists (for input file or URL \"" << trackId.toStdString() << "\" and transform \"" << transformId.toStdString() << "\") and neither --" << getWriterTag().toStdString() << "-force nor --" << getWriterTag().toStdString() << "-append is specified -- not overwriting" << endl; - cerr << "NOTE: To find out how to fix this problem, read the help for the --" << getWriterTag().toStdString() << "-force" << endl << "and --" << getWriterTag().toStdString() << "-append options" << endl; + cerr << endl << "FileFeatureWriter: ERROR: Output file \"" << filename << "\" exists (for input file or URL \"" << trackId << "\" and transform \"" << transformId << "\") and neither --" << getWriterTag() << "-force nor --" << getWriterTag() << "-append is specified -- not overwriting" << endl; + SVDEBUG << "NOTE: To find out how to fix this problem, read the help for the --" << getWriterTag() << "-force" << endl << "and --" << getWriterTag() << "-append options" << endl; return ""; } return filename; } +void +FileFeatureWriter::testOutputFile(QString trackId, + TransformId transformId) +{ + // Obviously, if we're writing to stdout we can't test for an + // openable output file. But when writing a single file we don't + // want to either, because this test would fail on the second and + // subsequent input files (because the file would already exist). 
+ // getOutputFile does the right thing in this case, so we just + // leave it to it + if (m_stdout || m_singleFileName != "") return; -QFile *FileFeatureWriter::getOutputFile(QString trackId, - TransformId transformId) + QString filename = getOutputFilename(trackId, transformId); + if (filename == "") { + throw FailedToOpenOutputStream(trackId, transformId); + } +} + +QFile * +FileFeatureWriter::getOutputFile(QString trackId, + TransformId transformId) { pair key; @@ -229,15 +259,15 @@ QString filename = getOutputFilename(trackId, transformId); - if (filename == "") { // stdout + if (filename == "") { // stdout or failure return 0; } - cerr << "FileFeatureWriter: NOTE: Using output filename \"" - << filename.toStdString() << "\"" << endl; + SVDEBUG << "FileFeatureWriter: NOTE: Using output filename \"" + << filename << "\"" << endl; if (m_append) { - cerr << "FileFeatureWriter: NOTE: Calling reviewFileForAppending" << endl; + SVDEBUG << "FileFeatureWriter: NOTE: Calling reviewFileForAppending" << endl; reviewFileForAppending(filename); } @@ -252,7 +282,7 @@ m_files[key] = 0; throw FailedToOpenFile(filename); } - + m_files[key] = file; } @@ -267,7 +297,7 @@ if (!file && !m_stdout) { return 0; } - + if (m_streams.find(file) == m_streams.end()) { if (m_stdout) { m_streams[file] = new QTextStream(stdout); @@ -299,7 +329,7 @@ void FileFeatureWriter::finish() { -// cerr << "FileFeatureWriter::finish()" << endl; +// SVDEBUG << "FileFeatureWriter::finish()" << endl; if (m_singleFileName != "" || m_stdout) return; @@ -309,9 +339,11 @@ m_streams.erase(m_streams.begin()); } while (!m_files.empty()) { - cerr << "FileFeatureWriter::finish: NOTE: Closing feature file \"" - << m_files.begin()->second->fileName().toStdString() << "\"" << endl; - delete m_files.begin()->second; + if (m_files.begin()->second) { + SVDEBUG << "FileFeatureWriter::finish: NOTE: Closing feature file \"" + << m_files.begin()->second->fileName() << "\"" << endl; + delete m_files.begin()->second; + } m_files.erase(m_files.begin()); } m_prevstream = 0; diff -r 4efa7429cd85 -r c10cb8782576 transform/FileFeatureWriter.h --- a/transform/FileFeatureWriter.h Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/FileFeatureWriter.h Sun Jul 01 11:53:00 2012 +0100 @@ -42,6 +42,7 @@ virtual ParameterList getSupportedParameters() const; virtual void setParameters(map ¶ms); + virtual void testOutputFile(QString trackId, TransformId transformId); virtual void flush(); virtual void finish(); diff -r 4efa7429cd85 -r c10cb8782576 transform/ModelTransformerFactory.cpp --- a/transform/ModelTransformerFactory.cpp Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/ModelTransformerFactory.cpp Sun Jul 01 11:53:00 2012 +0100 @@ -26,10 +26,6 @@ #include "plugin/RealTimePluginFactory.h" #include "plugin/PluginXml.h" -#ifndef NO_SV_GUI -#include "widgets/PluginParameterDialog.h" -#endif - #include "data/model/DenseTimeValueModel.h" #include @@ -52,30 +48,14 @@ { } -bool -ModelTransformerFactory::getChannelRange(TransformId identifier, - Vamp::PluginBase *plugin, - int &minChannels, int &maxChannels) -{ - Vamp::Plugin *vp = 0; - if ((vp = dynamic_cast(plugin)) || - (vp = dynamic_cast(plugin))) { - minChannels = vp->getMinChannelCount(); - maxChannels = vp->getMaxChannelCount(); - return true; - } else { - return TransformFactory::getInstance()-> - getTransformChannelRange(identifier, minChannels, maxChannels); - } -} - ModelTransformer::Input ModelTransformerFactory::getConfigurationForTransform(Transform &transform, const std::vector &candidateInputModels, Model 
*defaultInputModel, AudioPlaySource *source, size_t startFrame, - size_t duration) + size_t duration, + UserConfigurator *configurator) { ModelTransformer::Input input(0); @@ -87,12 +67,12 @@ Model *inputModel = candidateInputModels[0]; QStringList candidateModelNames; QString defaultModelName; - std::map modelMap; + QMap modelMap; for (size_t i = 0; i < candidateInputModels.size(); ++i) { QString modelName = candidateInputModels[i]->objectName(); QString origModelName = modelName; int dupcount = 1; - while (modelMap.find(modelName) != modelMap.end()) { + while (modelMap.contains(modelName)) { modelName = tr("%1 <%2>").arg(origModelName).arg(++dupcount); } modelMap[modelName] = candidateInputModels[i]; @@ -103,21 +83,14 @@ } QString id = transform.getPluginIdentifier(); - QString output = transform.getOutput(); - QString outputLabel = ""; - QString outputDescription = ""; - bool ok = false; + bool ok = true; QString configurationXml = m_lastConfigurations[transform.getIdentifier()]; - std::cerr << "last configuration: " << configurationXml.toStdString() << std::endl; + std::cerr << "last configuration: " << configurationXml << std::endl; Vamp::PluginBase *plugin = 0; - bool frequency = false; - bool effect = false; - bool generator = false; - if (FeatureExtractionPluginFactory::instanceFor(id)) { std::cerr << "getConfigurationForTransform: instantiating Vamp plugin" << std::endl; @@ -126,50 +99,17 @@ FeatureExtractionPluginFactory::instanceFor(id)->instantiatePlugin (id, inputModel->getSampleRate()); - if (vp) { - - plugin = vp; - frequency = (vp->getInputDomain() == Vamp::Plugin::FrequencyDomain); - - std::vector od = - vp->getOutputDescriptors(); - if (od.size() > 1) { - for (size_t i = 0; i < od.size(); ++i) { - if (od[i].identifier == output.toStdString()) { - outputLabel = od[i].name.c_str(); - outputDescription = od[i].description.c_str(); - break; - } - } - } - } + plugin = vp; } else if (RealTimePluginFactory::instanceFor(id)) { RealTimePluginFactory *factory = RealTimePluginFactory::instanceFor(id); const RealTimePluginDescriptor *desc = factory->getPluginDescriptor(id); - if (desc->audioInputPortCount > 0 && - desc->audioOutputPortCount > 0 && - !desc->isSynth) { - effect = true; - } - - if (desc->audioInputPortCount == 0) { - generator = true; - } - - if (output != "A") { - int outputNo = output.toInt(); - if (outputNo >= 0 && outputNo < int(desc->controlOutputPortCount)) { - outputLabel = desc->controlOutputPortNames[outputNo].c_str(); - } - } - size_t sampleRate = inputModel->getSampleRate(); size_t blockSize = 1024; size_t channels = 1; - if (effect && source) { + if (source) { sampleRate = source->getTargetSampleRate(); blockSize = source->getTargetBlockSize(); channels = source->getTargetChannelCount(); @@ -179,10 +119,6 @@ (id, 0, 0, sampleRate, blockSize, channels); plugin = rtp; - - if (effect && source && rtp) { - source->setAuditioningEffect(rtp); - } } if (plugin) { @@ -200,111 +136,22 @@ // whatever the user chose last time around PluginXml(plugin).setParametersFromXml(configurationXml); -#ifndef NO_SV_GUI - int sourceChannels = 1; - if (dynamic_cast(inputModel)) { - sourceChannels = dynamic_cast(inputModel) - ->getChannelCount(); - } - - int minChannels = 1, maxChannels = sourceChannels; - getChannelRange(transform.getIdentifier(), plugin, - minChannels, maxChannels); - - int targetChannels = sourceChannels; - if (!effect) { - if (sourceChannels < minChannels) targetChannels = minChannels; - if (sourceChannels > maxChannels) targetChannels = maxChannels; - } - - int 
defaultChannel = -1; //!!! no longer saved! [was context.channel] - - PluginParameterDialog *dialog = new PluginParameterDialog(plugin); - - dialog->setMoreInfoUrl(TransformFactory::getInstance()-> - getTransformInfoUrl(transform.getIdentifier())); - - if (candidateModelNames.size() > 1 && !generator) { - dialog->setCandidateInputModels(candidateModelNames, - defaultModelName); - } - - if (startFrame != 0 || duration != 0) { - dialog->setShowSelectionOnlyOption(true); - } - - if (targetChannels > 0) { - dialog->setChannelArrangement(sourceChannels, targetChannels, - defaultChannel); + if (configurator) { + ok = configurator->configure(input, transform, plugin, + inputModel, source, + startFrame, duration, + modelMap, + candidateModelNames, + defaultModelName); } - dialog->setOutputLabel(outputLabel, outputDescription); - - dialog->setShowProcessingOptions(true, frequency); - - if (dialog->exec() == QDialog::Accepted) { - ok = true; - } - - QString selectedInput = dialog->getInputModel(); - if (selectedInput != "") { - if (modelMap.find(selectedInput) != modelMap.end()) { - inputModel = modelMap[selectedInput]; - std::cerr << "Found selected input \"" << selectedInput.toStdString() << "\" in model map, result is " << inputModel << std::endl; - } else { - std::cerr << "Failed to find selected input \"" << selectedInput.toStdString() << "\" in model map" << std::endl; - } - } else { - std::cerr << "Selected input empty: \"" << selectedInput.toStdString() << "\"" << std::endl; - } - - // Write parameters back to transform object - TransformFactory::getInstance()-> - setParametersFromPlugin(transform, plugin); - - input.setChannel(dialog->getChannel()); - - //!!! The dialog ought to be taking & returning transform - //objects and input objects and stuff rather than passing - //around all this misc stuff, but that's for tomorrow - //(whenever that may be) - - if (startFrame != 0 || duration != 0) { - if (dialog->getSelectionOnly()) { - transform.setStartTime(RealTime::frame2RealTime - (startFrame, inputModel->getSampleRate())); - transform.setDuration(RealTime::frame2RealTime - (duration, inputModel->getSampleRate())); - } - } - - size_t stepSize = 0, blockSize = 0; - WindowType windowType = HanningWindow; - - dialog->getProcessingParameters(stepSize, - blockSize, - windowType); - - transform.setStepSize(stepSize); - transform.setBlockSize(blockSize); - transform.setWindowType(windowType); - -#endif TransformFactory::getInstance()-> makeContextConsistentWithPlugin(transform, plugin); configurationXml = PluginXml(plugin).toXmlString(); -#ifndef NO_SV_GUI - delete dialog; -#endif - - if (effect && source) { - source->setAuditioningEffect(0); // will delete our plugin - } else { - delete plugin; - } + delete plugin; } if (ok) { @@ -334,8 +181,8 @@ new RealTimeEffectModelTransformer(input, transform); } else { - std::cerr << "ModelTransformerFactory::createTransformer: Unknown transform \"" - << transform.getIdentifier().toStdString() << "\"" << std::endl; + SVDEBUG << "ModelTransformerFactory::createTransformer: Unknown transform \"" + << transform.getIdentifier() << "\"" << endl; return transformer; } @@ -348,7 +195,7 @@ const ModelTransformer::Input &input, QString &message) { - std::cerr << "ModelTransformerFactory::transform: Constructing transformer with input model " << input.getModel() << std::endl; + SVDEBUG << "ModelTransformerFactory::transform: Constructing transformer with input model " << input.getModel() << endl; ModelTransformer *t = createTransformer(transform, input); if (!t) 
return 0; @@ -389,7 +236,7 @@ QObject *s = sender(); ModelTransformer *transformer = dynamic_cast(s); -// std::cerr << "ModelTransformerFactory::transformerFinished(" << transformer << ")" << std::endl; +// SVDEBUG << "ModelTransformerFactory::transformerFinished(" << transformer << ")" << endl; if (!transformer) { std::cerr << "WARNING: ModelTransformerFactory::transformerFinished: sender is not a transformer" << std::endl; diff -r 4efa7429cd85 -r c10cb8782576 transform/ModelTransformerFactory.h --- a/transform/ModelTransformerFactory.h Mon Nov 29 12:45:39 2010 +0000 +++ b/transform/ModelTransformerFactory.h Sun Jul 01 11:53:00 2012 +0100 @@ -23,6 +23,7 @@ #include +#include #include #include @@ -37,12 +38,27 @@ static ModelTransformerFactory *getInstance(); + class UserConfigurator { + public: + virtual bool configure(ModelTransformer::Input &input, + Transform &transform, + Vamp::PluginBase *plugin, + Model *&inputModel, + AudioPlaySource *source, + size_t startFrame, + size_t duration, + const QMap &modelMap, + QStringList candidateModelNames, + QString defaultModelName) = 0; + }; + /** - * Fill out the configuration for the given transform (by asking - * the user, most likely). Returns the selected input model and - * channel if the transform is acceptable, or an input with a null - * model if the operation should be cancelled. Audio play source - * may be used to audition effects plugins, if provided. + * Fill out the configuration for the given transform (may include + * asking the user by calling back on the UserConfigurator). + * Returns the selected input model and channel if the transform + * is acceptable, or an input with a null model if the operation + * should be cancelled. Audio play source may be used to audition + * effects plugins, if provided. 
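The UserConfigurator interface declared above is how the parameter dialog that used to live inside this factory gets reattached from outside the library. A minimal sketch of an implementation follows; the QMap template arguments are assumed to be <QString, Model *> (they are elided in the declaration above) and the class name is invented. A GUI host would show its dialog inside configure().

#include "transform/ModelTransformerFactory.h"

class AcceptDefaultsConfigurator : public ModelTransformerFactory::UserConfigurator
{
public:
    virtual bool configure(ModelTransformer::Input &input,
                           Transform &transform,
                           Vamp::PluginBase *plugin,
                           Model *&inputModel,
                           AudioPlaySource *source,
                           size_t startFrame,
                           size_t duration,
                           const QMap<QString, Model *> &modelMap,
                           QStringList candidateModelNames,
                           QString defaultModelName) {

        // Keep the factory's suggested input model if it is in the map and
        // leave the transform's parameters untouched.  The channel value is
        // illustrative; the old dialog code passed the user's channel choice
        // to input.setChannel() here.
        if (modelMap.contains(defaultModelName)) {
            inputModel = modelMap.value(defaultModelName);
        }
        input.setChannel(-1);
        return true;  // returning false would cancel the transform
    }
};

Passing no configurator keeps the non-interactive path: as the code above shows, ok starts out true and the transform's existing parameters are simply used as-is.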
     */
    ModelTransformer::Input
    getConfigurationForTransform(Transform &transform,
@@ -50,7 +66,8 @@
                                 Model *defaultInputModel,
                                 AudioPlaySource *source = 0,
                                 size_t startFrame = 0,
-                                 size_t duration = 0);
+                                 size_t duration = 0,
+                                 UserConfigurator *configurator = 0);
     /**
      * Return the output model resulting from applying the named
@@ -85,9 +102,6 @@
     typedef std::set<ModelTransformer *> TransformerSet;
     TransformerSet m_runningTransformers;
-    bool getChannelRange(TransformId identifier,
-                         Vamp::PluginBase *plugin, int &min, int &max);
-
     static ModelTransformerFactory *m_instance;
 };
diff -r 4efa7429cd85 -r c10cb8782576 transform/RealTimeEffectModelTransformer.cpp
--- a/transform/RealTimeEffectModelTransformer.cpp Mon Nov 29 12:45:39 2010 +0000
+++ b/transform/RealTimeEffectModelTransformer.cpp Sun Jul 01 11:53:00 2012 +0100
@@ -43,14 +43,14 @@
     if (!m_transform.getBlockSize()) m_transform.setBlockSize(1024);
-//    std::cerr << "RealTimeEffectModelTransformer::RealTimeEffectModelTransformer: plugin " << pluginId.toStdString() << ", output " << output << std::endl;
+//    SVDEBUG << "RealTimeEffectModelTransformer::RealTimeEffectModelTransformer: plugin " << pluginId << ", output " << output << endl;
     RealTimePluginFactory *factory = RealTimePluginFactory::instanceFor(pluginId);
     if (!factory) {
         std::cerr << "RealTimeEffectModelTransformer: No factory available for plugin id \""
-                  << pluginId.toStdString() << "\"" << std::endl;
+                  << pluginId << "\"" << std::endl;
         return;
     }
@@ -64,7 +64,7 @@
     if (!m_plugin) {
         std::cerr << "RealTimeEffectModelTransformer: Failed to instantiate plugin \""
-                  << pluginId.toStdString() << "\"" << std::endl;
+                  << pluginId << "\"" << std::endl;
         return;
     }
@@ -110,7 +110,7 @@
     DenseTimeValueModel *dtvm = dynamic_cast<DenseTimeValueModel *>(getInputModel());
     if (!dtvm) {
-        std::cerr << "RealTimeEffectModelTransformer::getConformingInput: WARNING: Input model is not conformable to DenseTimeValueModel" << std::endl;
+        SVDEBUG << "RealTimeEffectModelTransformer::getConformingInput: WARNING: Input model is not conformable to DenseTimeValueModel" << endl;
     }
     return dtvm;
 }
@@ -122,7 +122,7 @@
     if (!input) return;
     while (!input->isReady() && !m_abandoned) {
-        std::cerr << "RealTimeEffectModelTransformer::run: Waiting for input model to be ready..." << std::endl;
+        SVDEBUG << "RealTimeEffectModelTransformer::run: Waiting for input model to be ready..." << endl;
         usleep(500000);
     }
     if (m_abandoned) return;
@@ -179,7 +179,7 @@
     long completion =
         (((blockFrame - contextStart) / blockSize) * 99) /
-        ((contextDuration) / blockSize);
+        (1 + ((contextDuration) / blockSize));
     long got = 0;
diff -r 4efa7429cd85 -r c10cb8782576 transform/Transform.cpp
--- a/transform/Transform.cpp Mon Nov 29 12:45:39 2010 +0000
+++ b/transform/Transform.cpp Sun Jul 01 11:53:00 2012 +0100
@@ -54,10 +54,10 @@
     if (!doc.setContent(xml, false, &error, &errorLine, &errorColumn)) {
         std::cerr << "Transform::Transform: Error in parsing XML: "
-                  << error.toStdString() << " at line " << errorLine
+                  << error << " at line " << errorLine
                   << ", column " << errorColumn << std::endl;
         std::cerr << "Input follows:" << std::endl;
-        std::cerr << xml.toStdString() << std::endl;
+        std::cerr << xml << std::endl;
         std::cerr << "Input ends."
                   << std::endl;
         return;
     }
@@ -124,11 +124,11 @@
         m_duration == t.m_duration &&
         m_sampleRate == t.m_sampleRate;
     /*
-    std::cerr << "Transform::operator==: identical = " << identical << std::endl;
+    SVDEBUG << "Transform::operator==: identical = " << identical << endl;
     std::cerr << "A = " << std::endl;
-    std::cerr << toXmlString().toStdString() << std::endl;
+    std::cerr << toXmlString() << std::endl;
     std::cerr << "B = " << std::endl;
-    std::cerr << t.toXmlString().toStdString() << std::endl;
+    std::cerr << t.toXmlString() << std::endl;
     */
     return identical;
 }
@@ -260,8 +260,7 @@
 void
 Transform::setParameter(QString name, float value)
 {
-//    std::cerr << "Transform::setParameter(" << name.toStdString()
-//              << ") -> " << value << std::endl;
+//    SVDEBUG << "Transform::setParameter(" << name// << ") -> " << value << endl;
     m_parameters[name] = value;
 }
@@ -280,8 +279,8 @@
 void
 Transform::setConfigurationValue(QString name, QString value)
 {
-    std::cerr << "Transform::setConfigurationValue(" << name.toStdString()
-              << ") -> " << value.toStdString() << std::endl;
+    SVDEBUG << "Transform::setConfigurationValue(" << name << ") -> " << value << endl;
     m_configuration[name] = value;
 }
@@ -463,8 +461,8 @@
         str == "standard deviation" || str == "sd") return StandardDeviation;
     if (str == "count") return Count;
     if (str == "") return NoSummary;
-    std::cerr << "Transform::stringToSummaryType: unknown summary type \""
-              << str.toStdString() << "\"" << std::endl;
+    SVDEBUG << "Transform::stringToSummaryType: unknown summary type \""
+              << str << "\"" << endl;
     return NoSummary;
 }
@@ -483,8 +481,8 @@
     case Count: return "count";
     case NoSummary: return "";
     default:
-        std::cerr << "Transform::summaryTypeToString: unexpected summary type "
-                  << int(type) << std::endl;
+        SVDEBUG << "Transform::summaryTypeToString: unexpected summary type "
+                  << int(type) << endl;
         return "";
     }
 }
diff -r 4efa7429cd85 -r c10cb8782576 transform/TransformFactory.cpp
--- a/transform/TransformFactory.cpp Mon Nov 29 12:45:39 2010 +0000
+++ b/transform/TransformFactory.cpp Sun Jul 01 11:53:00 2012 +0100
@@ -52,7 +52,7 @@
 void
 TransformFactory::deleteInstance()
 {
-    std::cerr << "TransformFactory::deleteInstance called" << std::endl;
+    SVDEBUG << "TransformFactory::deleteInstance called" << endl;
     delete m_instance;
     m_instance = 0;
 }
@@ -70,12 +70,12 @@
     m_exiting = true;
     if (m_thread) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-        std::cerr << "TransformFactory::~TransformFactory: waiting on thread" << std::endl;
+        SVDEBUG << "TransformFactory::~TransformFactory: waiting on thread" << endl;
 #endif
         m_thread->wait();
         delete m_thread;
 #ifdef DEBUG_TRANSFORM_FACTORY
-        std::cerr << "TransformFactory::~TransformFactory: waited and done" << std::endl;
+        SVDEBUG << "TransformFactory::~TransformFactory: waited and done" << endl;
 #endif
     }
 }
@@ -113,7 +113,7 @@
     for (TransformDescriptionMap::const_iterator i = m_transforms.begin();
          i != m_transforms.end(); ++i) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-        cerr << "inserting transform into set: id = " << i->second.identifier.toStdString() << endl;
+        SVDEBUG << "inserting transform into set: id = " << i->second.identifier << endl;
 #endif
         dset.insert(i->second);
     }
@@ -122,7 +122,7 @@
     for (std::set<TransformDescription>::const_iterator i = dset.begin();
          i != dset.end(); ++i) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-        cerr << "inserting transform into list: id = " << i->identifier.toStdString() << endl;
+        SVDEBUG << "inserting transform into list: id = " << i->identifier << endl;
 #endif
         list.push_back(*i);
     }
@@ -159,7 +159,7 @@
     for (TransformDescriptionMap::const_iterator
             i = m_uninstalledTransforms.begin();
          i != m_uninstalledTransforms.end(); ++i) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-        cerr << "inserting transform into set: id = " << i->second.identifier.toStdString() << endl;
+        SVDEBUG << "inserting transform into set: id = " << i->second.identifier << endl;
 #endif
         dset.insert(i->second);
     }
@@ -168,7 +168,7 @@
     for (std::set<TransformDescription>::const_iterator i = dset.begin();
          i != dset.end(); ++i) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-        cerr << "inserting transform into uninstalled list: id = " << i->identifier.toStdString() << endl;
+        SVDEBUG << "inserting transform into uninstalled list: id = " << i->identifier << endl;
 #endif
         list.push_back(*i);
     }
@@ -478,7 +478,7 @@
              !plugin->getParameterDescriptors().empty());
 #ifdef DEBUG_TRANSFORM_FACTORY
-        cerr << "Feature extraction plugin transform: " << transformId.toStdString() << " friendly name: " << friendlyName.toStdString() << endl;
+        cerr << "Feature extraction plugin transform: " << transformId << " friendly name: " << friendlyName << endl;
 #endif
         transforms[transformId] =
@@ -530,7 +530,7 @@
 //!!!        if (descriptor->controlOutputPortCount == 0 ||
 //            descriptor->audioInputPortCount == 0) continue;
-//        std::cout << "TransformFactory::populateRealTimePlugins: plugin " << pluginId.toStdString() << " has " << descriptor->controlOutputPortCount << " control output ports, " << descriptor->audioOutputPortCount << " audio outputs, " << descriptor->audioInputPortCount << " audio inputs" << endl;
+//        std::cout << "TransformFactory::populateRealTimePlugins: plugin " << pluginId << " has " << descriptor->controlOutputPortCount << " control output ports, " << descriptor->audioOutputPortCount << " audio outputs, " << descriptor->audioInputPortCount << " audio inputs" << endl;
         QString pluginName = descriptor->name.c_str();
         QString category = factory->getPluginCategory(pluginId);
@@ -659,9 +659,8 @@
         QString name = desc.getPluginName();
 #ifdef DEBUG_TRANSFORM_FACTORY
         if (name == "") {
-            std::cerr << "TransformFactory::populateUninstalledTransforms: "
-                      << "No name available for plugin " << i->toStdString()
-                      << ", skipping" << std::endl;
+            SVDEBUG << "TransformFactory::populateUninstalledTransforms: "
+                      << "No name available for plugin " << i- << ", skipping" << endl;
             continue;
         }
 #endif
@@ -678,8 +677,8 @@
         if (m_transforms.find(tid) != m_transforms.end()) {
 #ifdef DEBUG_TRANSFORM_FACTORY
-            std::cerr << "TransformFactory::populateUninstalledTransforms: "
-                      << tid.toStdString() << " is installed; adding info url if appropriate, skipping rest" << std::endl;
+            SVDEBUG << "TransformFactory::populateUninstalledTransforms: "
+                      << tid << " is installed; adding info url if appropriate, skipping rest" << endl;
 #endif
             if (infoUrl != "") {
                 if (m_transforms[tid].infoUrl == "") {
@@ -690,8 +689,8 @@
         }
 #ifdef DEBUG_TRANSFORM_FACTORY
-        std::cerr << "TransformFactory::populateUninstalledTransforms: "
-                  << "adding " << tid.toStdString() << std::endl;
+        SVDEBUG << "TransformFactory::populateUninstalledTransforms: "
+                  << "adding " << tid << endl;
 #endif
         QString oname = desc.getOutputName(*j);
@@ -953,9 +952,9 @@
     for (Vamp::PluginBase::ParameterList::const_iterator i = parameters.begin();
          i != parameters.end(); ++i) {
         pmap[i->identifier.c_str()] = plugin->getParameter(i->identifier);
-//        std::cerr << "TransformFactory::setParametersFromPlugin: parameter "
+//        SVDEBUG << "TransformFactory::setParametersFromPlugin: parameter "
 //                  << i->identifier << " -> value " <<
-//                  pmap[i->identifier.c_str()] << std::endl;
+//                  pmap[i->identifier.c_str()] << endl;
     }
     transform.setParameters(pmap);
@@ -1068,9 +1067,9 @@
     Vamp::PluginBase *plugin = instantiateDefaultPluginFor
         (t.getIdentifier(), 0);
     if (!plugin) {
-        cerr << "TransformFactory::getPluginConfigurationXml: "
+        SVDEBUG << "TransformFactory::getPluginConfigurationXml: "
              << "Unable to instantiate plugin for transform \""
-             << t.getIdentifier().toStdString() << "\"" << endl;
+             << t.getIdentifier() << "\"" << endl;
         return xml;
     }
@@ -1090,9 +1089,9 @@
     Vamp::PluginBase *plugin = instantiateDefaultPluginFor
         (t.getIdentifier(), 0);
     if (!plugin) {
-        cerr << "TransformFactory::setParametersFromPluginConfigurationXml: "
+        SVDEBUG << "TransformFactory::setParametersFromPluginConfigurationXml: "
              << "Unable to instantiate plugin for transform \""
-             << t.getIdentifier().toStdString() << "\"" << endl;
+             << t.getIdentifier() << "\"" << endl;
         return;
     }
@@ -1146,7 +1145,7 @@
     if (!m_uninstalledTransformsMutex.tryLock()) {
         // uninstalled transforms are being populated; this may take some time,
         // and they aren't critical, but we will speed them up if necessary
-        std::cerr << "TransformFactory::search: Uninstalled transforms mutex is held, skipping" << std::endl;
+        SVDEBUG << "TransformFactory::search: Uninstalled transforms mutex is held, skipping" << endl;
         m_populatingSlowly = false;
         return results;
     }
diff -r 4efa7429cd85 -r c10cb8782576 transform/transform.pro
--- a/transform/transform.pro Mon Nov 29 12:45:39 2010 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-TEMPLATE = lib
-
-SV_UNIT_PACKAGES = vamp vamp-hostsdk
-load(../prf/sv.prf)
-
-CONFIG += sv staticlib qt thread warn_on stl rtti exceptions
-QT += xml
-
-TARGET = svtransform
-
-DEPENDPATH += . ..
-INCLUDEPATH += . ..
-OBJECTS_DIR = tmp_obj
-MOC_DIR = tmp_moc
-
-# Input
-HEADERS += CSVFeatureWriter.h \
-           FeatureExtractionModelTransformer.h \
-           FeatureWriter.h \
-           FileFeatureWriter.h \
-           RealTimeEffectModelTransformer.h \
-           Transform.h \
-           TransformDescription.h \
-           TransformFactory.h \
-           ModelTransformer.h \
-           ModelTransformerFactory.h
-SOURCES += CSVFeatureWriter.cpp \
-           FeatureExtractionModelTransformer.cpp \
-           FileFeatureWriter.cpp \
-           RealTimeEffectModelTransformer.cpp \
-           Transform.cpp \
-           TransformFactory.cpp \
-           ModelTransformer.cpp \
-           ModelTransformerFactory.cpp
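
The substantive API change in this changeset is that ModelTransformerFactory::getConfigurationForTransform no longer builds a PluginParameterDialog itself: a caller may now pass a ModelTransformerFactory::UserConfigurator, and the factory calls back into configure() to fill out the transform. As an illustration only (not part of the changeset), a client might implement that callback roughly as follows. The QMap key/value types (QString to Model *), the include paths, and the use of channel -1 to mean "all channels mixed" are assumptions drawn from the surrounding code in this patch.

    // Sketch of a non-interactive UserConfigurator.  A GUI client would
    // instead show its parameter dialog inside configure() and return
    // false if the user cancelled.

    #include <QMap>
    #include <QStringList>

    #include "transform/ModelTransformerFactory.h"
    #include "transform/Transform.h"
    #include "transform/TransformFactory.h"

    class AcceptDefaultsConfigurator :
        public ModelTransformerFactory::UserConfigurator
    {
    public:
        virtual bool configure(ModelTransformer::Input &input,
                               Transform &transform,
                               Vamp::PluginBase *plugin,
                               Model *&inputModel,
                               AudioPlaySource *,            // source: unused here
                               size_t,                       // startFrame: unused
                               size_t,                       // duration: unused
                               const QMap<QString, Model *> &modelMap,
                               QStringList,                  // candidateModelNames
                               QString defaultModelName) {

            // Keep whichever input model was offered as the default
            if (modelMap.contains(defaultModelName)) {
                inputModel = modelMap[defaultModelName];
            }

            // Assumed convention: -1 selects all channels mixed, as the
            // old dialog-based code used for its default channel
            input.setChannel(-1);

            // Copy the plugin's current settings into the transform,
            // mirroring what the old dialog path did on acceptance
            TransformFactory::getInstance()->
                setParametersFromPlugin(transform, plugin);

            return true;   // "ok" -- do not cancel the transform
        }
    };

    // Usage: pass the configurator as the new trailing argument of
    // ModelTransformerFactory::getConfigurationForTransform(..., &conf)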