annotate musicweb.tex @ 13:ebb555f93f05

started MusicWeb section
author alo
date Fri, 29 Apr 2016 23:36:36 +0100
parents ac5876e99049
children 26c3962ac371
rev   line source
alo@0 1 \documentclass{llncs}
alo@0 2 \usepackage{graphicx}
alo@0 3
alo@0 4 \usepackage{amsmath,cite}
alo@0 5 \usepackage{pifont}
alo@0 6 \usepackage{listings}
alo@0 7 \usepackage{courier}
alo@0 8 \usepackage{adjustbox}
alo@0 9 \usepackage{url}
alo@0 10 \usepackage[ngerman,english]{babel}
alo@0 11 \usepackage[utf8]{inputenc}
alo@0 12 \newcommand{\cmark}{\fontsize{14}{14}\textbullet\selectfont}
alo@0 13 \usepackage[usenames,dvipsnames]{color}
alo@0 14
alo@0 15 \lstdefinelanguage{sparql} {
alo@0 16 morekeywords={SELECT, WHERE, a, COUNT, GROUP, ORDER, BY, HAVING, as, DISTINCT, OPTIONAL},
alo@0 17 sensitive=true,
alo@0 18 morecomment=[l][\color{ForestGreen}\bfseries\selectfont]{\#\ },
alo@0 19 moredelim=[s][\color{ForestGreen}\bfseries\selectfont]{?}{\ }, %variable
alo@0 20 moredelim=[s][\color{NavyBlue}\bfseries\selectfont]{@}{\ }, %prefix
alo@0 21 moredelim=[s][\color{OrangeRed}\bfseries\selectfont]{<}{>}, %red URIs
alo@0 22 morestring=[b][\color{ForestGreen}]\", %green strings
alo@0 23 morestring=[b]",
alo@0 24 }
alo@0 25
alo@0 26 \lstdefinestyle{sparql}{
alo@0 27 language=sparql,
alo@0 28 basicstyle=\footnotesize\ttfamily\bfseries,
alo@0 29 % basicstyle=\ttfamily,
alo@0 30 keywordstyle=\color{blue}\bfseries,
alo@0 31 identifierstyle=,
alo@0 32 commentstyle=\color{ForestGreen}\bfseries,
alo@0 33 stringstyle=\color{BlueGreen}\ttfamily,
alo@0 34 showstringspaces=false,
alo@0 35 emph={context},emphstyle=\color{BrickRed}\bfseries,
alo@0 36 frame = lines,
alo@0 37 %aboveskip = 15pt,
alo@0 38 %framexbottommargin = 5pt,
alo@0 39 %framextopmargin = 5pt,
alo@0 40 aboveskip = 10pt,
alo@0 41 framexbottommargin = 2pt,
alo@0 42 framextopmargin = 2pt,
alo@0 43 captionpos=b,
alo@0 44 escapechar=~,
alo@0 45 breaklines=true,
alo@0 46 breakatwhitespace=true,
alo@0 47 breakindent=20pt,
alo@0 48 xleftmargin=5pt,
alo@0 49 xrightmargin=5pt,
alo@0 50 basewidth={0.55em,0.45em},
alo@0 51 %basewidth={0.50em,0.6em},
alo@0 52 fontadjust=true,
alo@0 53 % lineskip={-1.5pt},
alo@0 54 % columns=[c]spaceflexible, %make the long artist URI fit
alo@0 55 %float = h!,
alo@0 56 }
alo@0 57
alo@0 58 \renewcommand{\lstlistingname}{{\bf Listing}}
alo@0 59
alo@0 60
alo@0 61 \lstdefinelanguage{ttl} {
alo@0 62 morekeywords={a},
alo@0 63 sensitive=true,
alo@0 64 % morecomment=[l][\color{ForestGreen}\bfseries\fontsize{10}{2}\selectfont]{\#\ },
alo@0 65 % moredelim=[s][\color{ForestGreen}\bfseries\fontsize{10}{2}\selectfont]{\#\ }{\ },
alo@0 66 % moredelim=[s][\color{NavyBlue}\bfseries\fontsize{10}{2}\selectfont]{@}{\ }, %prefix
alo@0 67 % moredelim=[s][\color{OrangeRed}\bfseries\fontsize{10}{2}\selectfont]{<}{>}, %red URIs
alo@0 68 morecomment=[l][\color{ForestGreen}\ttfamily\bfseries\selectfont]{\#\ },
alo@0 69 moredelim=[s][\color{NavyBlue}\ttfamily\bfseries\selectfont]{@}{\ }, %prefix
alo@0 70 moredelim=[s][\color{OrangeRed}\ttfamily\bfseries\selectfont]{<}{>}, %red URIs
alo@0 71 morestring=[b][\color{ForestGreen}]\", %green strings
alo@0 72 morestring=[b]",
alo@0 73 }
alo@0 74
alo@0 75 \lstdefinestyle{ttl}{
alo@0 76 language=ttl,
alo@0 77 basicstyle=\footnotesize\ttfamily\bfseries,
alo@0 78 keywordstyle=\color{blue}\bfseries,
alo@0 79 identifierstyle=,
alo@0 80 commentstyle=\color{ForestGreen}\bfseries,
alo@0 81 stringstyle=\color{BlueGreen}\bfseries,
alo@0 82 showstringspaces=false,
alo@0 83 emph={context},emphstyle=\color{BrickRed}\bfseries,
alo@0 84 frame = lines,
alo@0 85 framexbottommargin = 5pt,
alo@0 86 framextopmargin = 5pt,
alo@0 87 captionpos=b,
alo@0 88 escapechar=~,
alo@0 89 breaklines=true,
alo@0 90 breakatwhitespace=true,
alo@0 91 breakindent=20pt,
alo@0 92 aboveskip = 15pt,
alo@0 93 xleftmargin=5pt,
alo@0 94 xrightmargin=5pt,
alo@0 95 basewidth={0.55em,0.45em},
alo@0 96 % basewidth={0.50em,0.6em},
alo@0 97 fontadjust=true,
alo@0 98 lineskip={-1.5pt},
alo@0 99 % columns=[c]spaceflexible, %make the long artist URI fit
alo@0 100 % float = h!,
alo@0 101 }
alo@0 102
alo@0 103 \lstdefinelanguage{json} {
alo@0 104 sensitive=true,
alo@0 105 %morecomment=[l]{@},
alo@0 106 moredelim=[s][\color{ForestGreen}]{?}{\ }, %green variables
alo@0 107 moredelim=[s][\color{OrangeRed}\fontsize{7}{2}\selectfont]{<}{>}, %red URIs
alo@0 108 morestring=[b][\color{Gray}]\",
alo@0 109 morestring=[b]",
alo@0 110 }
alo@0 111
alo@0 112 \lstdefinestyle{json}{
alo@0 113 language=json,
alo@0 114 keywords={type,context},
alo@0 115 basicstyle=\fontsize{7}{8}\bfseries\ttfamily,
alo@0 116 keywordstyle=\color{blue}\bfseries,
alo@0 117 identifierstyle=,
alo@0 118 commentstyle=\color{Gray}\bfseries,
alo@0 119 stringstyle=\color{OliveGreen}\ttfamily,
alo@0 120 showstringspaces=false,
alo@0 121 emph={context},emphstyle=\color{BrickRed}\bfseries,
alo@0 122 frame = lines,
alo@0 123 framexbottommargin = 5pt,
alo@0 124 framextopmargin = 5pt,
alo@0 125 tabsize=4,
alo@0 126 captionpos=b,
alo@0 127 escapechar=~,
alo@0 128 breaklines=false,
alo@0 129 xleftmargin=5pt,
alo@0 130 xrightmargin=5pt,
alo@0 131 basewidth={0.50em,0.45em},
alo@0 132 %basewidth={0.50em,0.6em},
alo@0 133 fontadjust=true,
alo@0 134 columns=[c]spaceflexible, %make the long artist URI fit
alo@0 135 float = ht,
alo@0 136 }
alo@0 137
alo@0 138 \renewcommand{\labelitemi}{$\bullet$}
alo@0 139
alo@0 140 \begin{document}
alo@0 141
alo@1 142 \mainmatter
alo@1 143
alo@3 144 \title{MusicWeb: an open linked semantic platform for music metadata}
alo@1 145
alo@1 146 \author{Mariano Mora-Mcginity \and Alo Allik \and Gy\"orgy Fazekas \and Mark Sandler }
alo@1 147 %
alo@1 148
alo@1 149 \institute{Queen Mary University of London, \\
alo@1 150 \email{\{m.mora-mcginity, a.allik, g.fazekas, mark.sandler\}@qmul.ac.uk}}
alo@1 151
alo@1 152 \maketitle
alo@0 153
alo@1 154 \begin{abstract}
alo@1 155
gyorgyf@2 156 % MusicWeb is a web site that provides users a browsing, searching and linking platform of music artist and group information by integrating open linked semantic metadata from various Semantic Web, music recommendation and social media data sources, including DBpedia.org, sameas.org, MusicBrainz, the Music Ontology, Last.FM, Youtube, and Echonest. The front portal includes suggested links to selected artists and a search functionality from where users can navigate to individual artists pages. Each artist page contains a biography, links to online audio and a video player with a side menu displaying a selection of Youtube videos. Further it provides lists of YAGO categories linking each artist to other artists by various commonalities such as style, geographical location, instrumentation, record label as well as more obscure categories, for example, artists who have received the same award, have shared the same fate, or belonged to the same organisation or religion. The artist connections are further enhanced by thematic analysis of journal articles and blog posts as well as content-based music information retrieval similarity measures.
gyorgyf@2 157
gyorgyf@2 158 This paper presents MusicWeb, a novel platform for linking music artists within a web-based application for discovering connections between them. MusicWeb provides a browsing experience using connections that are either extra-musical or tangential to music, such as the artists' political affiliation or social influence, or intra-musical, such as the artists' main instrument or most favoured musical key. The platform integrates open linked semantic metadata from various Semantic Web, music recommendation and social media data sources including DBpedia.org, sameas.org, MusicBrainz, the Music Ontology, Last.FM and Youtube as well as content-derived information. The front portal includes suggested links to selected artists and a search functionality from where users can navigate to individual artists' pages. Each artist page contains a biography and links to online audio and video resources. Connections are made using YAGO categories linking artists by various commonalities such as style, geographical location, instrumentation, record label as well as more obscure categories, for instance, artists who have received the same award, have shared the same fate, or belonged to the same organisation or religion. These connections are further enhanced by thematic analysis of journal articles and blog posts as well as content-based similarity measures focussing on high level musical categories.
alo@0 159
alo@1 160 \keywords{Semantic Web, Linked Open Data, music metadata, semantic audio analysis, music information retrieval }
alo@1 161 \end{abstract}
alo@1 162
alo@0 163 \section{Introduction}\label{sec:introduction}
mariano@10 164 In recent years we have witnessed an explosion of information, a consequence of millions of users producing and consuming web resources. Researchers and industry have recognised the potential of this data, and have endeavoured to develop methods to handle such a vast amount of information: to understand and manage it, to transform it into knowledge. Multimedia content providers have devoted a lot of energy to analysing consumer preference, in an effort to offer customised user experiences. Music stream services, for instance, carry out extensive analysis trying to identify patterns in users' listening habits, and researchers are striving to refine multimedia recommendation algorithms. There are two main approaches to music recommendation\cite{Song2012}: the first is known as \emph{collaborative filtering}\cite{Su2009}, which recommends music items based on the choices of similar users. The second model is based on audio content analysis, or \emph{music information retrieval}. The task here is to extract low to high-level audio features such as tempo, key, metric structure, melodic and harmonic sequences, instrument recognition and song segmentation, which are then used to measure music similarity\cite{Aucoutourier2002}, to carry out genre classification or to identify the mood of the song\cite{Kim2010}. Music discovery websites such as Last.fm\footnote{http://www.last.fm}, Allmusic\footnote{http://www.allmusic.com} or Pandora\footnote{http://www.pandora.com} have successfully developed hybrid systems which combine both approaches.
mariano@10 165 There are, however, limitations in both approaches to music recommendation. Most users participating in (or whose data is used to analyse) collaborative filtering listen to a very small percentage of the music available, the so called ``short-tail'', whereas the much larger ``long-tail'' remains mainly unknown\cite{Celma2010}. These systems will show a bias towards music that is already consumed by many listeners. Suggesting already popular music will increase the likelihood of it being recommended to new users, thus creating a \emph{rich-club phenomenon}\cite{Zhou2004} or what is known as \emph{cumulative advantage}. Also, content analysis of audio features is mainly applied to songs: systems can recommend similar tracks, but generally know nothing about similar artists. Many music listeners follow artists because of their style and would be interested in music from similar artists. It is very hard to pinpoint what exactly makes two artists ``similar'': very often notions of similarity are based on social and cultural issues, rather than a precise definition of style.
mariano@10 166 To many music lovers, discovering new music, or music they were not aware of, is an integral part of enjoying a musical experience, and they appreciate expanding their musical taste. Lee and Price\cite{Lee2015} identify seven different \emph{personas} which typify music service consumption. Two such personas, for instance, the ``active curator'' and the ``music epicurean'' characteristically spend a long time hunting for new music, whereas the ``wanderer'' enjoys the discovery process itself, trying out new things with an open mind. Automatic musical discovery is a very challenging problem\cite{Jennings2007}. There are many different ways in which people are attracted to new artists: word of mouth, their network of friends, music magazines or blogs, songs heard in a movie or a T.V. commercial, they might be interested in a musician who has played with another artist or been mentioned as an influence, etc. The route from listening to one artist and discovering a new one would sometimes seem very disconcerting were it to be drawn on paper. A listener is not so much following a map as exploring new territory, with many possible forks and shortcuts. Music discovery systems generally disregard this kind of information, often because it is very nuanced and difficult to parse and interpret. All these sources of information are in fact music metadata, data about the music data itself. Pachet identifies three types of musical metadata \cite{Pachet2005}:
mariano@9 167 \begin{enumerate}
mariano@9 168 \item Editorial metadata: information that is provided manually by authoritative experts. There is a wide range of potential producers of this kind of data, from record labels to collaborative schemes, as well as different kinds of data, from which musician played in which song to tour info, to artists' biography.
mariano@9 169 \item Cultural metadata: information which is produced by the environment or culture. This is data that is not explicitly entered into some information system, but rather is contained, and must be extracted from, other information sources, such as user trends, google searches, articles and magazines, word associations in blogs, etc.
mariano@9 170 \item Acoustic metadata: data extracted from audio files using music information retrieval methods.
mariano@9 171 \end{enumerate}
mariano@10 172 MusicWeb is an application which offers the user the possibility of exploring editorial, cultural and musical links between artists. It gathers, extracts and manages musical metadata from many different sources and connects them in informative ways. This paper deals with the different ways in which MusicWeb collects these resources and shapes them into high-level information. We will first review various knowledge-based web resources available to MusicWeb. We will then introduce the application itself and detail the architecture to analyse and extract data. Before the final conclusions and discussion of future work we will analyse the experience of interfacing with the application and how users can explore and discover new musical paths.
mariano@8 173
mariano@5 174
mariano@8 175
mariano@8 176
mariano@4 177 \section{Background}\label{sec:background}
mariano@4 178 \begin{itemize}
mariano@10 179 \item related work
mariano@10 180 \item very brief intro to the role of music related data sources on the web and what they are
mariano@10 181 \end{itemize}
alo@0 182
alo@13 183 \section{MusicWeb architecture}
mariano@10 184
alo@13 185 MusicWeb provides a browsing experience using connections that are either extra-musical or tangential to music, such as the artists' political affiliation or social influence, or intra-musical, such as the artists' main instrument or most favoured musical key. The platform integrates open linked semantic metadata from various Semantic Web, music recommendation and social media data sources as well as content-derived information. The front portal includes suggested links to selected artists and a search functionality from where users can navigate to individual artists' pages. Each artist page contains a biography and links to online audio and video resources. Connections are made using YAGO categories linking artists by various commonalities such as style, geographical location, instrumentation, record label as well as more obscure categories, for instance, artists who have received the same award, have shared the same fate, or belonged to the same organisation or religion. These connections are further enhanced by thematic analysis of journal articles and blog posts as well as content-based similarity measures focussing on high level musical categories.
alo@12 186
alo@12 187 \begin{figure}[!ht]
alo@12 188 \centering
alo@12 189 \includegraphics[scale=0.5]{graphics/architecture.pdf}%\vspace{-5pt}
alo@12 190 \caption{MusicWeb architecture}\vspace{-10pt}
alo@12 191 \label{fig:layers}
alo@12 192 \end{figure}
alo@12 193
alo@13 194 The MusicWeb API uses a number of LOD resources and Semantic Web ontologies to process and aggregate information about artists:
alo@13 195
alo@13 196 \begin{itemize}
alo@13 197
alo@13 198 \item[] \textbf{Musicbrainz} is an online, open, crowd-sourced music encyclopedia that provides reliable and unambiguous identifiers for entities in music publishing metadata, including artists, releases, recordings, performances, etc. Besides the identifiers, which facilitate artist linking, the artist search functionality of MusicWeb relies on the Web services provided by MusicBrainz.
alo@13 199
alo@13 200 \item[] \textbf{DBPedia} is a crowd-sourced community effort to extract structured information from Wikipedia and make it available on the Web. MusicWeb constructs the majority of an artist profile from this resource, including the biography and most of the linking categories to other artists.
alo@13 201
alo@13 202 \item[] \textbf{Sameas.org} manages URI co-references on the Web of Data, which is useful when a MusicBrainz artist identifier needs to be associated with the equivalent DBpedia resource.
alo@13 203
alo@13 204 \item[] \textbf{Youtube} API is used to query associated video content for the artist panel.
alo@13 205
alo@13 206 \item[] \textbf{Echonest} was a music metadata and information retrieval platform for developers and media companies, which has since been integrated into Spotify. The Echonest API is used for recommendations in MusicWeb.
alo@13 207
alo@13 208 \item[] \textbf{Last.fm} is an online music social network and recommender system that collects information about users' listening habits and makes available crowd-sourced tagging data through an API. MusicWeb uses the Last.fm recommendation engine to enrich the user experience.
alo@13 209
alo@13 210 \item[] \textbf{YAGO} is a semantic knowledge base that collates information and structure from Wikipedia, WordNet and GeoNames with high accuracy\cite{Suchanek:WWW:2007}. The ontology makes use of the categories defined in Wikipedia as a principle for semantic linking of entities, while exploiting the clean taxonomy of concepts from WordNet.
alo@13 211
alo@13 212 \item[] \textbf{the Music Ontology}
alo@13 213
alo@13 214 \end{itemize}
alo@13 215
alo@13 216 A user search request accesses the MusicBrainz search services, which return a list of MusicBrainz artist identifiers. The MusicWeb API architecture relies on sameas.org co-references to associate these identifiers with DBpedia artist links.
alo@13 217
mariano@10 218 %% - Brief description of what it is and what it does
mariano@10 219 %% - Architecture (with a nice diagram) [Alo, can you make this in Omnigraffle? I can then adjust/refine]
mariano@10 220 %% - More details about individual components we use (Yago, musicbrainz, sameas, dbpedia etc.)
mariano@10 221 %% - Brief intro to components we developed for artist similarity (just to bridge to Section 4)
mariano@10 222
mariano@10 223 \section{Artist similarity}
mariano@10 224
mariano@10 225 \begin{enumerate}
mariano@10 226 \item Socio-cultural linkage (using linked data)
mariano@10 227 \item Artist similarity by NLP [needs a better subtitle] : MUSIC (picture of interface)
mariano@10 228
mariano@4 229 \begin{itemize}
mariano@4 230 \item Semantic analysis\cite{Landauer1998}
mariano@4 231 \item Topic modeling\cite{Blei2012}
mariano@4 232 \item Entity recognition
mariano@4 233 \item Hierarchical bayesian modeling
mariano@4 234 \item Authors, journals, keywords, tags
mariano@4 235
mariano@4 236 \end{itemize}
mariano@10 237
mariano@10 238 \item Artist similarity by features [i can write this part]
mariano@10 239 \end{enumerate}
mariano@10 240
alo@1 241 \section{Content-based information retrieval}\label{sec:mir}
alo@0 242
alo@0 243 \section{Discussion}\label{sec:discussion}
alo@0 244
alo@0 245 \section{Conclusions}\label{sec:conclusions}
alo@0 246
alo@0 247 %
alo@0 248 % ---- Bibliography ----
alo@0 249 %
alo@0 250 \vspace{-1em}\begin{thebibliography}{5}
alo@0 251 %
alo@0 252
mariano@9 253
mariano@8 254 \bibitem{Song2012}
mariano@6 255 Y.~Song, S.~Dixon and M.~Pearce.
mariano@6 256 \newblock A survey of music recommendation systems and future perspectives
mariano@6 257 \newblock In {\em Proceedings of the 9th International Symposium on Computer Music Modelling and Retrieval}, 2012.
mariano@9 258
mariano@8 259 \bibitem{Su2009}
mariano@8 260 X.~Su and T. M. ~Khoshgoftaar.
mariano@8 261 \newblock A Survey of Collaborative Filtering Techniques.
mariano@8 262 \newblock In {\em Advances in Artificial Intelligence,(Section 3):1–19}, 2009.
mariano@9 263
mariano@9 264 \bibitem{Aucoutourier2002}
mariano@9 265 J. J.~Aucouturier and F~Pachet.
mariano@9 266 \newblock Music Similarity Measures: What is the Use.
mariano@9 267 \newblock In {\em Proceedings of the ISMIR, pages 157–163}, 2002.
mariano@9 268
mariano@9 269 \bibitem{Kim2010}
mariano@9 270 Y.E.~Kim, E.M.~Schmidt, R.~Migneco, B.G.~Morton, P.~Richardson, J.~Scott, J.A.~Speck and D.~Turnbull.
mariano@9 271 \newblock Music Emotion Recognition: A State of the Art Review.
mariano@9 272 \newblock In {\em Proc. of the 11th Intl. Society for Music Information Retrieval (ISMIR) Conf}, 2010.
mariano@6 273
mariano@6 274 \bibitem{Celma2010}
mariano@10 275 \`O.~Celma
mariano@6 276 \newblock Music Recommendation and Discovery:The Long Tail, Long Fail, and Long Play in the Digital Music Space.
mariano@6 277 \newblock Springer Verlag, Heidelberg, 2010.
mariano@8 278
mariano@9 279 \bibitem{Zhou2004}
mariano@9 280 S.~Zhou and R. J.~Mondrag\'on
mariano@9 281 \newblock The rich-club phenomenon in the Internet topology
mariano@9 282 \newblock In {\em Communications Letters, IEEE}, 2004
mariano@8 283
mariano@10 284 \bibitem{Lee2015}
mariano@10 285 J. H.~Lee and R.~Price
mariano@10 286 \newblock Understanding users of commercial music services through personas: design implications.
mariano@10 287 \newblock In {\em Proceedings of the 16th ISMIR Conference}, M\'alaga, Spain, 2015
mariano@10 288
mariano@10 289 \bibitem{Jennings2007}
mariano@9 290 D.~Jennings.
mariano@9 291 \newblock Net, Blogs and Rock ’n’ Rolls: How Digital Discovery Works and What It Means for Consumers.
mariano@9 292 \newblock Nicholas Brealey Pub., 2007
mariano@10 293
mariano@9 294
mariano@9 295 \bibitem{Pachet2005}
mariano@9 296 F.~Pachet
mariano@9 297 \newblock Knowledge management and musical metadata.
mariano@9 298 \newblock In {\em Encyclopedia of Knowledge Management}, Schwartz, D. Ed. Idea Group, 2005
mariano@8 299
mariano@8 300 \bibitem{FazekasRJS10_OMRAS2}
mariano@6 301 G.~Fazekas, Y.~Raimond, K.~Jakobson, and M.~Sandler.
mariano@6 302 \newblock An overview of semantic web activities in the {OMRAS2} project.
mariano@6 303 \newblock {\em Journal of New Music Research (JNMR)}, 39(4), 2010.
mariano@6 304
mariano@6 305 \bibitem{Porter:ISMIR:15}
mariano@6 306 A.~Porter, D.~Bogdanov, R.~Kaye, R.~Tsukanov, and X.~Serra.
mariano@6 307 \newblock Acousticbrainz: a community platform for gathering music information
mariano@6 308 obtained from audio.
mariano@6 309 \newblock In {\em 16th International Society for Music Information Retrieval
alo@0 310 (ISMIR) Conference}, 2015.
alo@0 311
alo@0 312 \bibitem{DBLP:conf/ismir/RaimondASG07}
alo@0 313 Y~Raimond, S.~Abdallah, M.~Sandler, and F.~Giasson.
alo@0 314 \newblock The music ontology.
alo@0 315 \newblock In {\em Proceedings of the 8th International Conference on Music
alo@0 316 Information Retrieval, ISMIR 2007, Vienna, Austria, September 23-27}, 2007.
mariano@4 317
alo@12 318 \bibitem{Suchanek:WWW:2007}
alo@12 319 F.~Suchanek, G.~Kasneci, and G.~Weikum.
alo@12 320 \newblock YAGO: A Core of Semantic Knowledge Unifying WordNet and Wikipedia.
alo@12 321 \newblock In {\em Proceedings of the 16th international World Wide Web conference, May 8–12, 2007, Banff, Alberta, Canada.}, 2007.
mariano@6 322
mariano@6 323 \bibitem{Landauer1998}
mariano@4 324 T.~Landauer, P.~Folt, and D.~Laham.
mariano@4 325 \newblock An introduction to latent semantic analysis
mariano@4 326 \newblock In {\em Discourse processes, 25}, 1998
mariano@4 327
mariano@4 328 \bibitem{Blei2012}
mariano@4 329 D.~Blei, A.~ Ng, and M.I.~Jordan.
mariano@4 330 \newblock Latent Dirichlet Allocation.
mariano@4 331 \newblock In {\em Journal of Machine Learning Research, 3(4-5), 993–1022}, 2012
mariano@6 332
mariano@6 333
alo@0 334 \end{thebibliography}
alo@0 335
alo@0 336 \end{document}