@book{buttcher_information_2010,
	title = {Information {Retrieval}: {Implementing} and {Evaluating} {Search} {Engines}},
	isbn = {978-0-262-52887-0},
	shorttitle = {Information {Retrieval}},
	abstract = {Information retrieval is the foundation for modern search engines. This textbook offers an introduction to the core topics underlying modern search technologies, including algorithms, data structures, indexing, retrieval, and evaluation. The emphasis is on implementation and experimentation; each chapter includes exercises and suggestions for student projects. Wumpus -- a multiuser open-source information retrieval system developed by one of the authors and available online -- provides model implementations and a basis for student work. The modular structure of the book allows instructors to use it in a variety of graduate-level courses, including courses taught from a database systems perspective, traditional information retrieval courses with a focus on IR theory, and courses covering the basics of Web retrieval. In addition to its classroom use, Information Retrieval will be a valuable reference for professionals in computer science, computer engineering, and software engineering.},
	language = {en},
	publisher = {MIT Press},
	author = {Büttcher, Stefan and Clarke, Charles L. A. and Cormack, Gordon V.},
	year = {2010},
}

@inproceedings{clarke_novelty_2008,
	address = {New York, NY, USA},
	series = {{SIGIR} '08},
	title = {Novelty and {Diversity} in {Information} {Retrieval} {Evaluation}},
	isbn = {978-1-60558-164-4},
	url = {http://doi.acm.org/10.1145/1390334.1390446},
	doi = {10.1145/1390334.1390446},
	abstract = {Evaluation measures act as objective functions to be optimized by information retrieval systems. Such objective functions must accurately reflect user requirements, particularly when tuning IR systems and learning ranking functions. Ambiguity in queries and redundancy in retrieved documents are poorly reflected by current evaluation measures. In this paper, we present a framework for evaluation that systematically rewards novelty and diversity. We develop this framework into a specific evaluation measure, based on cumulative gain. We demonstrate the feasibility of our approach using a test collection based on the TREC question answering track.},
	language = {en},
	urldate = {2019-01-27},
	booktitle = {Proceedings of the 31st {Annual} {International} {ACM} {SIGIR} {Conference} on {Research} and {Development} in {Information} {Retrieval}},
	publisher = {ACM},
	author = {Clarke, Charles L. A. and Kolla, Maheedhar and Cormack, Gordon V. and Vechtomova, Olga and Ashkan, Azin and Büttcher, Stefan and MacKinnon, Ian},
	year = {2008},
	pages = {659--666},
}