% select_lefff.bib
@inproceedings{TolVoyLec10LCG,
author = {Tolone, Elsa and Voyatzi, Stavroula and Lecl{\`e}re, Christian},
title = {{Constructions d{\'e}finitoires des tables du Lexique-Grammaire}},
booktitle = {Actes du 29{\`e}me Colloque international sur le Lexique et la Grammaire (LGC'10)},
address = {Belgrade, Serbie},
xorganizer = {Universit{\'e} de Belgrade, Serbie},
editor = {Ljubomir Popovi{\'c} and Cvetana Krstev and Du{\v s}ko Vitas and Gordana Pavlovi{\'c}-La{\v z}eti{\'c} and Ivan Obradovi{\'c}},
year = {2010},
pages = {321--331},
days = {14-18},
month = sep,
pdf = {http://www-igm.univ-mlv.fr/~tolone/publi/clg10bases.pdf},
xpdf = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/clg10bases.pdf},
xhal = {http://hal.archives-ouvertes.fr/hal-00515301},
xarxiv = {http://arxiv.org/abs/1009.1117},
slides = {http://www-igm.univ-mlv.fr/~tolone/publi/clg10bases_slides.pdf},
xslides = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/clg10bases_slides.pdf},
xsite = {http://lgc2010.matf.bg.ac.rs/},
xkeywords = {Natural Language Processing, syntactic lexicon, Lexicon-Grammar},
lang = {FR},
xabstract = {Lexicon-Grammar tables are a very rich syntactic lexicon for the French language. This
linguistic database is nevertheless not directly suitable for use by computer programs, as it is incomplete
and lacks consistency. Tables are defined on the basis of features which are not explicitly recorded in the
lexicon. These features are only described in literature. Our aim is to define for each tables these essential
properties to make them usable in various Natural Language Processing (NLP) applications, such as parsing.}
}
@article{Tol12TAL,
author = {Tolone, Elsa},
title = {{Maintenance du Lexique-Grammaire : Formules d{\'e}finitoires et arbre de classement}},
journal = {Traitement Automatique des Langues (T.A.L.)},
issuetitle = {Ressources Linguistiques Libres},
publisher = {ATALA},
editor = {N{\'u}ria Bel and Beno{\^\i}t Sagot},
year = {2012},
volume = {52},
number = {3},
pdf = {http://www-igm.univ-mlv.fr/~tolone/publi/tal12.pdf},
xpdf = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/tal12.pdf},
xhal = {http://hal.archives-ouvertes.fr/}
}
@incollection{ConTol09,
author = {Constant, Matthieu and Tolone, Elsa},
title = {{A generic tool to generate a lexicon for NLP from Lexicon-Grammar tables}},
booktitle = {Actes du 27e Colloque international sur le lexique et la grammaire (L'Aquila, 10-13 septembre 2008). Seconde partie},
series = {Lingue d'Europa e del Mediterraneo, Grammatica comparata},
publisher = {Aracne},
editor = {Michele De Gioia},
xaddress = {Rome, Italie},
year = {2010},
volume = {1},
pages = {79--93},
month = apr,
isbn = {978-88-548-3166-7},
pdf = {http://www-igm.univ-mlv.fr/~tolone/publi/li-constant-tolone-lgc08.pdf},
xpdf = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/li-constant-tolone-lgc08.pdf},
xhal = {http://hal.archives-ouvertes.fr/hal-00483662},
xarxiv = {http://arxiv.org/abs/1005.5596},
slides = {http://www-igm.univ-mlv.fr/~tolone/publi/lgc08.pdf},
xslides = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/lgc08.pdf},
url = {http://www.aracneeditrice.it/aracneweb/index.php/catalogo/9788854831667-detail.html},
xurl = {http://www.aracneeditrice.it > Catalogo > De Gioia, http://store.aracneeditrice.com/it/libro_new.php?id=3000},
xkeywords = {syntactic lexicon, Lexicon-Grammar, NLP},
lang = {EN},
xabstract = {Lexicon-Grammar tables constitute a large-coverage syntactic lexicon but they cannot be
directly used in Natural Language Processing (NLP) applications because they sometimes rely on implicit
information. In this paper, we introduce \textit{LGExtract}, a generic tool for generating a syntactic
lexicon for NLP from the Lexicon-Grammar tables. It relies on a global table that contains undefined
information and on a unique extraction script including all operations to be performed for all tables.
We also present an experiment that has been conducted to generate a new lexicon of French verbs and
predicative nouns.}
}
@incollection{TolSag11LNAI,
author = {Tolone, Elsa and Sagot, Beno{\^\i}t},
title = {{Using Lexicon-Grammar tables for French verbs in a large-coverage parser}},
booktitle = {Human Language Technology. Challenges for Computer Science and Linguistics. 4th Language and Technology Conference, LTC 2009, Pozna{\'n}, Poland, November 6-8, 2009, Revised Selected Papers},
series = {Lecture Notes in Artificial Intelligence (LNAI)},
publisher = {Springer Verlag},
editor = {Vetulani, Zygmunt},
xaddress = {Pozna{\'n}, Poland},
year = {2011},
volume = {6562},
pages = {183--191},
month = jun,
isbn = {978-3-642-20094-6},
pdf = {http://www-igm.univ-mlv.fr/~tolone/publi/lnai11lglex.pdf},
xpdf = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/lnai11lglex.pdf},
xhal = {http://hal.archives-ouvertes.fr/hal-00461895},
slides = {http://www-igm.univ-mlv.fr/~tolone/publi/ltc09lglex_slides.pdf},
xslides = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/ltc09lglex_slides.pdf},
xkeywords = {syntactic lexicon, Lexicon-Grammar, parsing, evaluation},
lang = {EN},
xabstract = {In this paper, we describe the integration of Lexicon-Grammar tables for French verbs in
the large-coverage FRMG parser and the evaluation of the resulting parser. This integration required a
conversion step so as to extract the syntactic information encoded in Lexicon-Grammar tables and represent
it in the NLP lexical formalism used by FRMG, i.e., the Alexina framework (that of the Le\textit{fff} lexicon,
on which the standard version of FRMG relies). We describe the linguistic basis of this conversion process,
and the resulting lexicon. We compare the results of the FRMG parser on the EASy reference corpus depending
on whether it relies on the verb entries of the Le\textit{fff} or those of the converted Lexicon-Grammar verb
tables.}
}
@inproceedings{TolSagCle12LREC,
author = {Elsa Tolone and Beno{\^\i}t Sagot and {\'E}ric {de La Clergerie}},
title = {{Evaluating and improving syntactic lexica by plugging them within a parser}},
booktitle = {Proceedings of the 8th Language Resources and Evaluation Conference (LREC'12)},
address = {Istanbul, Turkey},
year = {2012},
days = {21-27},
month = may,
pdf = {http://www-igm.univ-mlv.fr/~tolone/publi/lrec12lglex.pdf},
xpdf = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/lrec12lglex.pdf},
xonline = {http://www.lrec-conf.org/proceedings/lrec2012/summaries/525.html},
xhal = {http://hal.archives-ouvertes.fr/hal-00786883},
slides = {http://www-igm.univ-mlv.fr/~tolone/publi/lrec12lglex_slides.pdf},
xslides = {http://infolingu.univ-mlv.fr/Bibliographie/Elsa/lrec12lglex_slides.pdf},
url = {http://www.lrec-conf.org/lrec2012/},
xkeywords = {syntactic lexica, parsing, error mining},
lang = {EN},
xabstract = {We present some evaluation results for four French syntactic lexica, obtained through their conversion to the Alexina format used by
the Le\textit{fff} lexicon (Sagot, 2010), and their integration within the large-coverage TAG-based FRMG parser (de La Clergerie, 2005). The
evaluations are run on two test corpora, annotated with two distinct annotation formats, namely EASy/Passage chunks and relations and
CoNLL dependencies. The information provided by the evaluation results provide valuable feedback about the four lexica. Moreover,
when coupled with error mining techniques, they allow us to identify how these lexica might be improved.},
note = {electronic version (8 pp.)}
}