From fd519b0e45c857b266814994ba8c1421f508e522 Mon Sep 17 00:00:00 2001 From: redpony Date: Tue, 27 Jul 2010 16:13:19 +0000 Subject: preso git-svn-id: https://ws10smt.googlecode.com/svn/trunk@435 ec762483-ff6d-05da-a07a-a48fb63a330f --- .../acl09-short/common-bak/acl-ijcnlp2009.sty | 368 +++ .../pyp_clustering/acl09-short/common-bak/acl.bst | 1322 ++++++++++ .../acl09-short/common-bak/acl08.sty | 344 +++ .../acl09-short/common-bak/algorithmicx.sty | 786 ++++++ .../acl09-short/common-bak/algpseudocode.sty | 92 + .../acl09-short/common-bak/hyphen.sty | 23 + .../acl09-short/common-bak/jeffe.sty | 566 ++++ .../acl09-short/common-bak/prettyref.sty | 37 + .../acl09-short/common-bak/scrunchacl.bst | 1317 ++++++++++ .../acl09-short/common-bak/standard.bib | 2702 ++++++++++++++++++++ 10 files changed, 7557 insertions(+) create mode 100644 report/pyp_clustering/acl09-short/common-bak/acl-ijcnlp2009.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/acl.bst create mode 100644 report/pyp_clustering/acl09-short/common-bak/acl08.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/algorithmicx.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/algpseudocode.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/hyphen.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/jeffe.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/prettyref.sty create mode 100644 report/pyp_clustering/acl09-short/common-bak/scrunchacl.bst create mode 100644 report/pyp_clustering/acl09-short/common-bak/standard.bib (limited to 'report/pyp_clustering/acl09-short/common-bak') diff --git a/report/pyp_clustering/acl09-short/common-bak/acl-ijcnlp2009.sty b/report/pyp_clustering/acl09-short/common-bak/acl-ijcnlp2009.sty new file mode 100644 index 00000000..927779a6 --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/acl-ijcnlp2009.sty @@ -0,0 +1,368 @@ +% File acl-ijcnlp2009.sty +% adapted from -- +% File eacl2006.sty +% September 19, 2005 +% Contact: e.agirre@ehu.es or Sergi.Balari@uab.es + +% This is the LaTeX style file for EACL 2006. It is nearly identical to the +% style files for ACL2005, ACL 2002, ACL 2001, ACL 2000, EACL 95 and EACL +% 99. +% +% Changes made include: adapt layout to A4 and centimeters, widden abstract + +% This is the LaTeX style file for ACL 2000. It is nearly identical to the +% style files for EACL 95 and EACL 99. Minor changes include editing the +% instructions to reflect use of \documentclass rather than \documentstyle +% and removing the white space before the title on the first page +% -- John Chen, June 29, 2000 + +% To convert from submissions prepared using the style file aclsub.sty +% prepared for the ACL 2000 conference, proceed as follows: +% 1) Remove submission-specific information: \whichsession, \id, +% \wordcount, \otherconferences, \area, \keywords +% 2) \summary should be removed. The summary material should come +% after \maketitle and should be in the ``abstract'' environment +% 3) Check all citations. This style should handle citations correctly +% and also allows multiple citations separated by semicolons. +% 4) Check figures and examples. Because the final format is double- +% column, some adjustments may have to be made to fit text in the column +% or to choose full-width (\figure*} figures. +% 5) Change the style reference from aclsub to acl2000, and be sure +% this style file is in your TeX search path + + +% This is the LaTeX style file for EACL-95. 
It is identical to the +% style file for ANLP '94 except that the margins are adjusted for A4 +% paper. -- abney 13 Dec 94 + +% The ANLP '94 style file is a slightly modified +% version of the style used for AAAI and IJCAI, using some changes +% prepared by Fernando Pereira and others and some minor changes +% by Paul Jacobs. + +% Papers prepared using the aclsub.sty file and acl.bst bibtex style +% should be easily converted to final format using this style. +% (1) Submission information (\wordcount, \subject, and \makeidpage) +% should be removed. +% (2) \summary should be removed. The summary material should come +% after \maketitle and should be in the ``abstract'' environment +% (between \begin{abstract} and \end{abstract}). +% (3) Check all citations. This style should handle citations correctly +% and also allows multiple citations separated by semicolons. +% (4) Check figures and examples. Because the final format is double- +% column, some adjustments may have to be made to fit text in the column +% or to choose full-width (\figure*} figures. + +% Place this in a file called aclap.sty in the TeX search path. +% (Placing it in the same directory as the paper should also work.) + +% Prepared by Peter F. Patel-Schneider, liberally using the ideas of +% other style hackers, including Barbara Beeton. +% This style is NOT guaranteed to work. It is provided in the hope +% that it will make the preparation of papers easier. +% +% There are undoubtably bugs in this style. If you make bug fixes, +% improvements, etc. please let me know. My e-mail address is: +% pfps@research.att.com + +% Papers are to be prepared using the ``acl'' bibliography style, +% as follows: +% \documentclass[11pt]{article} +% \usepackage{acl2000} +% \title{Title} +% \author{Author 1 \and Author 2 \\ Address line \\ Address line \And +% Author 3 \\ Address line \\ Address line} +% \begin{document} +% ... +% \bibliography{bibliography-file} +% \bibliographystyle{acl} +% \end{document} + +% Author information can be set in various styles: +% For several authors from the same institution: +% \author{Author 1 \and ... \and Author n \\ +% Address line \\ ... \\ Address line} +% if the names do not fit well on one line use +% Author 1 \\ {\bf Author 2} \\ ... \\ {\bf Author n} \\ +% For authors from different institutions: +% \author{Author 1 \\ Address line \\ ... \\ Address line +% \And ... \And +% Author n \\ Address line \\ ... \\ Address line} +% To start a seperate ``row'' of authors use \AND, as in +% \author{Author 1 \\ Address line \\ ... \\ Address line +% \AND +% Author 2 \\ Address line \\ ... \\ Address line \And +% Author 3 \\ Address line \\ ... \\ Address line} + +% If the title and author information does not fit in the area allocated, +% place \setlength\titlebox{} right after +% \usepackage{acl2000} +% where can be something larger than 2.25in + +% \typeout{Conference Style for ACL 2000 -- released June 20, 2000} +\typeout{Conference Style for ACL 2005 -- released Octobe 11, 2004} + +% NOTE: Some laser printers have a serious problem printing TeX output. +% These printing devices, commonly known as ``write-white'' laser +% printers, tend to make characters too light. To get around this +% problem, a darker set of fonts must be created for these devices. 
+% + +%% % Physical page layout - slightly modified from IJCAI by pj +%% \setlength\topmargin{0.0in} \setlength\oddsidemargin{-0.0in} +%% \setlength\textheight{9.0in} \setlength\textwidth{6.5in} +%% \setlength\columnsep{0.2in} +%% \newlength\titlebox +%% \setlength\titlebox{2.25in} +%% \setlength\headheight{0pt} \setlength\headsep{0pt} +%% %\setlength\footheight{0pt} +%% \setlength\footskip{0pt} +%% \thispagestyle{empty} \pagestyle{empty} +%% \flushbottom \twocolumn \sloppy + +%% Original A4 version of page layout +%% \setlength\topmargin{-0.45cm} % changed by Rz -1.4 +%% \setlength\oddsidemargin{.8mm} % was -0cm, changed by Rz +%% \setlength\textheight{23.5cm} +%% \setlength\textwidth{15.8cm} +%% \setlength\columnsep{0.6cm} +%% \newlength\titlebox +%% \setlength\titlebox{2.00in} +%% \setlength\headheight{5pt} +%% \setlength\headsep{0pt} +%% \setlength\footheight{0pt} +%% \setlength\footskip{0pt} +%% \thispagestyle{empty} +%% \pagestyle{empty} + +% A4 modified by Eneko +\setlength{\paperwidth}{21cm} % A4 +\setlength{\paperheight}{29.7cm}% A4 +\setlength\topmargin{-0.5cm} +\setlength\oddsidemargin{0cm} +\setlength\textheight{24.7cm} +\setlength\textwidth{16.0cm} +\setlength\columnsep{0.6cm} +\newlength\titlebox +\setlength\titlebox{2.00in} +\setlength\headheight{5pt} +\setlength\headsep{0pt} +\thispagestyle{empty} +\pagestyle{empty} + + +\flushbottom \twocolumn \sloppy + +% We're never going to need a table of contents, so just flush it to +% save space --- suggested by drstrip@sandia-2 +\def\addcontentsline#1#2#3{} + +% Title stuff, taken from deproc. +\def\maketitle{\par + \begingroup + \def\thefootnote{\fnsymbol{footnote}} + \def\@makefnmark{\hbox to 0pt{$^{\@thefnmark}$\hss}} + \twocolumn[\@maketitle] \@thanks + \endgroup + \setcounter{footnote}{0} + \let\maketitle\relax \let\@maketitle\relax + \gdef\@thanks{}\gdef\@author{}\gdef\@title{}\let\thanks\relax} +\def\@maketitle{\vbox to \titlebox{\hsize\textwidth + \linewidth\hsize \vskip 0.125in minus 0.125in \centering + {\Large\bf \@title \par} \vskip 0.2in plus 1fil minus 0.1in + {\def\and{\unskip\enspace{\rm and}\enspace}% + \def\And{\end{tabular}\hss \egroup \hskip 1in plus 2fil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf}% + \def\AND{\end{tabular}\hss\egroup \hfil\hfil\egroup + \vskip 0.25in plus 1fil minus 0.125in + \hbox to \linewidth\bgroup\large \hfil\hfil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf} + \hbox to \linewidth\bgroup\large \hfil\hfil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf\@author + \end{tabular}\hss\egroup + \hfil\hfil\egroup} + \vskip 0.3in plus 2fil minus 0.1in +}} + +% margins for abstract +\renewenvironment{abstract}% + {\centerline{\large\bf Abstract}% + \begin{list}{}% + {\setlength{\rightmargin}{0.6cm}% + \setlength{\leftmargin}{0.6cm}}% + \item[]\ignorespaces}% + {\unskip\end{list}} + +%\renewenvironment{abstract}{\centerline{\large\bf +% Abstract}\vspace{0.5ex}\begin{quote}}{\par\end{quote}\vskip 1ex} + + +% bibliography + +\def\thebibliography#1{\section*{References} + \global\def\@listi{\leftmargin\leftmargini + \labelwidth\leftmargini \advance\labelwidth-\labelsep + \topsep 1pt plus 2pt minus 1pt + \parsep 0.25ex plus 1pt \itemsep 0.25ex plus 1pt} + \list {[\arabic{enumi}]}{\settowidth\labelwidth{[#1]}\leftmargin\labelwidth + \advance\leftmargin\labelsep\usecounter{enumi}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy + \sfcode`\.=1000\relax} + +\def\@up#1{\raise.2ex\hbox{#1}} + +% most of cite format is from aclsub.sty by SMS + +% don't box citations, separate 
with ; and a space +% also, make the penalty between citations negative: a good place to break +% changed comma back to semicolon pj 2/1/90 +% \def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi +% \def\@citea{}\@cite{\@for\@citeb:=#2\do +% {\@citea\def\@citea{;\penalty\@citeseppen\ }\@ifundefined +% {b@\@citeb}{{\bf ?}\@warning +% {Citation `\@citeb' on page \thepage \space undefined}}% +% {\csname b@\@citeb\endcsname}}}{#1}} + +% don't box citations, separate with ; and a space +% Replaced for multiple citations (pj) +% don't box citations and also add space, semicolon between multiple citations +\def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi + \def\@citea{}\@cite{\@for\@citeb:=#2\do + {\@citea\def\@citea{; }\@ifundefined + {b@\@citeb}{{\bf ?}\@warning + {Citation `\@citeb' on page \thepage \space undefined}}% + {\csname b@\@citeb\endcsname}}}{#1}} + +% Allow short (name-less) citations, when used in +% conjunction with a bibliography style that creates labels like +% \citename{, } +% +\let\@internalcite\cite +\def\cite{\def\citename##1{##1, }\@internalcite} +\def\shortcite{\def\citename##1{}\@internalcite} +\def\newcite{\def\citename##1{{\frenchspacing##1} (}\@internalciteb} + +% Macros for \newcite, which leaves name in running text, and is +% otherwise like \shortcite. +\def\@citexb[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi + \def\@citea{}\@newcite{\@for\@citeb:=#2\do + {\@citea\def\@citea{;\penalty\@m\ }\@ifundefined + {b@\@citeb}{{\bf ?}\@warning + {Citation `\@citeb' on page \thepage \space undefined}}% +{\csname b@\@citeb\endcsname}}}{#1}} +\def\@internalciteb{\@ifnextchar [{\@tempswatrue\@citexb}{\@tempswafalse\@citexb[]}} + +\def\@newcite#1#2{{#1\if@tempswa, #2\fi)}} + +\def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} + +%%% More changes made by SMS (originals in latex.tex) +% Use parentheses instead of square brackets in the text. +\def\@cite#1#2{({#1\if@tempswa , #2\fi})} + +% Don't put a label in the bibliography at all. Just use the unlabeled format +% instead. 
+\def\thebibliography#1{\vskip\parskip% +\vskip\baselineskip% +\def\baselinestretch{1}% +\ifx\@currsize\normalsize\@normalsize\else\@currsize\fi% +\vskip-\parskip% +\vskip-\baselineskip% +\section*{References\@mkboth + {References}{References}}\list + {}{\setlength{\labelwidth}{0pt}\setlength{\leftmargin}{\parindent} + \setlength{\itemindent}{-\parindent}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy\clubpenalty4000\widowpenalty4000 + \sfcode`\.=1000\relax} +\let\endthebibliography=\endlist + +% Allow for a bibliography of sources of attested examples +\def\thesourcebibliography#1{\vskip\parskip% +\vskip\baselineskip% +\def\baselinestretch{1}% +\ifx\@currsize\normalsize\@normalsize\else\@currsize\fi% +\vskip-\parskip% +\vskip-\baselineskip% +\section*{Sources of Attested Examples\@mkboth + {Sources of Attested Examples}{Sources of Attested Examples}}\list + {}{\setlength{\labelwidth}{0pt}\setlength{\leftmargin}{\parindent} + \setlength{\itemindent}{-\parindent}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy\clubpenalty4000\widowpenalty4000 + \sfcode`\.=1000\relax} +\let\endthesourcebibliography=\endlist + +\def\@lbibitem[#1]#2{\item[]\if@filesw + { \def\protect##1{\string ##1\space}\immediate + \write\@auxout{\string\bibcite{#2}{#1}}\fi\ignorespaces}} + +\def\@bibitem#1{\item\if@filesw \immediate\write\@auxout + {\string\bibcite{#1}{\the\c@enumi}}\fi\ignorespaces} + +% sections with less space +\def\section{\@startsection {section}{1}{\z@}{-2.0ex plus + -0.5ex minus -.2ex}{1.5ex plus 0.3ex minus .2ex}{\large\bf\raggedright}} +\def\subsection{\@startsection{subsection}{2}{\z@}{-1.8ex plus + -0.5ex minus -.2ex}{0.8ex plus .2ex}{\normalsize\bf\raggedright}} +%% changed by KO to - values to get teh initial parindent right +\def\subsubsection{\@startsection{subsubsection}{3}{\z@}{-1.5ex plus + -0.5ex minus -.2ex}{0.5ex plus .2ex}{\normalsize\bf\raggedright}} +\def\paragraph{\@startsection{paragraph}{4}{\z@}{1.5ex plus + 0.5ex minus .2ex}{-1em}{\normalsize\bf}} +\def\subparagraph{\@startsection{subparagraph}{5}{\parindent}{1.5ex plus + 0.5ex minus .2ex}{-1em}{\normalsize\bf}} + +% Footnotes +\footnotesep 6.65pt % +\skip\footins 9pt plus 4pt minus 2pt +\def\footnoterule{\kern-3pt \hrule width 5pc \kern 2.6pt } +\setcounter{footnote}{0} + +% Lists and paragraphs +\parindent 1em +\topsep 4pt plus 1pt minus 2pt +\partopsep 1pt plus 0.5pt minus 0.5pt +\itemsep 2pt plus 1pt minus 0.5pt +\parsep 2pt plus 1pt minus 0.5pt + +\leftmargin 2em \leftmargini\leftmargin \leftmarginii 2em +\leftmarginiii 1.5em \leftmarginiv 1.0em \leftmarginv .5em \leftmarginvi .5em +\labelwidth\leftmargini\advance\labelwidth-\labelsep \labelsep 5pt + +\def\@listi{\leftmargin\leftmargini} +\def\@listii{\leftmargin\leftmarginii + \labelwidth\leftmarginii\advance\labelwidth-\labelsep + \topsep 2pt plus 1pt minus 0.5pt + \parsep 1pt plus 0.5pt minus 0.5pt + \itemsep \parsep} +\def\@listiii{\leftmargin\leftmarginiii + \labelwidth\leftmarginiii\advance\labelwidth-\labelsep + \topsep 1pt plus 0.5pt minus 0.5pt + \parsep \z@ \partopsep 0.5pt plus 0pt minus 0.5pt + \itemsep \topsep} +\def\@listiv{\leftmargin\leftmarginiv + \labelwidth\leftmarginiv\advance\labelwidth-\labelsep} +\def\@listv{\leftmargin\leftmarginv + \labelwidth\leftmarginv\advance\labelwidth-\labelsep} +\def\@listvi{\leftmargin\leftmarginvi + \labelwidth\leftmarginvi\advance\labelwidth-\labelsep} + +\abovedisplayskip 7pt plus2pt minus5pt% +\belowdisplayskip \abovedisplayskip +\abovedisplayshortskip 0pt plus3pt% 
+\belowdisplayshortskip 4pt plus3pt minus3pt% + +% Less leading in most fonts (due to the narrow columns) +% The choices were between 1-pt and 1.5-pt leading +\def\@normalsize{\@setsize\normalsize{11pt}\xpt\@xpt} +\def\small{\@setsize\small{10pt}\ixpt\@ixpt} +\def\footnotesize{\@setsize\footnotesize{10pt}\ixpt\@ixpt} +\def\scriptsize{\@setsize\scriptsize{8pt}\viipt\@viipt} +\def\tiny{\@setsize\tiny{7pt}\vipt\@vipt} +\def\large{\@setsize\large{14pt}\xiipt\@xiipt} +\def\Large{\@setsize\Large{16pt}\xivpt\@xivpt} +\def\LARGE{\@setsize\LARGE{20pt}\xviipt\@xviipt} +\def\huge{\@setsize\huge{23pt}\xxpt\@xxpt} +\def\Huge{\@setsize\Huge{28pt}\xxvpt\@xxvpt} diff --git a/report/pyp_clustering/acl09-short/common-bak/acl.bst b/report/pyp_clustering/acl09-short/common-bak/acl.bst new file mode 100644 index 00000000..4396d4dc --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/acl.bst @@ -0,0 +1,1322 @@ + +% BibTeX `acl' style file for BibTeX version 0.99c, LaTeX version 2.09 +% This version was made by modifying `aaai-named' format based on the master +% file by Oren Patashnik (PATASHNIK@SCORE.STANFORD.EDU) + +% Copyright (C) 1985, all rights reserved. +% Modifications Copyright 1988, Peter F. Patel-Schneider +% Further modifictions by Stuart Shieber, 1991, and Fernando Pereira, 1992. +% Copying of this file is authorized only if either +% (1) you make absolutely no changes to your copy, including name, or +% (2) if you do make changes, you name it something other than +% btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst. +% This restriction helps ensure that all standard styles are identical. + +% There are undoubtably bugs in this style. If you make bug fixes, +% improvements, etc. please let me know. My e-mail address is: +% pfps@spar.slb.com + +% Citation format: [author-last-name, year] +% [author-last-name and author-last-name, year] +% [author-last-name {\em et al.}, year] +% +% Reference list ordering: alphabetical by author or whatever passes +% for author in the absence of one. +% +% This BibTeX style has support for short (year only) citations. This +% is done by having the citations actually look like +% \citename{name-info, }year +% The LaTeX style has to have the following +% \let\@internalcite\cite +% \def\cite{\def\citename##1{##1}\@internalcite} +% \def\shortcite{\def\citename##1{}\@internalcite} +% \def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} +% which makes \shortcite the macro for short citations. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Changes made by SMS for thesis style +% no emphasis on "et al." +% "Ph.D." 
includes periods (not "PhD") +% moved year to immediately after author's name +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +ENTRY + { address + author + booktitle + chapter + edition + editor + howpublished + institution + journal + key + month + note + number + organization + pages + publisher + school + series + title + type + volume + year + } + {} + { label extra.label sort.label } + +INTEGERS { output.state before.all mid.sentence after.sentence after.block } + +FUNCTION {init.state.consts} +{ #0 'before.all := + #1 'mid.sentence := + #2 'after.sentence := + #3 'after.block := +} + +STRINGS { s t } + +FUNCTION {output.nonnull} +{ 's := + output.state mid.sentence = + { ", " * write$ } + { output.state after.block = + { add.period$ write$ + newline$ + "\newblock " write$ + } + { output.state before.all = + 'write$ + { add.period$ " " * write$ } + if$ + } + if$ + mid.sentence 'output.state := + } + if$ + s +} + +FUNCTION {output} +{ duplicate$ empty$ + 'pop$ + 'output.nonnull + if$ +} + +FUNCTION {output.check} +{ 't := + duplicate$ empty$ + { pop$ "empty " t * " in " * cite$ * warning$ } + 'output.nonnull + if$ +} + +FUNCTION {output.bibitem} +{ newline$ + + "\bibitem[" write$ + label write$ + "]{" write$ + + cite$ write$ + "}" write$ + newline$ + "" + before.all 'output.state := +} + +FUNCTION {fin.entry} +{ add.period$ + write$ + newline$ +} + +FUNCTION {new.block} +{ output.state before.all = + 'skip$ + { after.block 'output.state := } + if$ +} + +FUNCTION {new.sentence} +{ output.state after.block = + 'skip$ + { output.state before.all = + 'skip$ + { after.sentence 'output.state := } + if$ + } + if$ +} + +FUNCTION {not} +{ { #0 } + { #1 } + if$ +} + +FUNCTION {and} +{ 'skip$ + { pop$ #0 } + if$ +} + +FUNCTION {or} +{ { pop$ #1 } + 'skip$ + if$ +} + +FUNCTION {new.block.checka} +{ empty$ + 'skip$ + 'new.block + if$ +} + +FUNCTION {new.block.checkb} +{ empty$ + swap$ empty$ + and + 'skip$ + 'new.block + if$ +} + +FUNCTION {new.sentence.checka} +{ empty$ + 'skip$ + 'new.sentence + if$ +} + +FUNCTION {new.sentence.checkb} +{ empty$ + swap$ empty$ + and + 'skip$ + 'new.sentence + if$ +} + +FUNCTION {field.or.null} +{ duplicate$ empty$ + { pop$ "" } + 'skip$ + if$ +} + +FUNCTION {emphasize} +{ duplicate$ empty$ + { pop$ "" } + { "{\em " swap$ * "}" * } + if$ +} + +INTEGERS { nameptr namesleft numnames } + +FUNCTION {format.names} +{ 's := + #1 'nameptr := + s num.names$ 'numnames := + numnames 'namesleft := + { namesleft #0 > } + + { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't := + + nameptr #1 > + { namesleft #1 > + { ", " * t * } + { numnames #2 > + { "," * } + 'skip$ + if$ + t "others" = + { " et~al." 
* } + { " and " * t * } + if$ + } + if$ + } + 't + if$ + nameptr #1 + 'nameptr := + namesleft #1 - 'namesleft := + } + while$ +} + +FUNCTION {format.authors} +{ author empty$ + { "" } + { author format.names } + if$ +} + +FUNCTION {format.editors} +{ editor empty$ + { "" } + { editor format.names + editor num.names$ #1 > + { ", editors" * } + { ", editor" * } + if$ + } + if$ +} + +FUNCTION {format.title} +{ title empty$ + { "" } + + { title "t" change.case$ } + + if$ +} + +FUNCTION {n.dashify} +{ 't := + "" + { t empty$ not } + { t #1 #1 substring$ "-" = + { t #1 #2 substring$ "--" = not + { "--" * + t #2 global.max$ substring$ 't := + } + { { t #1 #1 substring$ "-" = } + { "-" * + t #2 global.max$ substring$ 't := + } + while$ + } + if$ + } + { t #1 #1 substring$ * + t #2 global.max$ substring$ 't := + } + if$ + } + while$ +} + +FUNCTION {format.date} +{ year empty$ + { month empty$ + { "" } + { "there's a month but no year in " cite$ * warning$ + month + } + if$ + } + { month empty$ + { "" } + { month } + if$ + } + if$ +} + +FUNCTION {format.btitle} +{ title emphasize +} + +FUNCTION {tie.or.space.connect} +{ duplicate$ text.length$ #3 < + { "~" } + { " " } + if$ + swap$ * * +} + +FUNCTION {either.or.check} +{ empty$ + 'pop$ + { "can't use both " swap$ * " fields in " * cite$ * warning$ } + if$ +} + +FUNCTION {format.bvolume} +{ volume empty$ + { "" } + { "volume" volume tie.or.space.connect + series empty$ + 'skip$ + { " of " * series emphasize * } + if$ + "volume and number" number either.or.check + } + if$ +} + +FUNCTION {format.number.series} +{ volume empty$ + { number empty$ + { series field.or.null } + { output.state mid.sentence = + { "number" } + { "Number" } + if$ + number tie.or.space.connect + series empty$ + { "there's a number but no series in " cite$ * warning$ } + { " in " * series * } + if$ + } + if$ + } + { "" } + if$ +} + +FUNCTION {format.edition} +{ edition empty$ + { "" } + { output.state mid.sentence = + { edition "l" change.case$ " edition" * } + { edition "t" change.case$ " edition" * } + if$ + } + if$ +} + +INTEGERS { multiresult } + +FUNCTION {multi.page.check} +{ 't := + #0 'multiresult := + { multiresult not + t empty$ not + and + } + { t #1 #1 substring$ + duplicate$ "-" = + swap$ duplicate$ "," = + swap$ "+" = + or or + { #1 'multiresult := } + { t #2 global.max$ substring$ 't := } + if$ + } + while$ + multiresult +} + +FUNCTION {format.pages} +{ pages empty$ + { "" } + { pages multi.page.check + { "pages" pages n.dashify tie.or.space.connect } + { "page" pages tie.or.space.connect } + if$ + } + if$ +} + +FUNCTION {format.year.label} +{ year extra.label * +} + +FUNCTION {format.vol.num.pages} +{ volume field.or.null + number empty$ + 'skip$ + { "(" number * ")" * * + volume empty$ + { "there's a number but no volume in " cite$ * warning$ } + 'skip$ + if$ + } + if$ + pages empty$ + 'skip$ + { duplicate$ empty$ + { pop$ format.pages } + { ":" * pages n.dashify * } + if$ + } + if$ +} + +FUNCTION {format.chapter.pages} +{ chapter empty$ + 'format.pages + { type empty$ + { "chapter" } + { type "l" change.case$ } + if$ + chapter tie.or.space.connect + pages empty$ + 'skip$ + { ", " * format.pages * } + if$ + } + if$ +} + +FUNCTION {format.in.ed.booktitle} +{ booktitle empty$ + { "" } + { editor empty$ + { "In " booktitle emphasize * } + { "In " format.editors * ", " * booktitle emphasize * } + if$ + } + if$ +} + +FUNCTION {empty.misc.check} +{ author empty$ title empty$ howpublished empty$ + month empty$ year empty$ note empty$ + and and and and and + + key empty$ 
not and + + { "all relevant fields are empty in " cite$ * warning$ } + 'skip$ + if$ +} + +FUNCTION {format.thesis.type} +{ type empty$ + 'skip$ + { pop$ + type "t" change.case$ + } + if$ +} + +FUNCTION {format.tr.number} +{ type empty$ + { "Technical Report" } + 'type + if$ + number empty$ + { "t" change.case$ } + { number tie.or.space.connect } + if$ +} + +FUNCTION {format.article.crossref} +{ key empty$ + { journal empty$ + { "need key or journal for " cite$ * " to crossref " * crossref * + warning$ + "" + } + { "In {\em " journal * "\/}" * } + if$ + } + { "In " key * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {format.crossref.editor} +{ editor #1 "{vv~}{ll}" format.name$ + editor num.names$ duplicate$ + #2 > + { pop$ " et~al." * } + { #2 < + 'skip$ + { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" = + { " et~al." * } + { " and " * editor #2 "{vv~}{ll}" format.name$ * } + if$ + } + if$ + } + if$ +} + +FUNCTION {format.book.crossref} +{ volume empty$ + { "empty volume in " cite$ * "'s crossref of " * crossref * warning$ + "In " + } + { "Volume" volume tie.or.space.connect + " of " * + } + if$ + editor empty$ + editor field.or.null author field.or.null = + or + { key empty$ + { series empty$ + { "need editor, key, or series for " cite$ * " to crossref " * + crossref * warning$ + "" * + } + { "{\em " * series * "\/}" * } + if$ + } + { key * } + if$ + } + { format.crossref.editor * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {format.incoll.inproc.crossref} +{ editor empty$ + editor field.or.null author field.or.null = + or + { key empty$ + { booktitle empty$ + { "need editor, key, or booktitle for " cite$ * " to crossref " * + crossref * warning$ + "" + } + { "In {\em " booktitle * "\/}" * } + if$ + } + { "In " key * } + if$ + } + { "In " format.crossref.editor * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {article} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { journal emphasize "journal" output.check + format.vol.num.pages output + format.date output + } + { format.article.crossref output.nonnull + format.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {book} +{ output.bibitem + author empty$ + { format.editors "author and editor" output.check } + { format.authors output.nonnull + crossref missing$ + { "author and editor" editor either.or.check } + 'skip$ + if$ + } + if$ + new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + crossref missing$ + { format.bvolume output + new.block + format.number.series output + new.sentence + publisher "publisher" output.check + address output + } + { new.block + format.book.crossref output.nonnull + } + if$ + format.edition output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {booklet} +{ output.bibitem + format.authors output + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + howpublished address new.block.checkb + howpublished output + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {inbook} +{ output.bibitem + author empty$ + { format.editors "author and editor" output.check } + { format.authors output.nonnull + crossref missing$ + { "author and editor" editor either.or.check } + 'skip$ + if$ + } + if$ + format.year.label "year" output.check + new.block + new.block + 
format.btitle "title" output.check + crossref missing$ + { format.bvolume output + format.chapter.pages "chapter and pages" output.check + new.block + format.number.series output + new.sentence + publisher "publisher" output.check + address output + } + { format.chapter.pages "chapter and pages" output.check + new.block + format.book.crossref output.nonnull + } + if$ + format.edition output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {incollection} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { format.in.ed.booktitle "booktitle" output.check + format.bvolume output + format.number.series output + format.chapter.pages output + new.sentence + publisher "publisher" output.check + address output + format.edition output + format.date output + } + { format.incoll.inproc.crossref output.nonnull + format.chapter.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {inproceedings} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { format.in.ed.booktitle "booktitle" output.check + format.bvolume output + format.number.series output + format.pages output + address empty$ + { organization publisher new.sentence.checkb + organization output + publisher output + format.date output + } + { address output.nonnull + format.date output + new.sentence + organization output + publisher output + } + if$ + } + { format.incoll.inproc.crossref output.nonnull + format.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {conference} { inproceedings } + +FUNCTION {manual} +{ output.bibitem + author empty$ + { organization empty$ + 'skip$ + { organization output.nonnull + address output + } + if$ + } + { format.authors output.nonnull } + if$ + format.year.label "year" output.check + new.block + new.block + format.btitle "title" output.check + author empty$ + { organization empty$ + { address new.block.checka + address output + } + 'skip$ + if$ + } + { organization address new.block.checkb + organization output + address output + } + if$ + format.edition output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {mastersthesis} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + "Master's thesis" format.thesis.type output.nonnull + school "school" output.check + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {misc} +{ output.bibitem + format.authors output + new.block + format.year.label output + new.block + title howpublished new.block.checkb + format.title output + howpublished new.block.checka + howpublished output + format.date output + new.block + note output + fin.entry + empty.misc.check +} + +FUNCTION {phdthesis} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + new.block + "{Ph.D.} thesis" format.thesis.type output.nonnull + school "school" output.check + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {proceedings} +{ output.bibitem + editor empty$ + { organization output } + { format.editors output.nonnull } + if$ + 
new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + format.bvolume output + format.number.series output + address empty$ + { editor empty$ + { publisher new.sentence.checka } + { organization publisher new.sentence.checkb + organization output + } + if$ + publisher output + format.date output + } + { address output.nonnull + format.date output + new.sentence + editor empty$ + 'skip$ + { organization output } + if$ + publisher output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {techreport} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + format.tr.number output.nonnull + institution "institution" output.check + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {unpublished} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + note "note" output.check + format.date output + fin.entry +} + +FUNCTION {default.type} { misc } + +MACRO {jan} {"January"} + +MACRO {feb} {"February"} + +MACRO {mar} {"March"} + +MACRO {apr} {"April"} + +MACRO {may} {"May"} + +MACRO {jun} {"June"} + +MACRO {jul} {"July"} + +MACRO {aug} {"August"} + +MACRO {sep} {"September"} + +MACRO {oct} {"October"} + +MACRO {nov} {"November"} + +MACRO {dec} {"December"} + +MACRO {acmcs} {"ACM Computing Surveys"} + +MACRO {acta} {"Acta Informatica"} + +MACRO {cacm} {"Communications of the ACM"} + +MACRO {ibmjrd} {"IBM Journal of Research and Development"} + +MACRO {ibmsj} {"IBM Systems Journal"} + +MACRO {ieeese} {"IEEE Transactions on Software Engineering"} + +MACRO {ieeetc} {"IEEE Transactions on Computers"} + +MACRO {ieeetcad} + {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"} + +MACRO {ipl} {"Information Processing Letters"} + +MACRO {jacm} {"Journal of the ACM"} + +MACRO {jcss} {"Journal of Computer and System Sciences"} + +MACRO {scp} {"Science of Computer Programming"} + +MACRO {sicomp} {"SIAM Journal on Computing"} + +MACRO {tocs} {"ACM Transactions on Computer Systems"} + +MACRO {tods} {"ACM Transactions on Database Systems"} + +MACRO {tog} {"ACM Transactions on Graphics"} + +MACRO {toms} {"ACM Transactions on Mathematical Software"} + +MACRO {toois} {"ACM Transactions on Office Information Systems"} + +MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"} + +MACRO {tcs} {"Theoretical Computer Science"} + +READ + +FUNCTION {sortify} +{ purify$ + "l" change.case$ +} + +INTEGERS { len } + +FUNCTION {chop.word} +{ 's := + 'len := + s #1 len substring$ = + { s len #1 + global.max$ substring$ } + 's + if$ +} + +INTEGERS { et.al.char.used } + +FUNCTION {initialize.et.al.char.used} +{ #0 'et.al.char.used := +} + +EXECUTE {initialize.et.al.char.used} + +FUNCTION {format.lab.names} +{ 's := + s num.names$ 'numnames := + + numnames #1 = + { s #1 "{vv }{ll}" format.name$ } + { numnames #2 = + { s #1 "{vv }{ll }and " format.name$ s #2 "{vv }{ll}" format.name$ * + } + { s #1 "{vv }{ll }\bgroup et al.\egroup " format.name$ } + if$ + } + if$ + +} + +FUNCTION {author.key.label} +{ author empty$ + { key empty$ + + { cite$ #1 #3 substring$ } + + { key #3 text.prefix$ } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {author.editor.key.label} +{ author empty$ + { editor empty$ + { key empty$ + + { cite$ #1 #3 substring$ } + + { key #3 text.prefix$ } 
+ if$ + } + { editor format.lab.names } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {author.key.organization.label} +{ author empty$ + { key empty$ + { organization empty$ + + { cite$ #1 #3 substring$ } + + { "The " #4 organization chop.word #3 text.prefix$ } + if$ + } + { key #3 text.prefix$ } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {editor.key.organization.label} +{ editor empty$ + { key empty$ + { organization empty$ + + { cite$ #1 #3 substring$ } + + { "The " #4 organization chop.word #3 text.prefix$ } + if$ + } + { key #3 text.prefix$ } + if$ + } + { editor format.lab.names } + if$ +} + +FUNCTION {calc.label} +{ type$ "book" = + type$ "inbook" = + or + 'author.editor.key.label + { type$ "proceedings" = + 'editor.key.organization.label + { type$ "manual" = + 'author.key.organization.label + 'author.key.label + if$ + } + if$ + } + if$ + duplicate$ + + "\protect\citename{" swap$ * "}" * + year field.or.null purify$ * + 'label := + year field.or.null purify$ * + + sortify 'sort.label := +} + +FUNCTION {sort.format.names} +{ 's := + #1 'nameptr := + "" + s num.names$ 'numnames := + numnames 'namesleft := + { namesleft #0 > } + { nameptr #1 > + { " " * } + 'skip$ + if$ + + s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't := + + nameptr numnames = t "others" = and + { "et al" * } + { t sortify * } + if$ + nameptr #1 + 'nameptr := + namesleft #1 - 'namesleft := + } + while$ +} + +FUNCTION {sort.format.title} +{ 't := + "A " #2 + "An " #3 + "The " #4 t chop.word + chop.word + chop.word + sortify + #1 global.max$ substring$ +} + +FUNCTION {author.sort} +{ author empty$ + { key empty$ + { "to sort, need author or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {author.editor.sort} +{ author empty$ + { editor empty$ + { key empty$ + { "to sort, need author, editor, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { editor sort.format.names } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {author.organization.sort} +{ author empty$ + { organization empty$ + { key empty$ + { "to sort, need author, organization, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { "The " #4 organization chop.word sortify } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {editor.organization.sort} +{ editor empty$ + { organization empty$ + { key empty$ + { "to sort, need editor, organization, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { "The " #4 organization chop.word sortify } + if$ + } + { editor sort.format.names } + if$ +} + +FUNCTION {presort} + +{ calc.label + sort.label + " " + * + type$ "book" = + + type$ "inbook" = + or + 'author.editor.sort + { type$ "proceedings" = + 'editor.organization.sort + { type$ "manual" = + 'author.organization.sort + 'author.sort + if$ + } + if$ + } + if$ + + * + + " " + * + year field.or.null sortify + * + " " + * + title field.or.null + sort.format.title + * + #1 entry.max$ substring$ + 'sort.key$ := +} + +ITERATE {presort} + +SORT + +STRINGS { longest.label last.sort.label next.extra } + +INTEGERS { longest.label.width last.extra.num } + +FUNCTION {initialize.longest.label} +{ "" 'longest.label := + #0 int.to.chr$ 'last.sort.label := + "" 'next.extra := + #0 'longest.label.width := + #0 'last.extra.num := +} + +FUNCTION {forward.pass} +{ last.sort.label sort.label = + { last.extra.num #1 + 'last.extra.num := + last.extra.num int.to.chr$ 'extra.label := + } + { "a" 
chr.to.int$ 'last.extra.num := + "" 'extra.label := + sort.label 'last.sort.label := + } + if$ +} + +FUNCTION {reverse.pass} +{ next.extra "b" = + { "a" 'extra.label := } + 'skip$ + if$ + label extra.label * 'label := + label width$ longest.label.width > + { label 'longest.label := + label width$ 'longest.label.width := + } + 'skip$ + if$ + extra.label 'next.extra := +} + +EXECUTE {initialize.longest.label} + +ITERATE {forward.pass} + +REVERSE {reverse.pass} + +FUNCTION {begin.bib} + +{ et.al.char.used + { "\newcommand{\etalchar}[1]{$^{#1}$}" write$ newline$ } + 'skip$ + if$ + preamble$ empty$ + + 'skip$ + { preamble$ write$ newline$ } + if$ + + "\begin{thebibliography}{" "}" * write$ newline$ + +} + +EXECUTE {begin.bib} + +EXECUTE {init.state.consts} + +ITERATE {call.type$} + +FUNCTION {end.bib} +{ newline$ + "\end{thebibliography}" write$ newline$ +} + +EXECUTE {end.bib} + diff --git a/report/pyp_clustering/acl09-short/common-bak/acl08.sty b/report/pyp_clustering/acl09-short/common-bak/acl08.sty new file mode 100644 index 00000000..358f6d27 --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/acl08.sty @@ -0,0 +1,344 @@ +% File acl2005.sty +% October 11, 2004 +% Contact: oflazer@sabanciuniv.edu + +% This is the LaTeX style file for ACL 2005. It is nearly identical to the +% style files for ACL 2002, ACL 2001, ACL 2000, EACL 95 and EACL +% 99. +% + +% This is the LaTeX style file for ACL 2000. It is nearly identical to the +% style files for EACL 95 and EACL 99. Minor changes include editing the +% instructions to reflect use of \documentclass rather than \documentstyle +% and removing the white space before the title on the first page +% -- John Chen, June 29, 2000 + +% To convert from submissions prepared using the style file aclsub.sty +% prepared for the ACL 2000 conference, proceed as follows: +% 1) Remove submission-specific information: \whichsession, \id, +% \wordcount, \otherconferences, \area, \keywords +% 2) \summary should be removed. The summary material should come +% after \maketitle and should be in the ``abstract'' environment +% 3) Check all citations. This style should handle citations correctly +% and also allows multiple citations separated by semicolons. +% 4) Check figures and examples. Because the final format is double- +% column, some adjustments may have to be made to fit text in the column +% or to choose full-width (\figure*} figures. +% 5) Change the style reference from aclsub to acl2000, and be sure +% this style file is in your TeX search path + + +% This is the LaTeX style file for EACL-95. It is identical to the +% style file for ANLP '94 except that the margins are adjusted for A4 +% paper. -- abney 13 Dec 94 + +% The ANLP '94 style file is a slightly modified +% version of the style used for AAAI and IJCAI, using some changes +% prepared by Fernando Pereira and others and some minor changes +% by Paul Jacobs. + +% Papers prepared using the aclsub.sty file and acl.bst bibtex style +% should be easily converted to final format using this style. +% (1) Submission information (\wordcount, \subject, and \makeidpage) +% should be removed. +% (2) \summary should be removed. The summary material should come +% after \maketitle and should be in the ``abstract'' environment +% (between \begin{abstract} and \end{abstract}). +% (3) Check all citations. This style should handle citations correctly +% and also allows multiple citations separated by semicolons. +% (4) Check figures and examples. 
Because the final format is double- +% column, some adjustments may have to be made to fit text in the column +% or to choose full-width (\figure*} figures. + +% Place this in a file called aclap.sty in the TeX search path. +% (Placing it in the same directory as the paper should also work.) + +% Prepared by Peter F. Patel-Schneider, liberally using the ideas of +% other style hackers, including Barbara Beeton. +% This style is NOT guaranteed to work. It is provided in the hope +% that it will make the preparation of papers easier. +% +% There are undoubtably bugs in this style. If you make bug fixes, +% improvements, etc. please let me know. My e-mail address is: +% pfps@research.att.com + +% Papers are to be prepared using the ``acl'' bibliography style, +% as follows: +% \documentclass[11pt]{article} +% \usepackage{acl2000} +% \title{Title} +% \author{Author 1 \and Author 2 \\ Address line \\ Address line \And +% Author 3 \\ Address line \\ Address line} +% \begin{document} +% ... +% \bibliography{bibliography-file} +% \bibliographystyle{acl} +% \end{document} + +% Author information can be set in various styles: +% For several authors from the same institution: +% \author{Author 1 \and ... \and Author n \\ +% Address line \\ ... \\ Address line} +% if the names do not fit well on one line use +% Author 1 \\ {\bf Author 2} \\ ... \\ {\bf Author n} \\ +% For authors from different institutions: +% \author{Author 1 \\ Address line \\ ... \\ Address line +% \And ... \And +% Author n \\ Address line \\ ... \\ Address line} +% To start a seperate ``row'' of authors use \AND, as in +% \author{Author 1 \\ Address line \\ ... \\ Address line +% \AND +% Author 2 \\ Address line \\ ... \\ Address line \And +% Author 3 \\ Address line \\ ... \\ Address line} + +% If the title and author information does not fit in the area allocated, +% place \setlength\titlebox{} right after +% \usepackage{acl2000} +% where can be something larger than 2.25in + +% \typeout{Conference Style for ACL 2000 -- released June 20, 2000} +\typeout{Conference Style for ACL 2005 -- released Octobe 11, 2004} + +% NOTE: Some laser printers have a serious problem printing TeX output. +% These printing devices, commonly known as ``write-white'' laser +% printers, tend to make characters too light. To get around this +% problem, a darker set of fonts must be created for these devices. +% + +% Physical page layout - slightly modified from IJCAI by pj +\setlength\topmargin{0.0in} \setlength\oddsidemargin{-0.0in} +\setlength\textheight{9.0in} \setlength\textwidth{6.5in} +\setlength\columnsep{0.2in} +\newlength\titlebox +\setlength\titlebox{2.25in} +\setlength\headheight{0pt} \setlength\headsep{0pt} +%\setlength\footheight{0pt} +\setlength\footskip{0pt} +\thispagestyle{empty} \pagestyle{empty} +\flushbottom \twocolumn \sloppy + +%% A4 version of page layout +%\setlength\topmargin{-0.45cm} % changed by Rz -1.4 +%\setlength\oddsidemargin{.8mm} % was -0cm, changed by Rz +%\setlength\textheight{23.5cm} +%\setlength\textwidth{15.8cm} +%\setlength\columnsep{0.6cm} +%\newlength\titlebox +%\setlength\titlebox{2.00in} +%\setlength\headheight{5pt} +%\setlength\headsep{0pt} +%%\setlength\footheight{0pt} +%\setlength\footskip{0pt} +%\thispagestyle{empty} +%\pagestyle{empty} + +\flushbottom \twocolumn \sloppy + +% We're never going to need a table of contents, so just flush it to +% save space --- suggested by drstrip@sandia-2 +\def\addcontentsline#1#2#3{} + +% Title stuff, taken from deproc. 
+\def\maketitle{\par + \begingroup + \def\thefootnote{\fnsymbol{footnote}} + \def\@makefnmark{\hbox to 0pt{$^{\@thefnmark}$\hss}} + \twocolumn[\@maketitle] \@thanks + \endgroup + \setcounter{footnote}{0} + \let\maketitle\relax \let\@maketitle\relax + \gdef\@thanks{}\gdef\@author{}\gdef\@title{}\let\thanks\relax} +\def\@maketitle{\vbox to \titlebox{\hsize\textwidth + \linewidth\hsize \vskip 0.125in minus 0.125in \centering + {\Large\bf \@title \par} \vskip 0.2in plus 1fil minus 0.1in + {\def\and{\unskip\enspace{\rm and}\enspace}% + \def\And{\end{tabular}\hss \egroup \hskip 1in plus 2fil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf}% + \def\AND{\end{tabular}\hss\egroup \hfil\hfil\egroup + \vskip 0.25in plus 1fil minus 0.125in + \hbox to \linewidth\bgroup\large \hfil\hfil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf} + \hbox to \linewidth\bgroup\large \hfil\hfil + \hbox to 0pt\bgroup\hss \begin{tabular}[t]{c}\bf\@author + \end{tabular}\hss\egroup + \hfil\hfil\egroup} + \vskip 0.3in plus 2fil minus 0.1in +}} +\renewenvironment{abstract}{\centerline{\large\bf + Abstract}\vspace{0.5ex}\begin{quote} \small}{\par\end{quote}\vskip 1ex} + + +% bibliography + +\def\thebibliography#1{\section*{References} + \global\def\@listi{\leftmargin\leftmargini + \labelwidth\leftmargini \advance\labelwidth-\labelsep + \topsep 1pt plus 2pt minus 1pt + \parsep 0.25ex plus 1pt \itemsep 0.25ex plus 1pt} + \list {[\arabic{enumi}]}{\settowidth\labelwidth{[#1]}\leftmargin\labelwidth + \advance\leftmargin\labelsep\usecounter{enumi}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy + \sfcode`\.=1000\relax} + +\def\@up#1{\raise.2ex\hbox{#1}} + +% most of cite format is from aclsub.sty by SMS + +% don't box citations, separate with ; and a space +% also, make the penalty between citations negative: a good place to break +% changed comma back to semicolon pj 2/1/90 +% \def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi +% \def\@citea{}\@cite{\@for\@citeb:=#2\do +% {\@citea\def\@citea{;\penalty\@citeseppen\ }\@ifundefined +% {b@\@citeb}{{\bf ?}\@warning +% {Citation `\@citeb' on page \thepage \space undefined}}% +% {\csname b@\@citeb\endcsname}}}{#1}} + +% don't box citations, separate with ; and a space +% Replaced for multiple citations (pj) +% don't box citations and also add space, semicolon between multiple citations +\def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi + \def\@citea{}\@cite{\@for\@citeb:=#2\do + {\@citea\def\@citea{; }\@ifundefined + {b@\@citeb}{{\bf ?}\@warning + {Citation `\@citeb' on page \thepage \space undefined}}% + {\csname b@\@citeb\endcsname}}}{#1}} + +% Allow short (name-less) citations, when used in +% conjunction with a bibliography style that creates labels like +% \citename{, } +% +\let\@internalcite\cite +\def\cite{\def\citename##1{##1, }\@internalcite} +\def\shortcite{\def\citename##1{}\@internalcite} +\def\newcite{\def\citename##1{{\frenchspacing##1} (}\@internalciteb} + +% Macros for \newcite, which leaves name in running text, and is +% otherwise like \shortcite. 
+\def\@citexb[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi + \def\@citea{}\@newcite{\@for\@citeb:=#2\do + {\@citea\def\@citea{;\penalty\@m\ }\@ifundefined + {b@\@citeb}{{\bf ?}\@warning + {Citation `\@citeb' on page \thepage \space undefined}}% +{\csname b@\@citeb\endcsname}}}{#1}} +\def\@internalciteb{\@ifnextchar [{\@tempswatrue\@citexb}{\@tempswafalse\@citexb[]}} + +\def\@newcite#1#2{{#1\if@tempswa, #2\fi)}} + +\def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} + +%%% More changes made by SMS (originals in latex.tex) +% Use parentheses instead of square brackets in the text. +\def\@cite#1#2{({#1\if@tempswa , #2\fi})} + +% Don't put a label in the bibliography at all. Just use the unlabeled format +% instead. +\def\thebibliography#1{\small\vskip\parskip% +\vskip\baselineskip% +\def\baselinestretch{1}% +\ifx\@currsize\normalsize\@normalsize\else\@currsize\fi% +\vskip-\parskip% +\vskip-\baselineskip% +\section*{References\@mkboth + {References}{References}}\list + {}{\setlength{\labelwidth}{0pt}\setlength{\leftmargin}{\parindent} + \setlength{\itemsep}{-0.5ex} + \setlength{\itemindent}{-\parindent}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy\clubpenalty4000\widowpenalty4000 + \sfcode`\.=1000\relax} +\let\endthebibliography=\endlist + +% Allow for a bibliography of sources of attested examples +\def\thesourcebibliography#1{\vskip\parskip% +\vskip\baselineskip% +\def\baselinestretch{1}% +\ifx\@currsize\normalsize\@normalsize\else\@currsize\fi% +\vskip-\parskip% +\vskip-\baselineskip% +\section*{Sources of Attested Examples\@mkboth + {Sources of Attested Examples}{Sources of Attested Examples}}\list + {}{\setlength{\labelwidth}{0pt}\setlength{\leftmargin}{\parindent} + \setlength{\itemindent}{-\parindent}} + \def\newblock{\hskip .11em plus .33em minus -.07em} + \sloppy\clubpenalty4000\widowpenalty4000 + \sfcode`\.=1000\relax} +\let\endthesourcebibliography=\endlist + +\def\@lbibitem[#1]#2{\item[]\if@filesw + { \def\protect##1{\string ##1\space}\immediate + \write\@auxout{\string\bibcite{#2}{#1}}\fi\ignorespaces}} + +\def\@bibitem#1{\item\if@filesw \immediate\write\@auxout + {\string\bibcite{#1}{\the\c@enumi}}\fi\ignorespaces} + +% sections with less space +\def\section{\@startsection {section}{1}{\z@}{-2.0ex plus + -0.5ex minus -.2ex}{1.5ex plus 0.3ex minus .2ex}{\large\bf\raggedright}} +\def\subsection{\@startsection{subsection}{2}{\z@}{-1.8ex plus + -0.5ex minus -.2ex}{0.8ex plus .2ex}{\normalsize\bf\raggedright}} +\def\subsubsection{\@startsection{subsubsection}{3}{\z@}{1.5ex plus + 0.5ex minus .2ex}{0.5ex plus .2ex}{\normalsize\bf\raggedright}} +\def\paragraph{\@startsection{paragraph}{4}{\z@}{1.5ex plus + 0.5ex minus .2ex}{-1em}{\normalsize\bf}} +\def\subparagraph{\@startsection{subparagraph}{5}{\parindent}{1.5ex plus + 0.5ex minus .2ex}{-1em}{\normalsize\bf}} + +% Footnotes +\footnotesep 6.65pt % +\skip\footins 9pt plus 4pt minus 2pt +\def\footnoterule{\kern-3pt \hrule width 5pc \kern 2.6pt } +\setcounter{footnote}{0} + +% Lists and paragraphs +\parindent 1em +\topsep 4pt plus 1pt minus 2pt +\partopsep 1pt plus 0.5pt minus 0.5pt +\itemsep 2pt plus 1pt minus 0.5pt +\parsep 2pt plus 1pt minus 0.5pt + +\leftmargin 2em \leftmargini\leftmargin \leftmarginii 2em +\leftmarginiii 1.5em \leftmarginiv 1.0em \leftmarginv .5em \leftmarginvi .5em +\labelwidth\leftmargini\advance\labelwidth-\labelsep \labelsep 5pt + +\def\@listi{\leftmargin\leftmargini} +\def\@listii{\leftmargin\leftmarginii + \labelwidth\leftmarginii\advance\labelwidth-\labelsep + \topsep 2pt 
plus 1pt minus 0.5pt + \parsep 1pt plus 0.5pt minus 0.5pt + \itemsep \parsep} +\def\@listiii{\leftmargin\leftmarginiii + \labelwidth\leftmarginiii\advance\labelwidth-\labelsep + \topsep 1pt plus 0.5pt minus 0.5pt + \parsep \z@ \partopsep 0.5pt plus 0pt minus 0.5pt + \itemsep \topsep} +\def\@listiv{\leftmargin\leftmarginiv + \labelwidth\leftmarginiv\advance\labelwidth-\labelsep} +\def\@listv{\leftmargin\leftmarginv + \labelwidth\leftmarginv\advance\labelwidth-\labelsep} +\def\@listvi{\leftmargin\leftmarginvi + \labelwidth\leftmarginvi\advance\labelwidth-\labelsep} + +\abovedisplayskip 7pt plus2pt minus5pt% +\belowdisplayskip \abovedisplayskip +\abovedisplayshortskip 0pt plus3pt% +\belowdisplayshortskip 4pt plus3pt minus3pt% + +% Less leading in most fonts (due to the narrow columns) +% The choices were between 1-pt and 1.5-pt leading +%\def\@normalsize{\@setsize\normalsize{11pt}\xpt\@xpt} +%\def\small{\@setsize\small{10pt}\ixpt\@ixpt} +%\def\footnotesize{\@setsize\footnotesize{10pt}\ixpt\@ixpt} +%\def\scriptsize{\@setsize\scriptsize{8pt}\viipt\@viipt} +%\def\tiny{\@setsize\tiny{7pt}\vipt\@vipt} +%\def\large{\@setsize\large{14pt}\xiipt\@xiipt} +%\def\Large{\@setsize\Large{16pt}\xivpt\@xivpt} +%\def\LARGE{\@setsize\LARGE{20pt}\xviipt\@xviipt} +%\def\huge{\@setsize\huge{23pt}\xxpt\@xxpt} +%\def\Huge{\@setsize\Huge{28pt}\xxvpt\@xxvpt} + +\let\@@makecaption\@makecaption +\renewcommand{\@makecaption}[1]{\@@makecaption{\small #1}} + +\newcommand{\Thanks}[1]{\thanks{\ #1}} \ No newline at end of file diff --git a/report/pyp_clustering/acl09-short/common-bak/algorithmicx.sty b/report/pyp_clustering/acl09-short/common-bak/algorithmicx.sty new file mode 100644 index 00000000..bfb7daba --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/algorithmicx.sty @@ -0,0 +1,786 @@ +% ALGORITHMIC STYLE -- Released 27 APR 2005 +% for LaTeX version 2e +% +% Copyright Szasz Janos +% E-mail szaszjanos@users.sourceforge.net +% +% +% *** INITIALISING *** +% +% +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{algorithmicx}[2005/04/27 v1.2 Algorithmicx] +\RequirePackage{ifthen} +\typeout{Document Style algorithmicx 1.2 - a greatly improved `algorithmic' style} +% +\newcounter{ALG@line} +\newcounter{ALG@rem} +\newcounter{ALG@nested} +\newlength{\ALG@tlm} +\newlength{\ALG@thistlm} +\newcounter{ALG@Lnr}% the number of defined languages +\setcounter{ALG@Lnr}{0} +\newcounter{ALG@blocknr}% the number of defined blocks +\setcounter{ALG@blocknr}{0} +\newcounter{ALG@storecount}% number of stored but not restored algorithmic environments +\setcounter{ALG@storecount}{0} +\newcounter{ALG@tmpcounter}% only to decrement things +\newlength\ALG@tmplength% +%\def\algorithmicnoindent{-\ALG@tlm} +% \def\algbackskipbegin{\hskip\ALG@ctlm} +%\def\algbackskip{\hskip-\ALG@thistlm} +%\def\algbackskipend{\hskip-\ALG@tlm} +\def\ALG@defaultindent{\algorithmicindent} +% +% conditional states +% +\def\ALG@newcondstate#1% + {% + \expandafter\edef\csname ALG@x@#1\endcsname% + {\expandafter\noexpand\csname @@ALG@x@#1\endcsname}% + }% +\ALG@newcondstate{notext}% +\ALG@newcondstate{default}% +% +% +% *** ALGORITHMIC *** +% +% +\newcommand\ALG@beginblock[1]% #1 - indentation + {% + \ALG@thistlm\ALG@tlm% + \addtolength\ALG@tlm{#1}% + \addtocounter{ALG@nested}{1}% + \setlength\ALG@tmplength{#1}% + \expandafter\edef\csname ALG@ind@\theALG@nested\endcsname{\the\ALG@tmplength}% + }% +\newcommand\ALG@endblock% + {% + \addtolength\ALG@tlm{-\csname ALG@ind@\theALG@nested\endcsname}% + \addtocounter{ALG@nested}{-1}% + \ALG@thistlm\ALG@tlm% + }% +% +% 
algorithmic environment +% +\def\ALG@step% + {% + \addtocounter{ALG@line}{1}% + \addtocounter{ALG@rem}{1}% + \ifthenelse{\equal{\arabic{ALG@rem}}{\ALG@numberfreq}}% + {\setcounter{ALG@rem}{0}\alglinenumber{\arabic{ALG@line}}}% + {}% + }% +\newenvironment{algorithmic}[1][0]% + {% + \edef\ALG@numberfreq{#1}% + \def\@currentlabel{\theALG@line}% + % + \setcounter{ALG@line}{0}% + \setcounter{ALG@rem}{0}% + % + \let\\\algbreak% + % + \expandafter\edef\csname ALG@currentblock@\theALG@nested\endcsname{0}% + \expandafter\let\csname ALG@currentlifetime@\theALG@nested\endcsname\relax% + % + \begin{list}% + {\ALG@step}% + {% + \rightmargin\z@% + \itemsep\z@ \itemindent\z@ \listparindent2em% + \partopsep\z@ \parskip\z@ \parsep\z@% + \labelsep 0.5em \topsep 0.2em%\skip 1.2em + \ifthenelse{\equal{#1}{0}}% + {\labelwidth 0.5em}% + {\labelwidth 1.2em}% + \leftmargin\labelwidth \addtolength{\leftmargin}{\labelsep}% Ok. the perfect leftmargin :-)) + \ALG@tlm\z@% + }% + \setcounter{ALG@nested}{0}% + \ALG@beginalgorithmic% + }% + {% end{algorithmic} + % check if all blocks are closed + \ALG@closeloops% + \expandafter\ifnum\csname ALG@currentblock@\theALG@nested\endcsname=0\relax% + \else% + \PackageError{algorithmicx}{Some blocks are not closed!!!}{}% + \fi% + \ALG@endalgorithmic% + \end{list}% + }% +% +% +% *** Functional core *** +% +% +\def\ALG@makeentity#1% execute the entity (#1) + {% + \def\ALG@thisentity{#1}% + \expandafter\ifx\csname ALG@b@\ALG@L @#1@0\endcsname\relax% + \let\ALG@makenobeginrepeat\ALG@makenobegin\ALG@makenobeginrepeat% this entitie ends or continues blocks + \else% + \let\ALG@makebeginrepeat\ALG@makebegin\ALG@makebeginrepeat% this entitie can open blocks + \fi% + \ALG@entitiecommand% + }% +% +\def\ALG@makebegin% executes an entitie that can open blocks + {% + \expandafter\let\expandafter\ALG@thislifetime\csname ALG@currentlifetime@\theALG@nested\endcsname% + \ifx\ALG@thislifetime\relax% + \let\ALG@makebeginrepeat\ALG@doentity% in infinite block I can open my block + \else% + \ifnum\ALG@thislifetime>0\relax% + \ifnum\ALG@thislifetime>65534\else% + \setcounter{ALG@tmpcounter}{\ALG@thislifetime}% the block has 'space' for another included block + \addtocounter{ALG@tmpcounter}{-1}% + \expandafter\edef\csname ALG@currentlifetime@\theALG@nested\endcsname{\arabic{ALG@tmpcounter}}% + \fi% + \let\ALG@makebeginrepeat\ALG@doentity% + \else% the block needs to be closed + \expandafter\ifx\csname ALG@b@\ALG@L @\ALG@thisentity @\csname ALG@currentblock@\theALG@nested\endcsname\endcsname\relax% + \ALG@closebyforce% I can not close this block, continue after it is closed by force +% \ALG@makebegin% + \else% + % the block would be closed automatically, but this entitie can close it, so let's do it with the entity + \let\ALG@makebeginrepeat\ALG@doentity% + \fi% + \fi% + \fi% + \ALG@makebeginrepeat% + }% +% +\def\ALG@makenobegin% executes an entitie that can not open blocks + {% + \expandafter\ifx\csname ALG@currentlifetime@\theALG@nested\endcsname\relax% + \let\ALG@makenobeginrepeat\ALG@doentity% an infinite block must be broken + \else% + \expandafter\ifx\csname ALG@b@\ALG@L @\ALG@thisentity @\csname ALG@currentblock@\theALG@nested\endcsname\endcsname\relax% + \ALG@closebyforce% the block must be ended by force, + \else% + \let\ALG@makenobeginrepeat\ALG@doentity% I can continue / end this block, let's do it + \fi% + \fi% + \ALG@makenobeginrepeat% + }% +% +\def\ALG@dobegin% + {% + \ALG@beginblock{\csname ALG@i@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname}% + \expandafter\edef\csname 
ALG@currentblock@\theALG@nested\endcsname{\csname ALG@b@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname}% + \expandafter\ifx\csname ALG@c@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname\relax% + \expandafter\let\csname ALG@currentlifetime@\theALG@nested\endcsname\relax% + \else% + \expandafter\edef\csname ALG@currentlifetime@\theALG@nested\endcsname{\csname ALG@c@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname}% + \fi% + }% +% +\def\ALG@doend% + {% + \ALG@endblock% + }% +% +\def\ALG@doentity% the number of the closed block, the entitie + {% + \edef\ALG@thisblock{\csname ALG@currentblock@\theALG@nested\endcsname}% + \expandafter\ifx\csname ALG@b@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname\relax% + \def\ALG@thisblock{0}% + \fi% + \ALG@getentitytext% + \ifnum\ALG@thisblock=0\else\ALG@doend\fi% + \ifx\ALG@text\ALG@x@notext% + \item[]\nointerlineskip%\vskip-\prevdepth\nointerlineskip% bug: if there are no text and no lines, then this is wrong + \else% + \item% + \fi% + \noindent\hskip\ALG@tlm% + \expandafter\ifnum0=\csname ALG@b@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname\else% + \ALG@dobegin% + \fi% + \def\ALG@entitiecommand{\ALG@displayentity}% + }% +% +\def\ALG@getentitytext% + {% + \expandafter\let\expandafter\ALG@text\csname ALG@t@\ALG@L @\ALG@thisentity @\ALG@thisblock\endcsname% + \ifx\ALG@text\ALG@x@default% + % block specific - default + \expandafter\let\expandafter\ALG@text\csname ALG@t@\ALG@L @\ALG@thisentity\endcsname% + \ifx\ALG@text\ALG@x@default% + % block specific - default, language specific - default + \def\ALG@text{\ALG@deftext{\ALG@thisentity}}% + \fi% + \fi% + }% +% +\def\ALG@deftext{\csname ALG@deftext@\ALG@L\endcsname}% +% +\def\ALG@displayentity% + {% + \ifx\ALG@text\ALG@x@notext% + \let\ALG@text\relax% + \fi + \ALG@text% + }% +% +\def\ALG@closebyforce% + {% + \ALG@endblock% + }% +% +\def\ALG@closeloops% closes all finite blocks + {% + \expandafter\ifx\csname ALG@currentlifetime@\theALG@nested\endcsname\relax% + \else% only if it is finite + \ALG@closebyforce% the block must be ended by force, + \ALG@closeloops% the command still runs + \fi% + }% +% +% +% *** Low level block/entitie defining commands *** +% +% +\def\ALG@bl@{0}% the BIG block +\let\ALG@bl@@\ALG@bl@% the BIG block +% +% Create a block +% +\def\ALG@createblock#1% create the block #1, if it does not exists + {% + \@ifundefined{ALG@bl@\ALG@Ld @#1}% needs to be created? + {% + \addtocounter{ALG@blocknr}{1}% increment the block counter + \expandafter\edef\csname ALG@bl@\ALG@Ld @#1\endcsname{\arabic{ALG@blocknr}}% set the block number + }% + {}% + }% +% +% Get the block number +% +\def\ALG@getblocknumber#1{\csname ALG@bl@\ALG@Ld @#1\endcsname}% +% +% Create an entitie +% +\def\ALG@createentitie#1% create the entitie #1, if it does not exists + {% + \expandafter\ALG@edefcmd\csname #1\endcsname{\noexpand\ALG@makeentity{#1}}% + \@ifundefined{ALG@t@\ALG@Ld @#1}% the entity text is defined in this language? 
+ {% + \expandafter\let\csname ALG@t@\ALG@Ld @#1\endcsname\ALG@x@default% + }% + {}% + }% +% +\def\ALG@createtext#1#2% #1 = closed block; #2 = entitie; creates \ALG@t@#2@#1 + {% + \expandafter\let\csname ALG@t@\ALG@Ld @#2@#1\endcsname\ALG@x@default% + }% +% +% End and Continue block +% +\def\ALG@endandcontinueblock#1#2#3#4#5% #1 = new block; #2 = old block; #3 = entitie; #4 = credits; #5 = indent + {% + \ifthenelse{\equal{#3}{}}{}% execute only if the entity is not empty + {% + \ALG@createentitie{#3}% create the entitie + \ALG@createblock{#2}% create the old block, if needed + \ifthenelse{\equal{#1}{}}% whe need to open a new block? + {\expandafter\edef\csname ALG@b@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname{0}}% no, just close the old one + {% yes, + \ALG@createblock{#1}% create the block + \expandafter\edef\csname ALG@b@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname{\ALG@getblocknumber{#1}}% ending the old block opens a new one + \ifthenelse{\equal{#4}{}}% infinite or finite credits? + {\expandafter\let\csname ALG@c@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname\relax}% infinite credits + {\expandafter\edef\csname ALG@c@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname{#4}}% finite credits + \ifthenelse{\equal{#5}{}}% default or specified indentation + {\expandafter\let\csname ALG@i@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname\ALG@defaultindent}% default indentation + {\expandafter\edef\csname ALG@i@\ALG@Ld @#3@\ALG@getblocknumber{#2}\endcsname{#5}}% indentation is specified + }% + \ALG@createtext{\ALG@getblocknumber{#2}}{#3}% + }% + }% +% +% macros used in declarations +% +\def\ALG@p@endtext@E{\algrenewtext{\ALG@v@end}}% +\def\ALG@p@endtext@xE{\algrenewtext[\ALG@v@newblock]{\ALG@v@end}}% +\def\ALG@p@endtext@nE{\algnotext{\ALG@v@end}}% +\def\ALG@p@endtext@xnE{\algnotext[\ALG@v@newblock]{\ALG@v@end}}% +\def\ALG@p@endtext@{}% +% starttext defines are more compex -- care must be taken for the optional parameters +\def\ALG@p@starttext@S{\ALG@p@s@process{\algrenewtext}}% +\def\ALG@p@starttext@C{\ALG@p@s@process{\algrenewtext}}% +\def\ALG@p@starttext@xC{\ALG@p@s@process{\algrenewtext[\ALG@v@oldblock]}}% +\def\ALG@p@s@process#1% + {% + \ifthenelse{\equal{\ALG@v@start}{}}% + {\ALG@p@endtext}% + {\@ifnextchar{[}{\ALG@p@s@getparamcount{#1}}{\ALG@p@s@simple{#1}}}% + }% +\def\ALG@p@s@getparamcount#1[#2]% + {% + \@ifnextchar{[}{\ALG@p@s@getdefparam{#1}{#2}}{\ALG@p@s@param{#1}{#2}}% + }% +\def\ALG@p@s@getdefparam#1#2[#3]% + {% + \ALG@p@s@defparam{#1}{#2}{#3}% + }% +\def\ALG@p@s@simple#1#2{#1{\ALG@v@start}{#2}\ALG@p@endtext}% +\def\ALG@p@s@param#1#2#3{#1{\ALG@v@start}[#2]{#3}\ALG@p@endtext}% +\def\ALG@p@s@defparam#1#2#3#4{#1{\ALG@v@start}[#2][#3]{#4}\ALG@p@endtext}% +% the rest of the crew +\def\ALG@p@starttext@nS{\algnotext{\ALG@v@start}\ALG@p@endtext}% +\def\ALG@p@starttext@nC{\algnotext{\ALG@v@start}\ALG@p@endtext}% +\def\ALG@p@starttext@xnC{\algnotext[\ALG@v@oldblock]{\ALG@v@start}\ALG@p@endtext}% +\def\ALG@p@starttext@{\ALG@p@endtext}% +\def\ALG@p@indent@def#1{\def\ALG@v@indent{#1}\ALG@p@setup}% +\def\ALG@p@indent@{\def\ALG@v@indent{}\ALG@p@setup}% +\def\ALG@p@credits@def#1{\def\ALG@v@credits{#1}\ALG@p@indent}% +\def\ALG@p@credits@{\ALG@p@indent}% +\def\ALG@p@end@def#1{\def\ALG@v@end{#1}\ALG@p@credits}% +\def\ALG@p@end@{\def\ALG@v@end{}\ALG@p@credits}% +\def\ALG@p@start@def#1{\def\ALG@v@start{#1}\ALG@p@end}% +\def\ALG@p@start@{\def\ALG@v@start{}\ALG@p@end}% +\def\ALG@p@oldblock@def#1{\def\ALG@v@oldblock{#1}\ALG@p@start}% +\def\ALG@p@oldblock@{\def\ALG@v@oldblock{}\ALG@p@start}% 
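+% Illustrative note (kept as a comment, not part of the original package):
+% the start/end/credits/indent slots collected by the \ALG@p@... macros above
+% surface in the user-level \algdef command defined further below.  For a
+% hypothetical EXAMPLE block, mirroring the declarations used later in
+% algpseudocode.sty, one would write:
+%   \algdef{SE}[EXAMPLE]{Example}{EndExample}{\textbf{example}}{\textbf{end example}}%
+% Here 'S' creates the opening entity \Example with its display text, 'E'
+% creates the closing entity \EndExample, and the credits and indentation
+% fall back to the package defaults.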
+\newcommand\ALG@p@newblock[1][]{\def\ALG@v@newblock{#1}\ALG@p@oldblock}% +\def\ALG@p@setup% + {% + \ifthenelse{\equal{\ALG@v@newblock}{}}% + {% + \ifthenelse{\equal{\ALG@v@start}{}}% + {% + \PackageError{algorithmicx}{Block or starting entitie must be specified!!!}{}% + }% + {% + \let\ALG@v@newblock\ALG@v@start% + }% + }% + {% + }% + \ALG@endandcontinueblock% + {\ALG@v@newblock}{\ALG@v@oldblock}{\ALG@v@start}% + {\ALG@v@credits}{\ALG@v@indent}% + \ALG@endandcontinueblock% + {}{\ALG@v@newblock}{\ALG@v@end}% + {}{}% + \ALG@p@starttext% + }% +% +% param handling +% +\newcommand\ALG@p@def[2][def]% + {% + \expandafter\let\csname ALG@p@#2\expandafter\endcsname\csname ALG@p@#2@#1\endcsname% + }% +\def\ALG@p@undef{\ALG@p@def[]}% +% +\def\ALG@p@ons{\ALG@p@def{start}}% +\def\ALG@p@onS{\ALG@p@def{start}\ALG@p@def[S]{starttext}}% +\def\ALG@p@onc{\ALG@p@def{oldblock}\ALG@p@def{start}}% +\def\ALG@p@onC{\ALG@p@def{oldblock}\ALG@p@def{start}\ALG@p@def[C]{starttext}}% +\def\ALG@p@one{\ALG@p@def{end}}% +\def\ALG@p@onE{\ALG@p@def{end}\ALG@p@def[E]{endtext}}% +\def\ALG@p@onxC{\ALG@p@def{oldblock}\ALG@p@def{start}\ALG@p@def[xC]{starttext}}% +\def\ALG@p@onxE{\ALG@p@def{end}\ALG@p@def[xE]{endtext}}% +\def\ALG@p@onnS{\ALG@p@def{start}\ALG@p@def[nS]{starttext}}% +\def\ALG@p@onnC{\ALG@p@def{oldblock}\ALG@p@def{start}\ALG@p@def[nC]{starttext}}% +\def\ALG@p@onnE{\ALG@p@def{end}\ALG@p@def[nE]{endtext}}% +\def\ALG@p@onxnC{\ALG@p@def{oldblock}\ALG@p@def{start}\ALG@p@def[xnC]{starttext}}% +\def\ALG@p@onxnE{\ALG@p@def{end}\ALG@p@def[xnE]{endtext}}% +\def\ALG@p@onb{\def\ALG@v@credits{}}% +\def\ALG@p@onl{\def\ALG@v@credits{1}}% +\def\ALG@p@onL{\ALG@p@def{credits}}% +\def\ALG@p@oni{\ALG@p@def{indent}}% +% +\def\ALG@p@main#1% + {% + \@ifundefined{ALG@ps@\ALG@p@state @#1}% + {% + \csname ALG@ps@\ALG@p@state @other\endcsname{#1}% + }% + {% + \csname ALG@ps@\ALG@p@state @#1\endcsname% + }% + \ALG@p@rec% + }% +% STATE : <> +\expandafter\def\csname ALG@ps@@]\endcsname{\let\ALG@p@rec\relax}% +\def\ALG@ps@@s{\ALG@p@ons}% +\def\ALG@ps@@S{\ALG@p@onS}% +\def\ALG@ps@@c{\ALG@p@onc}% +\def\ALG@ps@@C{\ALG@p@onC}% +\def\ALG@ps@@e{\ALG@p@one}% +\def\ALG@ps@@E{\ALG@p@onE}% +\def\ALG@ps@@N{\typeout{algdef: 'N' obsoloted, use 'nE'.}\ALG@p@onnE}% +\def\ALG@ps@@b{\ALG@p@onb}% +\def\ALG@ps@@l{\ALG@p@onl}% +\def\ALG@ps@@L{\ALG@p@onL}% +\def\ALG@ps@@i{\ALG@p@oni}% +\def\ALG@ps@@x{\def\ALG@p@state{x}}% +\def\ALG@ps@@n{\def\ALG@p@state{n}}% +\def\ALG@ps@@other#1{\typeout{algdef: Ignoring unknown token #1}}% +% STATE : x +\def\ALG@ps@x@C{\def\ALG@p@state{}\ALG@p@onxC}% +\def\ALG@ps@x@E{\def\ALG@p@state{}\ALG@p@onxE}% +\def\ALG@ps@x@N{\def\ALG@p@state{}\typeout{algdef: 'xN' obsoloted, use 'xnE'.}\ALG@p@onxnE}% +\def\ALG@ps@x@n{\def\ALG@p@state{xn}}% +\def\ALG@ps@x@other#1% + {% + \typeout{algdef: Ignoring 'x' before '#1'.}% + \def\ALG@p@state{}% + \def\ALG@p@rec{\let\ALG@p@rec\ALG@p@main\ALG@p@rec#1}% + }% +% STATE : n +\def\ALG@ps@n@S{\def\ALG@p@state{}\ALG@p@onnS}% +\def\ALG@ps@n@C{\def\ALG@p@state{}\ALG@p@onnC}% +\def\ALG@ps@n@E{\def\ALG@p@state{}\ALG@p@onnE}% +\def\ALG@ps@n@x{\def\ALG@p@state{nx}}% +\def\ALG@ps@n@other#1% + {% + \typeout{algdef: Ignoring 'n' before '#1'.}% + \def\ALG@p@state{}% + \def\ALG@p@rec{\let\ALG@p@rec\ALG@p@main\ALG@p@rec#1}% + }% +% STATE : xn +\def\ALG@ps@xn@C{\def\ALG@p@state{}\ALG@p@onxnC}% +\def\ALG@ps@xn@E{\def\ALG@p@state{}\ALG@p@onxnE}% +\def\ALG@ps@xn@x{\typeout{algdef: Ignoring 'x' after 'xn'.}}% +\def\ALG@ps@xn@n{\typeout{algdef: Ignoring 'n' after 'xn'.}}% +\def\ALG@ps@xn@other#1% + {% + \typeout{algdef: 
Ignoring 'xn' before '#1'.}% + \def\ALG@p@state{}% + \def\ALG@p@rec{\let\ALG@p@rec\ALG@p@main\ALG@p@rec#1}% + }% +% STATE : nx +\def\ALG@ps@nx@C{\def\ALG@p@state{}\ALG@p@onxnC}% +\def\ALG@ps@nx@E{\def\ALG@p@state{}\ALG@p@onxnE}% +\def\ALG@ps@nx@x{\typeout{algdef: Ignoring 'x' after 'nx'.}}% +\def\ALG@ps@nx@n{\typeout{algdef: Ignoring 'n' after 'nx'.}}% +\def\ALG@ps@nx@other#1% + {% + \typeout{algdef: Ignoring 'nx' before '#1'.}% + \def\ALG@p@state{}% + \def\ALG@p@rec{\let\ALG@p@rec\ALG@p@main\ALG@p@rec#1}% + }% +% +% +% *** User level block/entitie commands *** +% +% +% +% algdef{switches}... -- the king of all definitions in the algorithmicx package +% +\newcommand\algdef[1]% + {% + \ALG@p@undef{oldblock}% + \ALG@p@undef{start}% + \ALG@p@undef{end}% + \def\ALG@v@credits{}% + \ALG@p@undef{credits}% + \ALG@p@undef{indent}% + \ALG@p@undef{starttext}% + \ALG@p@undef{endtext}% + \def\ALG@p@state{}% + \let\ALG@p@rec\ALG@p@main% + \ALG@p@rec#1]% + \ALG@p@newblock% + }% +% +% a lot of other macros are provided for convenience +% +\def\algblock{\algdef{se}}% +\def\algcblock{\algdef{ce}}% +\def\algloop{\algdef{sl}}% +\def\algcloop{\algdef{cl}}% +\def\algsetblock{\algdef{seLi}}% +\def\algsetcblock{\algdef{ceLi}}% +\def\algblockx{\algdef{SxE}}% +\def\algblockdefx{\algdef{SE}}% +\def\algcblockx{\algdef{CxE}}% +\def\algcblockdefx{\algdef{CE}}% +\def\algsetblockx{\algdef{SxELi}}% +\def\algsetblockdefx{\algdef{SELi}}% +\def\algsetcblockx{\algdef{CxELi}}% +\def\algsetcblockdefx{\algdef{CELi}}% +\def\algloopdefx{\algdef{Sl}}% +\def\algcloopx{\algdef{xCl}}% +\def\algcloopdefx{\algdef{Cl}}% +% algloopx is not correct, use algloopdefx +% +% Text output commands +% +\newcommand\algrenewtext[2][]% [block]{entity} + {% + \ifthenelse{\equal{#2}{}}{}% + {% + \ifthenelse{\equal{#1}{}}% + {% + \expandafter\let\csname ALG@t@\ALG@Ld @#2\endcsname\relax% + \expandafter\newcommand\csname ALG@t@\ALG@Ld @#2\endcsname% + }% + {% + \expandafter\let\csname ALG@t@\ALG@Ld @#2@\ALG@getblocknumber{#1}\endcsname\relax% + \expandafter\newcommand\csname ALG@t@\ALG@Ld @#2@\ALG@getblocknumber{#1}\endcsname% + }% + }% + }% +% +\def\ALG@letentitytext#1#2% [block]{entity} + {% + \ifthenelse{\equal{#2}{}}{}% + {% + \ifthenelse{\equal{#1}{}}% + {% + \expandafter\let\csname ALG@t@\ALG@Ld @#2\endcsname% + }% + {% + \expandafter\let\csname ALG@t@\ALG@Ld @#2@\ALG@getblocknumber{#1}\endcsname% + }% + }% + }% +% +\newcommand\algnotext[2][]% [block]{entity} + {% + \ALG@letentitytext{#1}{#2}\ALG@x@notext% + }% +% +\newcommand\algdefaulttext[2][]% [block]{entity} + {% + \ALG@letentitytext{#1}{#2}\ALG@x@default% + }% +% +\def\ALG@notext*{\algnotext}% +\def\algtext{\@ifnextchar{*}{\ALG@notext}{\algrenewtext}}% +% +% +% *** LANGUAGE SWITCHING *** +% +% +% +\newcommand\algnewlanguage[1]% + {% + \@ifundefined{ALG@L@#1}% needs to be created? 
+ {}% + {% + \PackageError{algorithmicx}{Language '#1' already defined!}{}% + }% + \addtocounter{ALG@Lnr}{1}% increment the language counter + \expandafter\edef\csname ALG@L@#1\endcsname{\arabic{ALG@Lnr}}% set the language number + \edef\ALG@Ld{\csname ALG@L@#1\endcsname}% + \expandafter\let\csname ALG@bl@\ALG@Ld @\endcsname\ALG@bl@% the BIG block + \expandafter\let\csname ALG@bl@\ALG@Ld @@\endcsname\ALG@bl@% the BIG block + \algdef{SL}[STATE]{State}{0}{}% + \expandafter\def\csname ALG@deftext@\ALG@Ld\endcsname{\textbf}% + \algnewcommand\algorithmiccomment[1]{\hfill\(\triangleright\) ##1}% + \algnewcommand\algorithmicindent{1.5em}% + \algnewcommand\alglinenumber[1]{\footnotesize ##1:}% + \algnewcommand\ALG@beginalgorithmic\relax% for user overrides + \algnewcommand\ALG@endalgorithmic\relax% for user overrides + }% +% +\newcommand\algsetlanguage[1]% + {% + \@ifundefined{ALG@L@#1}% needs to be created? + {% + \PackageError{algorithmicx}{Language '#1' is not yet defined!}{}% + }{}% + \edef\ALG@L{\csname ALG@L@#1\endcsname}% + }% +% +\newcommand\algdeflanguage[1]% + {% + \@ifundefined{ALG@L@#1}% needs to be created? + {% + \PackageError{algorithmicx}{Language '#1' is not yet defined!}{}% + }{}% + \edef\ALG@Ld{\csname ALG@L@#1\endcsname}% + }% +% +\newcommand\alglanguage[1]% + {% + \algdeflanguage{#1}% + \algsetlanguage{#1}% + }% +% +% +% *** Defining language dependent stuff *** +% +% +\def\ALG@eatoneparam#1{}% +\def\ALG@defbasecmd#1#2% + {% + \edef\ALG@tmp{\expandafter\ALG@eatoneparam\string #2}% + \@ifundefined\ALG@tmp{\edef #2{\noexpand\csname ALG@cmd@\noexpand\ALG@L @\ALG@tmp\endcsname}}{}% + \expandafter#1\csname ALG@cmd@\ALG@Ld @\ALG@tmp\endcsname% + }% +\newcommand\algnewcommand{\ALG@defbasecmd\newcommand}% +\newcommand\algrenewcommand{\ALG@defbasecmd\renewcommand}% +\def\ALG@letcmd{\ALG@defbasecmd\let}% +\def\ALG@defcmd{\ALG@defbasecmd\def}% +\def\ALG@edefcmd{\ALG@defbasecmd\edef}% +% +% +% *** OTHERS *** +% +% +\def\BState{\State \algbackskip}% +\def\Statex{\item[]}% an empty line +\newcommand\algrenewcomment{\algrenewcommand\algorithmiccomment}% +\def\Comment{\algorithmiccomment}% +\def\algref#1#2{\ref{#1}.\ref{#2}}% +\algnewlanguage{default}% +\algsetlanguage{default}% +% +% +% *** Line breaks *** +% +% +\newcommand\algbreak% for multiline parameters !!! needs fix + {% + \item% +% \hskip\ALG@parindent%!!! 
not yet implemented +% \hskip-\algorithmicindent% + }% +% +\def\ALG@noputindents% + {% + \hskip\ALG@tlm% + }% +% +% +% *** algorithm store / restore *** +% +% +% store +% +\ALG@newcondstate{mustrestore}% +\def\algstore% + {% + \renewcommand\ALG@beginblock% + {% + \PackageError{algorithmicx}{The environment must be closed after store!}{}% + }% + \@ifstar{\ALG@starstore}{\ALG@nostarstore}% + }% +\def\ALG@nostarstore#1% save all infos into #1 and terminate the algorithmic block + {% + \addtocounter{ALG@storecount}{1}% + \expandafter\global\expandafter\let\csname ALG@save@mustrestore@#1\endcsname\ALG@x@mustrestore% + \ALG@starstore{#1}% + }% +\def\ALG@starstore#1% + {% + \@ifundefined{ALG@save@line@#1}{}% + {\PackageError{algorithmicx}{This save name '#1' is already used!}{}}% + \def\ALG@savename{#1}% + \expandafter\xdef\csname ALG@save@totalnr@\ALG@savename\endcsname{\theALG@nested}% + \expandafter\xdef\csname ALG@save@line@\ALG@savename\endcsname{\theALG@line}% + \expandafter\xdef\csname ALG@save@numberfreq@\ALG@savename\endcsname{\ALG@numberfreq}% + \expandafter\xdef\csname ALG@save@rem@\ALG@savename\endcsname{\theALG@rem}% + \let\ALG@storerepeat\ALG@store% + \ALG@storerepeat% + }% +\def\ALG@store% simply terminate all open blocks + {% + \ifnum\theALG@nested=0\let\ALG@storerepeat\relax% + \else% + \expandafter\xdef\csname ALG@save@currentblock@\ALG@savename @\theALG@nested\endcsname% + {\csname ALG@currentblock@\theALG@nested\endcsname}% + \expandafter\ifx\csname ALG@currentlifetime@\theALG@nested\endcsname\relax% + \else% + \expandafter\xdef\csname ALG@save@currentlifetime@\ALG@savename @\theALG@nested\endcsname% + {\csname ALG@currentlifetime@\theALG@nested\endcsname}% + \fi% + \expandafter\xdef\csname ALG@save@ind@\ALG@savename @\theALG@nested\endcsname% + {\csname ALG@ind@\theALG@nested\endcsname}% + \ALG@closebyforce% + \fi% + \ALG@storerepeat% + }% +% +% restore +% +\def\algrestore% + {% + \@ifstar{\ALG@starrestore}{\ALG@nostarrestore}% + }% +\def\ALG@starrestore% + {% + \let\ALG@restorerem\relax% + \let\ALG@restorereprem\relax% + \ALG@restoremain% + }% +\def\ALG@nostarrestore% + {% + \let\ALG@restorerem\ALG@restoreremovesave% + \let\ALG@restorereprem\ALG@restorerepremovesave% + \ALG@restoremain% + }% +\def\ALG@restoreremovesave% + {% + \expandafter\global\expandafter\let\csname ALG@save@totalnr@\ALG@savename\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@line@\ALG@savename\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@rem@\ALG@savename\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@totalnr@\ALG@savename\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@numberfreq@\ALG@savename\endcsname\relax% + }% +\def\ALG@restorerepremovesave% + {% + \expandafter\global\expandafter\let\csname ALG@save@currentblock@\ALG@savename @\theALG@tmpcounter\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@currentlifetime@\ALG@savename @\theALG@tmpcounter\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@currentlifetime@\ALG@savename @\theALG@tmpcounter\endcsname\relax% + \expandafter\global\expandafter\let\csname ALG@save@ind@\ALG@savename @\theALG@tmpcounter\endcsname\relax% + }% +\def\ALG@restoremain#1% restore all infos from #1 in an open algorithmic block + {% + \ifnum\theALG@line=0% + \else\PackageError{algorithmicx}{Restore might be used only at the beginning of the environment!}{}% + \fi% + \def\ALG@savename{#1}% + \expandafter\ifx\csname 
ALG@save@totalnr@\ALG@savename\endcsname\relax% + \PackageError{algorithmicx}{Save '\ALG@savename'\space not defined!!!}{}% + \fi% + \@ifundefined{ALG@save@mustrestore@\ALG@savename}{}% + {% + \addtocounter{ALG@storecount}{-1}% + \expandafter\global\expandafter\let\csname ALG@save@mustrestore@\ALG@savename\endcsname\relax% + }% + \setcounter{ALG@line}{\csname ALG@save@line@\ALG@savename\endcsname}% + \edef\ALG@numberfreq{\csname ALG@save@numberfreq@\ALG@savename\endcsname}% + \setcounter{ALG@rem}{\csname ALG@save@rem@\ALG@savename\endcsname}% + \setcounter{ALG@tmpcounter}{\csname ALG@save@totalnr@\ALG@savename\endcsname}% + \setcounter{ALG@nested}{0}% + \ALG@restorerem% + \let\ALG@restorerepeat\ALG@restore% + \ALG@restorerepeat% + }% +\def\ALG@restore% + {% + \ifnum\theALG@tmpcounter>0% + \expandafter\edef\csname ALG@currentblock@\theALG@tmpcounter\endcsname% + {\csname ALG@save@currentblock@\ALG@savename @\theALG@tmpcounter\endcsname}% + \expandafter\ifx\csname ALG@save@currentlifetime@\ALG@savename @\theALG@tmpcounter\endcsname\relax% + \expandafter\let\csname ALG@currentlifetime@\theALG@tmpcounter\endcsname\relax% + \else% + \expandafter\edef\csname ALG@currentlifetime@\theALG@tmpcounter\endcsname% + {\csname ALG@save@currentlifetime@\ALG@savename @\theALG@tmpcounter\endcsname}% + \fi% + % + \ALG@beginblock{\csname ALG@save@ind@\ALG@savename @\theALG@tmpcounter\endcsname}% + \ALG@restorereprem% + \addtocounter{ALG@tmpcounter}{-1}% + \else\let\ALG@restorerepeat\relax% + \fi% + \ALG@restorerepeat% + }% +\AtEndDocument% + {% + \ifnum\theALG@storecount>0\relax% + \PackageError{algorithmicx}{Some stored algorithms are not restored!}{}% + \fi% + }% diff --git a/report/pyp_clustering/acl09-short/common-bak/algpseudocode.sty b/report/pyp_clustering/acl09-short/common-bak/algpseudocode.sty new file mode 100644 index 00000000..fca966ac --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/algpseudocode.sty @@ -0,0 +1,92 @@ +% PSEUDOCODE ALGORITHMIC STYLE -- Released 27 APR 2005 +% for LaTeX version 2e +% +% Copyright Szasz Janos +% E-mail szaszjanos@users.sourceforge.net +% Based on Peter Williams's algorithmic.sty +% +\NeedsTeXFormat{LaTeX2e}% +\ProvidesPackage{algpseudocode}% +\RequirePackage{ifthen}% +\RequirePackage{algorithmicx}% +\typeout{Document Style - pseudocode environments for use with the `algorithmicx' style}% +% +\def\ALG@noend{f}% +\newboolean{ALG@compatible}% +\setboolean{ALG@compatible}{false}% +% +\DeclareOption{noend}{\def\ALG@noend{t}}% +\DeclareOption{end}{\def\ALG@noend{f}}% +\DeclareOption{compatible}{\typeout{For compatibility mode use algcompatible.sty!!!}\setboolean{ALG@compatible}{true}}% +\DeclareOption{noncompatible}{\setboolean{ALG@noncompatible}{false}}% +\ProcessOptions% +% +% *** DECLARATIONS *** +% +\algnewlanguage{pseudocode}% +\alglanguage{pseudocode}% +% +% *** KEYWORDS *** +% +\algnewcommand\algorithmicend{\textbf{end}} +\algnewcommand\algorithmicdo{\textbf{do}} +\algnewcommand\algorithmicwhile{\textbf{while}} +\algnewcommand\algorithmicfor{\textbf{for}} +\algnewcommand\algorithmicforall{\textbf{for all}} +\algnewcommand\algorithmicloop{\textbf{loop}} +\algnewcommand\algorithmicrepeat{\textbf{repeat}} +\algnewcommand\algorithmicuntil{\textbf{until}} +\algnewcommand\algorithmicprocedure{\textbf{procedure}} +\algnewcommand\algorithmicfunction{\textbf{function}} +\algnewcommand\algorithmicif{\textbf{if}} +\algnewcommand\algorithmicthen{\textbf{then}} +\algnewcommand\algorithmicelse{\textbf{else}} +\algnewcommand\algorithmicrequire{\textbf{Require:}} 
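+% Usage sketch (comment only, added for illustration): the keywords in this
+% section and the constructs declared below combine in a document roughly as
+% follows; the procedure name and variables are invented for the example.
+%   \begin{algorithmic}[1]
+%     \Require an integer $n \ge 0$
+%     \Procedure{Sum}{$n$}
+%       \State $s \gets 0$
+%       \For{$i \gets 1, n$}
+%         \State $s \gets s + i$
+%       \EndFor
+%       \State \Return $s$
+%     \EndProcedure
+%   \end{algorithmic}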
+\algnewcommand\algorithmicensure{\textbf{Ensure:}} +\algnewcommand\algorithmicreturn{\textbf{return}} +\algnewcommand\textproc{\textsc} +% +% *** DECLARED LOOPS *** +% +\algdef{SE}[WHILE]{While}{EndWhile}[1]{\algorithmicwhile\ #1\ \algorithmicdo}{\algorithmicend\ \algorithmicwhile}% +\algdef{SE}[FOR]{For}{EndFor}[1]{\algorithmicfor\ #1\ \algorithmicdo}{\algorithmicend\ \algorithmicfor}% +\algdef{S}[FOR]{ForAll}[1]{\algorithmicforall\ #1\ \algorithmicdo}% +\algdef{SE}[LOOP]{Loop}{EndLoop}{\algorithmicloop}{\algorithmicend\ \algorithmicloop}% +\algdef{SE}[REPEAT]{Repeat}{Until}{\algorithmicrepeat}[1]{\algorithmicuntil\ #1}% +\algdef{SE}[IF]{If}{EndIf}[1]{\algorithmicif\ #1\ \algorithmicthen}{\algorithmicend\ \algorithmicif}% +\algdef{C}[IF]{IF}{ElsIf}[1]{\algorithmicelse\ \algorithmicif\ #1\ \algorithmicthen}% +\algdef{Ce}[ELSE]{IF}{Else}{EndIf}{\algorithmicelse}% +\algdef{SE}[PROCEDURE]{Procedure}{EndProcedure}% + [2]{\algorithmicprocedure\ \textproc{#1}\ifthenelse{\equal{#2}{}}{}{(#2)}}% + {\algorithmicend\ \algorithmicprocedure}% +\algdef{SE}[FUNCTION]{Function}{EndFunction}% + [2]{\algorithmicfunction\ \textproc{#1}\ifthenelse{\equal{#2}{}}{}{(#2)}}% + {\algorithmicend\ \algorithmicfunction}% +% +\ifthenelse{\equal{\ALG@noend}{t}}% + {% + \algtext*{EndWhile}% + \algtext*{EndFor}% + \algtext*{EndLoop}% + \algtext*{EndIf}% + \algtext*{EndProcedure}% + \algtext*{EndFunction}% + }{}% +% +% *** OTHER DECLARATIONS *** +% +\algnewcommand\Require{\item[\algorithmicrequire]}% +\algnewcommand\Ensure{\item[\algorithmicensure]}% +\algnewcommand\Return{\algorithmicreturn{} }% +\algnewcommand\Call[2]{\textproc{#1}\ifthenelse{\equal{#2}{}}{}{(#2)}}% +% +% +% +\ifthenelse{\boolean{ALG@compatible}}% + {% + \ifthenelse{\equal{\ALG@noend}{t}}% + {\RequirePackage[noend]{algcompatible}}% + {\RequirePackage{algcompatible}}% + }% + {}% +% diff --git a/report/pyp_clustering/acl09-short/common-bak/hyphen.sty b/report/pyp_clustering/acl09-short/common-bak/hyphen.sty new file mode 100644 index 00000000..028e8fb1 --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/hyphen.sty @@ -0,0 +1,23 @@ +%%%%%%%%% HYPHENATION CONTROL %%%%%%%% + +\pretolerance 250 +\tolerance 500 +% \hyphenpenalty 250 +\hyphenpenalty 200 +\exhyphenpenalty 100 +\doublehyphendemerits 7500 +\finalhyphendemerits 7500 +\brokenpenalty 10000 +\lefthyphenmin 3 +\righthyphenmin 3 +\widowpenalty 10000 +\clubpenalty 10000 +\displaywidowpenalty 10000 +\looseness 1 + +\hyphenation{phon-emic} +\hyphenation{Cam-er-oon} +\hyphenation{Kam-erun} +\hyphenation{ex-am-ple} +\hyphenation{para-digm} +\hyphenation{para-digms} diff --git a/report/pyp_clustering/acl09-short/common-bak/jeffe.sty b/report/pyp_clustering/acl09-short/common-bak/jeffe.sty new file mode 100644 index 00000000..d2cd2e99 --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/jeffe.sty @@ -0,0 +1,566 @@ +% ============================-*- LaTeX -*-============================= +% +% jeffe.sty -- macros I use everywhere +% +% Jeff Erickson (jeffe@cs.uiuc.edu) +% Last modified 09 Jul 2000 +% This is free; caveat emptor! +% +% Requirements that may not be part of every TeX distribution: +% (1) The standard AMS math packages amsmath and amssymb. These are +% absolutely vital to anyone who typesets mathematics of any +% kind. If you don't have them, get them NOW!! +% (2) The textcomp package. Cosmetic only. +% (3) Knuth's manual font `manfnt'. Cosmetic only. 
+% +% ====================================================================== +\RequirePackage{latexsym,amsmath,amssymb} + +\hyphenation{co-or-din-ate co-or-din-ates half-space stereo-iso-mers +stereo-iso-mer Round-table} + +% ---------------------------------------------------------------------- +% Common abbreviations and words with accents +% ---------------------------------------------------------------------- + +% ---- LATIN ---- +\def\etal{\emph{et~al.}} +\def\ie{\emph{i.e.}} +\def\eg{\emph{e.g.}} +\def\vitae{vit\ae{}} +\def\apriori{\emph{a~priori}} +\def\aposteriori{\emph{a~posteriori}} + +% ---- FRENCH ---- +\def\naive{na\"{\i}ve} +\def\Naive{Na\"{\i}ve} +\def\naively{na\"{\i}vely} % Okay, I know, this isn't French. +\def\Naively{Na\"{\i}vely} +\def\cafe{caf\'e} + +% ---- GERMAN ---- +\def\fur{f\"ur} +\def\Universitat{Universit\"at} +\def\Saarbrucken{Saar\-br\"ucken} % Bypass TeX hyphenation +\def\Zurich{Z\"urich} + +% ---- PORTUGESE (Hi Jorge!) ---- +\def\Computacao{Computa\c{c}\~ao} + +% ---- PROPER NAMES (because I'm lazy) ---- +\def\Benes{Bene\v{s}} % ...network +\def\Bezier{B\'ezier} % ...spline/curve/surface +\def\Bjorner{Bj\"orner} +\def\Bochis{Bochi\c{s}} % Daciana +\def\Boruvka{Bor\.uvka} % ...'s MST algorithm +\def\Bragger{Br\"agger} +\def\Bronnimann{Br\"onnimann} \def\Herve{Herv\'e} +\def\Bruckner{Br\"uckner} +\def\Caratheodory{Carath\'eodory} % Constantin +\def\Chvatal{Chv\'atal} \def\Vasek{Va\v{s}ek} + \def\Joao{Jo\~ao} % Compa +\def\Cortes{Cort\'es} % Carmen +\def\Dujmovic{Dujmovi\'c} % Vida + \def\Fredo{Fr\'edo} % Durand +\def\Erdos{Erd\H{o}s} \def\Pal{P\'al} +\def\Furedi{F\"uredi} \def\Zoltan{Zolt\'an} +\def\Grobner{Gr\"obner} % ... basis +\def\Grunbaum{Gr\"unbaum} % Branko +\def\Hanoi{Hano\"\i} % Tower of... +\def\Jarnik{Jarn\a'{\i}k} % ...'s (`Prim's') MST algorithm +\def\Komlos{Koml\'os} +\def\Kovari{K\"ov\'ari} +\def\Lovasz{Lov\'asz} \def\Laszlo{L\'aszl\'o} +\def\Matousek{Matou\v{s}ek} \def\Jiri{Ji\v{r}\'\i} +\def\Mnev{Mn\"ev} +\def\Mobius{M\"obius} % ... strip/transformation/function +\def\Mucke{M\"ucke} % Ernst +\def\ODunliang{\'O'D\'unliang} +\def\Oleinik{Ole\u{\i}nik} + \def\Janos{J\'anos} % Pach +\def\Palasti{Pal\'asti} + \def\Belen{Bel\'en} % Palop +\def\Petrovskii{Petrovski\u{\i}} +\def\Pinar{P\i nar} % Ali +\def\Plucker{Pl\"ucker} % ... coordinates +\def\Poincare{Poincar\'e} % ... duality/halfplane + \def\Gunter{G\"unter} % Rote, Ziegler +\def\Sacristan{Sacrist\'an} % Vera +\def\Saskin{\v{S}a\v{s}kin} +\def\Schomer{Sch\"omer} +\def\Schonhardt{Sch\"onhardt} % ... polyhedron +\def\Sos{S\'os} +\def\Stackel{St\"ackel} % Paul +\def\Szekely{Sz\'ekely} +\def\Szemeredi{Szemer\'edi} +\def\Toth{T\'{o}th} % Geza +\def\Turan{Tur\'an} +\def\Ungor{\"Ung\"or} % Alper +\def\Voronoi{Vorono\"i} % ... diagram [for francophile pedants only] + +% Other +\def\Cplusplus{C\raisebox{0.5ex}{\tiny\bf++}} + +% ---------------------------------------------------------------------- +% Simple math stuff +% ---------------------------------------------------------------------- + +% ---- SYMBOLS ---- +\let\e\varepsilon % a ``real'' epsilon + +\def\Integer{\mathsf{Z\hspace{-0.4em}Z}} +\def\Natural{\mathrm{I\!N}} +\def\Real{\mathrm{I\!R}} +\def\Proj{\mathrm{I\!P}} % projective space +\def\Hyper{\mathrm{I\!H}} % hyperbolic space + +% +% These two look okay in Computer Modern 11pt, or Concrete Roman +% 12pt, but they need serious work in other sizes. I need to figure +% out how to draw a vertical bar, or better yet a shallow arc, inside +% the bowls. Or maybe I need to learn METAFONT. 
+% +\def\Complex{\mathrm{\,\raise 0.33ex\hbox{\scriptsize\bf(}\!\!\!C}} +\def\Rational{\mathrm{\,\raise 0.33ex\hbox{\scriptsize\bf(}\!\!\!Q}} + +% +% Here are more standard, but uglier, versions of these symbols. +% They stick out, because they're all in Times Roman Bold Outline! +% I *really* want a Concrete Blackboard or Euler Blackboard font, but +% I'd settle for Computer Modern Blackboard. +% +%\def\Real{\mathbb{R}} +%\def\Proj{\mathbb{P}} +%\def\Hyper{\mathbb{H}} +%\def\Integer{\mathbb{Z}} +%\def\Natural{\mathbb{N}} +%\def\Complex{\mathbb{C}} +%\def\Rational{\mathbb{Q}} + +\let\N\Natural +\let\Q\Rational +\let\R\Real +\let\Z\Integer +\def\Rd{\Real^d} +\def\RP{\Real\Proj} +\def\CP{\Complex\Proj} + +% ---- OPERATORS (requires amsmath) ---- +\def\aff{\operatorname{aff}} % (\Line is better!) +\def\area{\operatorname{area}} +\def\argmax{\operatornamewithlimits{arg\,max}} +\def\argmin{\operatornamewithlimits{arg\,min}} +\def\Aut{\operatorname{Aut}} % Automorphism group +\def\card{\operatorname{card}} % cardinality, deprecated for \abs +\def\conv{\operatorname{conv}} % (\overline is better!) +\def\E{\operatorname{E}} % Expectation: $\E[X]$ (like \Pr) +\def\EE{\operatornamewithlimits{E}} +\def\Hom{\operatorname{Hom}} % Homomorphism group +\def\id{\operatorname{id}} % identity +\def\im{\operatorname{im}} % image +\def\lcm{\operatorname{lcm}} +\def\lfs{\operatorname{lfs}} % local feature size +\def\poly{\operatorname{poly}} +\def\polylog{\operatorname{polylog}} +\def\rank{\operatorname{rank}} +\def\rel{\operatorname{rel\,}} % relative (interior, boundary, etc.) +\def\sgn{\operatorname{sgn}} +\def\vol{\operatorname{vol}} % volume + +\def\fp#1{^{\underline{#1}}} % falling powers: $n\fp{d}$ +\def\rp#1{^{\overline{#1}}} % rising powers: $n\rp{d}$ + +% --- Cheap displaystyle operators --- +\def\Frac#1#2{{\displaystyle\frac{#1}{#2}}} +\def\Sum{\sum\limits} +\def\Prod{\prod\limits} +\def\Union{\bigcup\limits} +\def\Inter{\bigcap\limits} +\def\Lor{\bigvee\limits} +\def\Land{\bigwedge\limits} +\def\Lim{\lim\limits} +\def\Max{\max\limits} +\def\Min{\min\limits} + +% ---- RELATORS ---- +\def\deq{\stackrel{\scriptscriptstyle\triangle}{=}} +\def\mapsfrom{\leftarrow\!\mapstochar\,} +\let\into\hookrightarrow % = one-to-one +\let\onto\twoheadrightarrow + +% ---- DELIMITER PAIRS ---- +\def\floor#1{\lfloor #1 \rfloor} +\def\ceil#1{\lceil #1 \rceil} +\def\seq#1{\langle #1 \rangle} +\def\set#1{\{ #1 \}} +\def\abs#1{\mathopen| #1 \mathclose|} % use instead of $|x|$ +\def\norm#1{\mathopen\| #1 \mathclose\|}% use instead of $\|x\|$ +\def\indic#1{\big[#1\big]} % indicator variable; Iverson notation + % e.g., Kronecker delta = [x=0] + +% --- Self-scaling delmiter pairs --- +\def\Floor#1{\left\lfloor #1 \right\rfloor} +\def\Ceil#1{\left\lceil #1 \right\rceil} +\def\Seq#1{\left\langle #1 \right\rangle} +\def\Set#1{\left\{ #1 \right\}} +\def\Abs#1{\left| #1 \right|} +\def\Norm#1{\left\| #1 \right\|} +\def\Paren#1{\left( #1 \right)} % need better macro name! +\def\Brack#1{\left[ #1 \right]} % need better macro name! +\def\Indic#1{\left[ #1 \right]} % indicator variable; Iverson notation + +% +% Macros to typeset sets like {foo|bar} with all three delimiters +% correctly scaled to fit. What I *really* want is a \middle macro +% that acts just like \left and \right. Grumble. 
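+% For instance (illustrative only), with the delimiter macros above and the
+% \Setbar macro defined just below, one might write in math mode
+%   \Floor{n/2}, \Set{1, \dots, n}, \Setbar{x \in \Real}{x > 0}
+% to get a floor, a braced set, and a set-builder expression whose braces
+% and middle bar scale to the enclosed material.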
+% +\def\Bigbar#1{\mathrel{\left|\vphantom{#1}\right.\n@space}} +\def\Setbar#1#2{\Set{#1 \Bigbar{#1 #2} #2}} +\def\Seqbar#1#2{\Seq{#1 \Bigbar{#1 #2} #2}} +\def\Brackbar#1#2{\Brack{#1 \Bigbar{#1 #2} #2}} + +% C-style arithmetic if-then-else +\def\arithif#1#2#3{#1 \mathbin? #2 \mathbin: #3} + +% Math mode fbox +\def\mfbox#1{\mathchoice{{\fbox{\ensuremath{\displaystyle #1}}}} + {{\fbox{\ensuremath{\textstyle #1}}}} + {{\fbox{\ensuremath{\scriptstyle #1}}}} + {{\fbox{\ensuremath{\scriptscriptstyle #1}}}}} + +% ---- ``ACCENTS'' ---- +% NB: Commands equivalent to \lrarrowfill and \overlrarrow are +% already defined in amstex! +\def\lrarrowfill{$\m@th\mathord\leftarrow\mkern-6mu% + \cleaders\hbox{$\mkern-2mu\mathord-\mkern-2mu$}\hfill + \mkern-6mu\mathord\rightarrow$} +\def\overlrarrow#1{\vbox{\ialign{##\crcr + \lrarrowfill\crcr\noalign{\kern-\p@\nointerlineskip} + $\hfil\displaystyle{#1}\hfil$\crcr}}} + +\def\Line#1{\!\overlrarrow{\vphantom{t}\smash{\,#1\,}}\!} +\def\Ray#1{\overrightarrow{\vphantom{t}#1\,}\!} +\def\Seg#1{\overline{\vphantom{t}#1}} + +% --- TEXT STYLES --- +\def\mathsc#1{\text{\textsc{#1}}} +\def\mathbs#1{\text{\boldmath\ensuremath{#1}}} + +% ---------------------------------------------------------------------- +% \begin{bigabstract}...\end{bigabstract} +% For use in documents with title pages. Use normal-sized text in +% the abstract! +% ---------------------------------------------------------------------- +\newenvironment{bigabstract}% +{\medskip\noindent\centerline{\textbf{\large\abstractname}}\begin{quotation}}% +{\end{quotation}} + +% ---------------------------------------------------------------------- +% Make captions smaller than the text, make their titles bold. +% Arguments: #1 == figure name: "Figure 5" +% #2 == caption: "Papers by \Erdos, 1900--2000 (projected)" +% Less space after floats and before caption, since captions are smaller +% ---------------------------------------------------------------------- +\@ifundefined{abovecaptionskip}{\newlength\abovecaptionskip} +\long\def\@makecaption#1#2{ + \vskip \abovecaptionskip + \setbox\@tempboxa\hbox{{\sf\footnotesize \textbf{#1.} #2}} + \ifdim \wd\@tempboxa >\hsize % IF longer than one line: + {\sf\footnotesize \textbf{#1.} #2\par}% THEN set as ordinary paragraph. + \else % ELSE center. + \hbox to\hsize{\hfil\box\@tempboxa\hfil} + \fi} +\dbltextfloatsep 18pt plus 2pt minus 4pt% was 20pt plus 2pt minus 4pt +\textfloatsep 18pt plus 2pt minus 4pt % was 20pt plus 2pt minus 4pt +\abovecaptionskip 6pt % was 10pt + +% ---------------------------------------------------------------------- +% Revised theorem environment: +% Add a period after the theorem number, and make theorems slanted +% instead of italic to help distinguish text from math. Use +% \boldmath in theorem label in case it includes math. +% ---------------------------------------------------------------------- +\def\@opargbegintheorem#1#2#3{\trivlist + \item[\hskip\labelsep{\bf\boldmath #1\ #2\ (#3).}]\sl} +\def\@begintheorem#1#2{\trivlist + \item[\hskip\labelsep{\bf\boldmath #1\ #2.}]\sl} + +% ---------------------------------------------------------------------- +% \newproof{type}{text}(style)[post]: +% Define a new type of unnumbered ``theorem'' environment. The last +% two arguments are optional; most environments will not use them. 
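+% For example (illustrative, not used below): \newproof{remark}{Remark}
+% defines a \begin{remark}...\end{remark} environment labelled ``Remark:'',
+% and \newproof{remark}{Remark}(\rm)[\qed] would additionally set the body
+% in roman and close it with a halmos, as the standard proof environment
+% defined further below does.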
+% +% To change label style, put new style command in label: +% \newproof{comment}{\sf Comment} +% ---------------------------------------------------------------------- +\def\newproof#1#2{\@ifnextchar({\@snproof{#1}{#2}}{\@snproof{#1}{#2}(\rm)}} +\def\@snproof#1#2(#3){\@ifnextchar[{\@xnproof{#1}{#2}{#3}} + {\@xnproof{#1}{#2}{#3}[]}} + +\def\@xnproof#1#2#3[#4]{%\expandafter\@ifdefinable\csname #1\endcsname +{\global\@namedef{#1}{\@prf{#2}{#3}}\global\@namedef{end#1}{\@endprf{#4}}}} + +\def\@prf#1#2{\@ifnextchar[{\@xprf{#1}{#2}}{\@yprf{#1}{#2}}} +\def\@xprf#1#2[#3]{\@yprf{#1\ (#3)}{#2}} +\def\@yprf#1#2{\begin{trivlist}\item[\hskip\labelsep{\bf\boldmath #1:}]#2} + +\def\@endprf#1{#1\end{trivlist}} + +% +% Standard proof envrionment: last line has a halmos at the right margin. +% +\@ifundefined{square}{\let\square\Box}{} % grumble latex2e +\def\QED{\ensuremath{{\square}}} +\def\markatright#1{\leavevmode\unskip\nobreak\quad\hspace*{\fill}{#1}} +\def\qed{\markatright{\QED}} +\newproof{proof}{Proof}[\qed] +\newproof{sketch}{Proof Sketch}[\qed] + +\newenvironment{rawproof}% + {\begin{trivlist}\item[\hskip\labelsep\textbf{Proof:}]}% + {\qed\end{trivlist}} + +% +% If the proof ends with a displayed equation, use \aftermath just +% before \end{proof} to put the halmos in the ``right'' place. This +% may not work near page boundaries. +% +\def\aftermath{\par\vspace{-\belowdisplayskip}\vspace{-\parskip}\vspace{-\baselineskip}} + +% ---------------------------------------------------------------------- +% One-shot theoremish environment. For named things like ``Zorn's +% Lemma'' or ``The Death Leap Principle'' or ``The Zone Theorem''. +% To get one-shot proofish environment, put \rm inside. +% New improved version stolen from Erik Demaine. +% ---------------------------------------------------------------------- +\newenvironment{oneshot}[1]{\@begintheorem{#1}{\unskip}}{\@endtheorem} + +% ---------------------------------------------------------------------- +% Algorithm and code environments -- Algorithms are set in normal +% text, with 2em indenting, surrounded by a box. Code is set in +% typewriter text, with 4 space indenting, and no box. +% +% NOTE! The true width of the minipage environment is determined by +% the contents of the longest line, but ONLY because the minipage +% contains a tabbing environment, and nothing else. It's a HaX! +% +% Both environments need optional line numbering and comment macros, +% but bold keywords are just distracting. +% ---------------------------------------------------------------------- +\def\begin@lgo{\begin{minipage}{1in}\begin{tabbing} + \quad\=\qquad\=\qquad\=\qquad\=\qquad\=\qquad\=\qquad\=\kill} +\def\end@lgo{\end{tabbing}\end{minipage}} + +\newenvironment{algorithm} +{\begin{tabular}{|l|}\hline\begin@lgo} +{\end@lgo\\\hline\end{tabular}} + +\newenvironment{algo} +{\begin{center}\begin{algorithm}} +{\end{algorithm}\end{center}} + +\def\Comment#1{\textsf{\textsl{$\langle\!\langle$#1\/$\rangle\!\rangle$}}} + +\def\beginc@de{\noindent\begin{center}\begin{minipage}{1in}\tt + \begin{tabbing}~~~~\=~~~~\=~~~~\=~~~~\=~~~~\=~~~~\=~~~~\=\kill} +\def\endc@de{\end{tabbing}\end{minipage}\end{center}} + +\newenvironment{code}{\beginc@de}{\endc@de} + +% +% I think these only work with <1 line of text. Use sparingly!! +% +\def\textul#1{\underline{\smash{#1}\vphantom{,}}} +\def\strike#1{\ensuremath{\overline{\text{\smash{#1}\vphantom{.}}}}} + +% ---------------------------------------------------------------------- +% Include a file verbatim. 
Searches the TEXINPUTS path for the file, +% even though that's probably not what you really want. Stolen from +% ``verbatimfiles.sty'' by Chris Rowley and others. [Don't use their +% \vertbatimlisting macro; it has some bizarre side-effects!] +% ---------------------------------------------------------------------- +\def\verbinput#1{ +\begingroup\@verbatim\frenchspacing\@vobeyspaces\input#1\endgroup +} + +% ---------------------------------------------------------------------- +% \now -- Current time in h:mm AM/PM format +% \mdyy -- Today's date in m/d/yy format. Forget Y2K; this is for humans! +% ---------------------------------------------------------------------- +\newcount\timehh\timehh=\time +\divide\timehh by 60 +\newcount\timemm\timemm=\time +\count255=\timehh +\multiply\count255 by -60 +\advance\timemm by \count255 +\newif\iftimePM +\ifnum\timehh>11 \timePMtrue\else\timePMfalse\fi +\ifnum\timehh<1 \advance\timehh by 12\fi +\ifnum\timehh>12 \advance\timehh by -12\fi +\def\now{\number\timehh:\ifnum\timemm<10 0\fi\number\timemm + \iftimePM pm\else am\fi} +\newcount\mdYY\mdYY=\year +\count255=\year +\divide\count255 by 100 +\multiply\count255 by 100 +\advance\mdYY by -\count255 +\def\mdyy{\number\month/\number\day/\ifnum\mdYY<10 0\fi\number\mdYY} + +% ---------------------------------------------------------------------- +% Notes to myself +% ---------------------------------------------------------------------- +\def\n@te#1{\textsf{$\langle\!\langle$#1$\rangle\!\rangle$}\leavevmode} +\def\n@tew@rn{\GenericWarning{}{AUTHOR WARNING: Unresolved \protect\note}} + +\def\n@ten@te#1{\marginpar + [\hfill\llap{\textcircled{\small#1}$\!\Longrightarrow$}] + {\rlap{$\Longleftarrow\!$\textcircled{\small#1}}}} + +\def\n@tedingb@t{\@ifundefined{textmusicalnote} + {$\circledcirc$} % if you don't have textcomp + {\textmusicalnote}} % if you do have textcomp + +\def\note#1{\n@tew@rn\n@te{\n@ten@te{\n@tedingb@t}#1}} + +\def\sidenote#1{\marginpar{\tiny\sf #1}} + + +%---------------------------------------------------------------------- +% Bibliography aliases, so I can use mnemonic citation keys and +% geom.bib at the same time. \bibalias{foo}{bar} makes \cite{foo} +% act (almost) exactly like \cite{bar}. +%---------------------------------------------------------------------- +\def\bibalias#1#2{% + \global\@namedef{b@#1}{% + {\@ifundefined{b@#2}{\textsf{?}}{}\csname b@#2\endcsname}}} + +%---------------------------------------------------------------------- +% Flag bad citations and refernces in the margin. Dangerous if you +% have lots of undefined refs; use batchmode the first time. +% Incompatible with some document classes. 
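+% Assuming this file is loaded as a package under its own name, the option
+% would be enabled with something like \usepackage[flagerrors]{jeffe}
+% (an illustrative guess; adjust to however the style is actually included).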
+%---------------------------------------------------------------------- +\DeclareOption{flagerrors} +{ +\def\badcite{\ifinner\else\n@ten@te{\textcent}\fi} + +\def\badref{\ifinner\else\n@ten@te{$\Join$}\fi} + +\def\bibalias#1#2{% + \global\@namedef{b@#1}{% + {\@ifundefined{b@#2}{\textsf{>>#1??}}{}\csname b@#2\endcsname}}} + +\def\@citex[#1]#2{% + \let\@citea\@empty + \@cite{\@for\@citeb:=#2\do + {\@citea\def\@citea{,\penalty\@m\ }% + \edef\@citeb{\expandafter\@firstofone\@citeb\@empty}% + \if@filesw\immediate\write\@auxout{\string\citation{\@citeb}}\fi + \@ifundefined{b@\@citeb}{\mbox{\reset@font\sffamily>\@citeb?}% + \badcite% + \G@refundefinedtrue% + \@latex@warning + {Citation `\@citeb' on page \thepage \space undefined}}% + {\hbox{\csname b@\@citeb\endcsname}}}}{#1}} + +\def\@setref#1#2#3{ + \ifx#1\relax + \protect\G@refundefinedtrue + \badref\mbox{\reset@font\sffamily >#3?} + \@latex@warning{Reference `#3' on page \thepage \space undefined}% + \else + \expandafter#2#1\null + \fi} +} + + +% ---------------------------------------------------------------------- +% Draft notice. Put \DRAFT on the title page, in place of \date. +% ---------------------------------------------------------------------- +\def\draftnotice{Preliminary draft --- \mdyy\ --- Not for distribution} + +\def\DRAFT{ +\date{\fbox{\textsf{\textbf{\draftnotice}}}} +\def\@oddfoot{\footnotesize\hss\fbox{\textbf{\textsf{\draftnotice}}}\hss} +\let\@evenfoot\@oddfoot +} + +% ---------------------------------------------------------------------- +% Dingbats from the TeX/METAFONT-book manual font +% Use only with 11 or 12 pt text. +% Doesn't work with TeXtures, which doesn't know about manfnt. +% ---------------------------------------------------------------------- +\font\manual=manfnt +\def\goodcube{{\manual\char28}} % possible cube from MF-book +\def\badcube{{\manual\char29}} % impossible cube from MF-book +\def\starknot{{\manual\char30}} % 2,5 torus knot from MF-book +\def\target{{\manual\char36}} % concentric circles from MF-book +\def\xflower{{\manual\char38}} % X flower dingbat from MF-book +\def\tflower{{\manual\char39}} % + flower dingbat from MF-book +\def\changeto{{\manual\char121}}% change arrow +\def\dbend{{\manual\char127}} % dangerous bend sign + +% ---------------------------------------------------------------------- +% Dangerous bend environments, stolen from TeXbook and slightly +% massaged. +% +% Invoke as \begin{[d]danger}...\end{[d]danger}. The enclosed text +% is offset just like any other list (theorem, proof, etc.) Puts +% sign[s] in front of first two lines of first paragraph, which are +% (supposedly) never split by a page boundary. Does NOT shrink or +% otherwise modify the text. +% ---------------------------------------------------------------------- +% The dangerous bend signs are the correct size to match two lines of +% Computer Modern 9pt type. I really ought to scale them up to +% fit whatever size we're using, but I won't. Unfortunately, manfnt +% thinks it's a 10pt font, so if we want to match 11pt Computer +% Modern, we have to scale manfnt to 12.222222pt! Thanks, Don. +% +% TeX is a HaX. 
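+% Spelled out (illustrative): \begin{danger} ... \end{danger} marks a
+% paragraph with one dangerous-bend sign, \begin{ddanger} ... \end{ddanger}
+% with two.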
+% ---------------------------------------------------------------------- + +% Single danger +\def\danger{\begin{trivlist}\item[]\noindent% +\begingroup\hangindent=2.5pc\hangafter=-2\clubpenalty=10000% +\def\par{\endgraf\endgroup}% +\hbox to 0pt{\hskip-\hangindent\dbend\hfill}\ignorespaces} +\def\enddanger{\end{trivlist}} + +% Double danger +\def\ddanger{\begin{trivlist}\item[]\noindent% +\begingroup\hangindent=4pc\hangafter=-2\clubpenalty=10000% +\def\par{\endgraf\endgroup}% +\hbox to 0pt{\hskip-\hangindent\dbend\kern2pt\dbend\hfill}\ignorespaces} +\def\endddanger{\end{trivlist}} + +%---------------------------------------------------------------------- +% 'cramped' list style, stolen from Jeff Vitter +%---------------------------------------------------------------------- +\def\cramped + {\parskip\@outerparskip\@topsep\parskip + \@topsepadd2pt\itemsep0pt +% \settowidth{\labelwidth}{\@itemlabel} +% \advance\leftmargin-\labelsep +% \advance\leftmargin-\labelwidth +% \advance\@totalleftmargin-\leftmargin +% \advance\linewidth\leftmargin +% \parshape1\@totalleftmargin\linewidth +} + +%---------------------------------------------------------------------- +% More footnote symbols, please. +%---------------------------------------------------------------------- +\def\@fnsymbol#1{\ensuremath{\ifcase#1\or *\or \dagger\or \ddagger\or + \mathsection\or \mathparagraph\or \|\or **\or \dagger\dagger + \or \ddagger\ddagger \or \mathsection\mathsection \or + \mathparagraph\mathparagraph \else\@ctrerr\fi}} + +%---------------------------------------------------------------------- +% TeXbook style quotations -- I should really use the right font, tho. +%---------------------------------------------------------------------- +\newenvironment{rightquote}[1] +{\flushright\begin{minipage}{#1\textwidth} +\parskip0pt\footnotesize\sffamily\slshape} +{\end{minipage}\par\bigskip} +\def\quotee#1{\par\vspace{0.5ex}\flushright\unskip\textup{--- #1}} + +\ProcessOptions diff --git a/report/pyp_clustering/acl09-short/common-bak/prettyref.sty b/report/pyp_clustering/acl09-short/common-bak/prettyref.sty new file mode 100644 index 00000000..67940f3b --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/prettyref.sty @@ -0,0 +1,37 @@ +%% +%% This is file `prettyref.sty', +%% generated with the docstrip utility. +%% +%% The original source files were: +%% +%% prettyref.dtx (with options: `style') +%% +%% Copyright (c) 1995 Kevin Ruland +%% +%% +%% prettyref v3.0 +%% +%% Copyright 1995,1998. by Kevin Ruland kevin@rodin.wustl.edu +%% +\ProvidesPackage{prettyref}[1998/07/09 v3.0] +\def\newrefformat#1#2{% + \@namedef{pr@#1}##1{#2}} +\newrefformat{eq}{\textup{(\ref{#1})}} +\newrefformat{lem}{Lemma \ref{#1}} +\newrefformat{thm}{Theorem \ref{#1}} +\newrefformat{cha}{Chapter \ref{#1}} +\newrefformat{sec}{Section \ref{#1}} +\newrefformat{tab}{Table \ref{#1} on page \pageref{#1}} +\newrefformat{fig}{Figure \ref{#1} on page \pageref{#1}} +\def\prettyref#1{\@prettyref#1:} +\def\@prettyref#1:#2:{% + \expandafter\ifx\csname pr@#1\endcsname\relax% + \PackageWarning{prettyref}{Reference format #1\space undefined}% + \ref{#1:#2}% + \else% + \csname pr@#1\endcsname{#1:#2}% + \fi% +} +\endinput +%% +%% End of file `prettyref.sty'. 
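+%%
+%% Usage note appended for illustration (not part of the original package):
+%% with the formats above, \label{sec:intro} followed by \prettyref{sec:intro}
+%% typesets ``Section <number>'', and a new prefix can be registered with,
+%% e.g., \newrefformat{alg}{Algorithm \ref{#1}} so that \prettyref{alg:sort}
+%% typesets ``Algorithm <number>''.  The label names and the ``alg'' prefix
+%% are made-up examples.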
diff --git a/report/pyp_clustering/acl09-short/common-bak/scrunchacl.bst b/report/pyp_clustering/acl09-short/common-bak/scrunchacl.bst new file mode 100644 index 00000000..26e1fca6 --- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/scrunchacl.bst @@ -0,0 +1,1317 @@ + +% BibTeX `scrunchacl' style file made by modifying `acl' style file. +% Lillian Lee, February 2003. Abbreviate first names, no explicit +% marker for page numbers, +% "editors" -> "eds", but still consistent with `fullname.sty'. No +% "and" between multiple authors. +% Also, might still have trouble with double-years, sometimes + +% BibTeX `acl' style file for BibTeX version 0.99c, LaTeX version 2.09 +% This version was made by modifying `aaai-named' format based on the master +% file by Oren Patashnik (PATASHNIK@SCORE.STANFORD.EDU) + +% Copyright (C) 1985, all rights reserved. +% Modifications Copyright 1988, Peter F. Patel-Schneider +% Further modifictions by Stuart Shieber, 1991, and Fernando Pereira, 1992. +% Copying of this file is authorized only if either +% (1) you make absolutely no changes to your copy, including name, or +% (2) if you do make changes, you name it something other than +% btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst. +% This restriction helps ensure that all standard styles are identical. + +% There are undoubtably bugs in this style. If you make bug fixes, +% improvements, etc. please let me know. My e-mail address is: +% pfps@spar.slb.com + +% Citation format: [author-last-name, year] +% [author-last-name and author-last-name, year] +% [author-last-name {\em et al.}, year] +% +% Reference list ordering: alphabetical by author or whatever passes +% for author in the absence of one. +% +% This BibTeX style has support for short (year only) citations. This +% is done by having the citations actually look like +% \citename{name-info, }year +% The LaTeX style has to have the following +% \let\@internalcite\cite +% \def\cite{\def\citename##1{##1}\@internalcite} +% \def\shortcite{\def\citename##1{}\@internalcite} +% \def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} +% which makes \shortcite the macro for short citations. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Changes made by SMS for thesis style +% no emphasis on "et al." +% "Ph.D." 
includes periods (not "PhD") +% moved year to immediately after author's name +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +ENTRY + { address + author + booktitle + chapter + edition + editor + howpublished + institution + journal + key + month + note + number + organization + pages + publisher + school + series + title + type + volume + year + } + {} + { label extra.label sort.label } + +INTEGERS { output.state before.all mid.sentence after.sentence after.block } + +FUNCTION {init.state.consts} +{ #0 'before.all := + #1 'mid.sentence := + #2 'after.sentence := + #3 'after.block := +} + +STRINGS { s t } + +FUNCTION {output.nonnull} +{ 's := + output.state mid.sentence = + { ", " * write$ } + { output.state after.block = + { add.period$ write$ + newline$ + "\newblock " write$ + } + { output.state before.all = + 'write$ + { add.period$ " " * write$ } + if$ + } + if$ + mid.sentence 'output.state := + } + if$ + s +} + +FUNCTION {output} +{ duplicate$ empty$ + 'pop$ + 'output.nonnull + if$ +} + +FUNCTION {output.check} +{ 't := + duplicate$ empty$ + { pop$ "empty " t * " in " * cite$ * warning$ } + 'output.nonnull + if$ +} + + +FUNCTION {output.bibitem} +{ + "\bibitem[" write$ + label write$ + "]{" write$ + + cite$ write$ + "}" write$ + "" + before.all 'output.state := +} + +FUNCTION {fin.entry} +{ add.period$ + write$ + "\bibsnip" write$ + newline$ +} + +FUNCTION {new.block} +{ output.state before.all = + 'skip$ + { after.block 'output.state := } + if$ +} + +FUNCTION {new.sentence} +{ output.state after.block = + 'skip$ + { output.state before.all = + 'skip$ + { after.sentence 'output.state := } + if$ + } + if$ +} + +FUNCTION {not} +{ { #0 } + { #1 } + if$ +} + +FUNCTION {and} +{ 'skip$ + { pop$ #0 } + if$ +} + +FUNCTION {or} +{ { pop$ #1 } + 'skip$ + if$ +} + +FUNCTION {new.block.checka} +{ empty$ + 'skip$ + 'new.block + if$ +} + +FUNCTION {new.block.checkb} +{ empty$ + swap$ empty$ + and + 'skip$ + 'new.block + if$ +} + +FUNCTION {new.sentence.checka} +{ empty$ + 'skip$ + 'new.sentence + if$ +} + +FUNCTION {new.sentence.checkb} +{ empty$ + swap$ empty$ + and + 'skip$ + 'new.sentence + if$ +} + +FUNCTION {field.or.null} +{ duplicate$ empty$ + { pop$ "" } + 'skip$ + if$ +} + +FUNCTION {emphasize} +{ duplicate$ empty$ + { pop$ "" } + { "{\em " swap$ * "}" * } + if$ +} + +INTEGERS { nameptr namesleft numnames } + + + +FUNCTION {format.names} +{ 's := + #1 'nameptr := + s num.names$ 'numnames := + numnames 'namesleft := + { namesleft #0 > } + { s nameptr "{f.~}{vv~}{ll}{, jj}" format.name$ 't := + nameptr #1 > + { namesleft #1 > + { ", " * t * } + { numnames #2 > + { "" * } + 'skip$ + if$ + t "others" = + { " et~al." * } + { ", " * t * } + if$ + } + if$ + } + 't + if$ + nameptr #1 + 'nameptr := + namesleft #1 - 'namesleft := + } + while$ +} + +FUNCTION {format.authors} +{ author empty$ + { "" } + { author format.names } + if$ +} + +FUNCTION {format.editors} +{ editor empty$ + { "" } + { editor format.names + editor num.names$ #1 > + { ", eds." * } + { ", ed." 
* } + if$ + } + if$ +} + +FUNCTION {format.title} +{ title empty$ + { "" } + { title "t" change.case$ } + if$ +} + +FUNCTION {n.dashify} +{ 't := + "" + { t empty$ not } + { t #1 #1 substring$ "-" = + { t #1 #2 substring$ "--" = not + { "--" * + t #2 global.max$ substring$ 't := + } + { { t #1 #1 substring$ "-" = } + { "-" * + t #2 global.max$ substring$ 't := + } + while$ + } + if$ + } + { t #1 #1 substring$ * + t #2 global.max$ substring$ 't := + } + if$ + } + while$ +} + +FUNCTION {format.date} +{ year empty$ + { month empty$ + { "" } + { "there's a month but no year in " cite$ * warning$ + month + } + if$ + } + { month empty$ + 'year + { month " " * year * } + if$ + } + if$ +} + +FUNCTION {format.btitle} +{ title emphasize +} + +FUNCTION {tie.or.space.connect} +{ duplicate$ text.length$ #3 < + { "~" } + { " " } + if$ + swap$ * * +} + +FUNCTION {either.or.check} +{ empty$ + 'pop$ + { "can't use both " swap$ * " fields in " * cite$ * warning$ } + if$ +} + +FUNCTION {format.bvolume} +{ volume empty$ + { "" } + { "volume" volume tie.or.space.connect + series empty$ + 'skip$ + { " of " * series emphasize * } + if$ + "volume and number" number either.or.check + } + if$ +} + +FUNCTION {format.number.series} +{ volume empty$ + { number empty$ + { series field.or.null } + { output.state mid.sentence = + { "number" } + { "Number" } + if$ + number tie.or.space.connect + series empty$ + { "there's a number but no series in " cite$ * warning$ } + { " in " * series * } + if$ + } + if$ + } + { "" } + if$ +} + +FUNCTION {format.edition} +{ edition empty$ + { "" } + { output.state mid.sentence = + { edition "l" change.case$ " edition" * } + { edition "t" change.case$ " edition" * } + if$ + } + if$ +} + +INTEGERS { multiresult } + +FUNCTION {multi.page.check} +{ 't := + #0 'multiresult := + { multiresult not + t empty$ not + and + } + { t #1 #1 substring$ + duplicate$ "-" = + swap$ duplicate$ "," = + swap$ "+" = + or or + { #1 'multiresult := } + { t #2 global.max$ substring$ 't := } + if$ + } + while$ + multiresult +} + +FUNCTION {format.pages} +{ pages empty$ + { "" } + { pages multi.page.check + { "" pages n.dashify tie.or.space.connect } + { "pg." 
pages tie.or.space.connect } + if$ + } + if$ +} + +FUNCTION {format.year.label} +{ year extra.label * +} + +FUNCTION {format.vol.num.pages} +{ volume field.or.null + number empty$ + 'skip$ + { "(" number * ")" * * + volume empty$ + { "there's a number but no volume in " cite$ * warning$ } + 'skip$ + if$ + } + if$ + pages empty$ + 'skip$ + { duplicate$ empty$ + { pop$ format.pages } + { ":" * pages n.dashify * } + if$ + } + if$ +} + +FUNCTION {format.chapter.pages} +{ chapter empty$ + 'format.pages + { type empty$ + { "chapter" } + { type "l" change.case$ } + if$ + chapter tie.or.space.connect + pages empty$ + 'skip$ + { ", " * format.pages * } + if$ + } + if$ +} + +FUNCTION {format.in.ed.booktitle} +{ booktitle empty$ + { "" } + { editor empty$ + { "In " booktitle emphasize * } + { "In " format.editors * ", " * booktitle emphasize * } + if$ + } + if$ +} + +FUNCTION {empty.misc.check} +{ author empty$ title empty$ howpublished empty$ + month empty$ year empty$ note empty$ + and and and and and + + key empty$ not and + + { "all relevant fields are empty in " cite$ * warning$ } + 'skip$ + if$ +} + +FUNCTION {format.thesis.type} +{ type empty$ + 'skip$ + { pop$ + type "t" change.case$ + } + if$ +} + +FUNCTION {format.tr.number} +{ type empty$ + { "Technical Report" } + 'type + if$ + number empty$ + { "t" change.case$ } + { number tie.or.space.connect } + if$ +} + +FUNCTION {format.article.crossref} +{ key empty$ + { journal empty$ + { "need key or journal for " cite$ * " to crossref " * crossref * + warning$ + "" + } + { "In {\em " journal * "\/}" * } + if$ + } + { "In " key * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {format.crossref.editor} +{ editor #1 "{vv~}{ll}" format.name$ + editor num.names$ duplicate$ + #2 > + { pop$ " et~al." * } + { #2 < + 'skip$ + { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" = + { " et~al." 
* } + { " and " * editor #2 "{vv~}{ll}" format.name$ * } + if$ + } + if$ + } + if$ +} + +FUNCTION {format.book.crossref} +{ volume empty$ + { "empty volume in " cite$ * "'s crossref of " * crossref * warning$ + "In " + } + { "Volume" volume tie.or.space.connect + " of " * + } + if$ + editor empty$ + editor field.or.null author field.or.null = + or + { key empty$ + { series empty$ + { "need editor, key, or series for " cite$ * " to crossref " * + crossref * warning$ + "" * + } + { "{\em " * series * "\/}" * } + if$ + } + { key * } + if$ + } + { format.crossref.editor * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {format.incoll.inproc.crossref} +{ editor empty$ + editor field.or.null author field.or.null = + or + { key empty$ + { booktitle empty$ + { "need editor, key, or booktitle for " cite$ * " to crossref " * + crossref * warning$ + "" + } + { "In {\em " booktitle * "\/}" * } + if$ + } + { "In " key * } + if$ + } + { "In " format.crossref.editor * } + if$ + " \cite{" * crossref * "}" * +} + +FUNCTION {article} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { journal emphasize "journal" output.check + format.vol.num.pages output + } + { format.article.crossref output.nonnull + format.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {book} +{ output.bibitem + author empty$ + { format.editors "author and editor" output.check } + { format.authors output.nonnull + crossref missing$ + { "author and editor" editor either.or.check } + 'skip$ + if$ + } + if$ + new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + crossref missing$ + { format.bvolume output + new.block + format.number.series output + new.sentence + publisher "publisher" output.check + address output + } + { new.block + format.book.crossref output.nonnull + } + if$ + format.edition output + note output + fin.entry +} + +FUNCTION {booklet} +{ output.bibitem + format.authors output + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + howpublished address new.block.checkb + howpublished output + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {inbook} +{ output.bibitem + author empty$ + { format.editors "author and editor" output.check } + { format.authors output.nonnull + crossref missing$ + { "author and editor" editor either.or.check } + 'skip$ + if$ + } + if$ + format.year.label "year" output.check + new.block + new.block + format.btitle "title" output.check + crossref missing$ + { format.bvolume output + format.chapter.pages "chapter and pages" output.check + new.block + format.number.series output + new.sentence + publisher "publisher" output.check + address output + } + { format.chapter.pages "chapter and pages" output.check + new.block + format.book.crossref output.nonnull + } + if$ + format.edition output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {incollection} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { format.in.ed.booktitle "booktitle" output.check + format.bvolume output + format.number.series output + format.chapter.pages output + new.sentence + publisher "publisher" output.check + address output + format.edition output + } + { 
format.incoll.inproc.crossref output.nonnull + format.chapter.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {inproceedings} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + crossref missing$ + { format.in.ed.booktitle "booktitle" output.check + format.bvolume output + format.number.series output + format.pages output + address empty$ + { organization publisher new.sentence.checkb + organization output + publisher output + } + { address output.nonnull + new.sentence + organization output + publisher output + } + if$ + } + { format.incoll.inproc.crossref output.nonnull + format.pages output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {conference} { inproceedings } + +FUNCTION {manual} +{ output.bibitem + author empty$ + { organization empty$ + 'skip$ + { organization output.nonnull + address output + } + if$ + } + { format.authors output.nonnull } + if$ + format.year.label "year" output.check + new.block + new.block + format.btitle "title" output.check + author empty$ + { organization empty$ + { address new.block.checka + address output + } + 'skip$ + if$ + } + { organization address new.block.checkb + organization output + address output + } + if$ + format.edition output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {mastersthesis} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + "Master's thesis" format.thesis.type output.nonnull + school "school" output.check + address output + note output + fin.entry +} + +FUNCTION {misc} +{ output.bibitem + format.authors output + new.block + format.year.label output + new.block + title howpublished new.block.checkb + format.title output + howpublished new.block.checka + howpublished output + format.date output + new.block + note output + fin.entry + empty.misc.check +} + +FUNCTION {phdthesis} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + new.block + "{Ph.D.} thesis" format.thesis.type output.nonnull + school "school" output.check + address output + note output + fin.entry +} + +FUNCTION {proceedings} +{ output.bibitem + editor empty$ + { organization output } + { format.editors output.nonnull } + if$ + new.block + format.year.label "year" output.check + new.block + format.btitle "title" output.check + format.bvolume output + format.number.series output + address empty$ + { editor empty$ + { publisher new.sentence.checka } + { organization publisher new.sentence.checkb + organization output + } + if$ + publisher output + format.date output + } + { address output.nonnull + format.date output + new.sentence + editor empty$ + 'skip$ + { organization output } + if$ + publisher output + } + if$ + new.block + note output + fin.entry +} + +FUNCTION {techreport} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + format.title "title" output.check + new.block + format.tr.number output.nonnull + institution "institution" output.check + address output + format.date output + new.block + note output + fin.entry +} + +FUNCTION {unpublished} +{ output.bibitem + format.authors "author" output.check + new.block + format.year.label "year" output.check + new.block + 
format.title "title" output.check + new.block + note "note" output.check + fin.entry +} + +FUNCTION {default.type} { misc } + +MACRO {jan} {"January"} + +MACRO {feb} {"February"} + +MACRO {mar} {"March"} + +MACRO {apr} {"April"} + +MACRO {may} {"May"} + +MACRO {jun} {"June"} + +MACRO {jul} {"July"} + +MACRO {aug} {"August"} + +MACRO {sep} {"September"} + +MACRO {oct} {"October"} + +MACRO {nov} {"November"} + +MACRO {dec} {"December"} + +MACRO {acmcs} {"ACM Computing Surveys"} + +MACRO {acta} {"Acta Informatica"} + +MACRO {cacm} {"Communications of the ACM"} + +MACRO {ibmjrd} {"IBM Journal of Research and Development"} + +MACRO {ibmsj} {"IBM Systems Journal"} + +MACRO {ieeese} {"IEEE Transactions on Software Engineering"} + +MACRO {ieeetc} {"IEEE Transactions on Computers"} + +MACRO {ieeetcad} + {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"} + +MACRO {ipl} {"Information Processing Letters"} + +MACRO {jacm} {"Journal of the ACM"} + +MACRO {jcss} {"Journal of Computer and System Sciences"} + +MACRO {scp} {"Science of Computer Programming"} + +MACRO {sicomp} {"SIAM Journal on Computing"} + +MACRO {tocs} {"ACM Transactions on Computer Systems"} + +MACRO {tods} {"ACM Transactions on Database Systems"} + +MACRO {tog} {"ACM Transactions on Graphics"} + +MACRO {toms} {"ACM Transactions on Mathematical Software"} + +MACRO {toois} {"ACM Transactions on Office Information Systems"} + +MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"} + +MACRO {tcs} {"Theoretical Computer Science"} + +READ + +FUNCTION {sortify} +{ purify$ + "l" change.case$ +} + +INTEGERS { len } + +FUNCTION {chop.word} +{ 's := + 'len := + s #1 len substring$ = + { s len #1 + global.max$ substring$ } + 's + if$ +} + +INTEGERS { et.al.char.used } + +FUNCTION {initialize.et.al.char.used} +{ #0 'et.al.char.used := +} + +EXECUTE {initialize.et.al.char.used} + +FUNCTION {format.lab.names} +{ 's := + s num.names$ 'numnames := + + numnames #1 = + { s #1 "{vv }{ll}" format.name$ } + { numnames #2 = + { s #1 "{vv }{ll }and " format.name$ s #2 "{vv }{ll}" format.name$ * + } + { s #1 "{vv }{ll }\bgroup et al.\egroup " format.name$ } + if$ + } + if$ + +} + +FUNCTION {author.key.label} +{ author empty$ + { key empty$ + + { cite$ #1 #3 substring$ } + + { key #3 text.prefix$ } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {author.editor.key.label} +{ author empty$ + { editor empty$ + { key empty$ + + { cite$ #1 #3 substring$ } + + { key #3 text.prefix$ } + if$ + } + { editor format.lab.names } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {author.key.organization.label} +{ author empty$ + { key empty$ + { organization empty$ + + { cite$ #1 #3 substring$ } + + { "The " #4 organization chop.word #3 text.prefix$ } + if$ + } + { key #3 text.prefix$ } + if$ + } + { author format.lab.names } + if$ +} + +FUNCTION {editor.key.organization.label} +{ editor empty$ + { key empty$ + { organization empty$ + + { cite$ #1 #3 substring$ } + + { "The " #4 organization chop.word #3 text.prefix$ } + if$ + } + { key #3 text.prefix$ } + if$ + } + { editor format.lab.names } + if$ +} + +FUNCTION {calc.label} +{ type$ "book" = + type$ "inbook" = + or + 'author.editor.key.label + { type$ "proceedings" = + 'editor.key.organization.label + { type$ "manual" = + 'author.key.organization.label + 'author.key.label + if$ + } + if$ + } + if$ + duplicate$ + + "\protect\citename{" swap$ * "}" * + year field.or.null purify$ * + 'label := + year field.or.null purify$ * + + sortify 'sort.label := +} + +FUNCTION 
{sort.format.names} +{ 's := + #1 'nameptr := + "" + s num.names$ 'numnames := + numnames 'namesleft := + { namesleft #0 > } + { nameptr #1 > + { " " * } + 'skip$ + if$ + + s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't := + + nameptr numnames = t "others" = and + { "et al" * } + { t sortify * } + if$ + nameptr #1 + 'nameptr := + namesleft #1 - 'namesleft := + } + while$ +} + +FUNCTION {sort.format.title} +{ 't := + "A " #2 + "An " #3 + "The " #4 t chop.word + chop.word + chop.word + sortify + #1 global.max$ substring$ +} + +FUNCTION {author.sort} +{ author empty$ + { key empty$ + { "to sort, need author or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {author.editor.sort} +{ author empty$ + { editor empty$ + { key empty$ + { "to sort, need author, editor, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { editor sort.format.names } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {author.organization.sort} +{ author empty$ + { organization empty$ + { key empty$ + { "to sort, need author, organization, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { "The " #4 organization chop.word sortify } + if$ + } + { author sort.format.names } + if$ +} + +FUNCTION {editor.organization.sort} +{ editor empty$ + { organization empty$ + { key empty$ + { "to sort, need editor, organization, or key in " cite$ * warning$ + "" + } + { key sortify } + if$ + } + { "The " #4 organization chop.word sortify } + if$ + } + { editor sort.format.names } + if$ +} + +FUNCTION {presort} + +{ calc.label + sort.label + " " + * + type$ "book" = + + type$ "inbook" = + or + 'author.editor.sort + { type$ "proceedings" = + 'editor.organization.sort + { type$ "manual" = + 'author.organization.sort + 'author.sort + if$ + } + if$ + } + if$ + + * + + " " + * + year field.or.null sortify + * + " " + * + title field.or.null + sort.format.title + * + #1 entry.max$ substring$ + 'sort.key$ := +} + +ITERATE {presort} + +SORT + +STRINGS { longest.label last.sort.label next.extra } + +INTEGERS { longest.label.width last.extra.num } + +FUNCTION {initialize.longest.label} +{ "" 'longest.label := + #0 int.to.chr$ 'last.sort.label := + "" 'next.extra := + #0 'longest.label.width := + #0 'last.extra.num := +} + +FUNCTION {forward.pass} +{ last.sort.label sort.label = + { last.extra.num #1 + 'last.extra.num := + last.extra.num int.to.chr$ 'extra.label := + } + { "a" chr.to.int$ 'last.extra.num := + "" 'extra.label := + sort.label 'last.sort.label := + } + if$ +} + +FUNCTION {reverse.pass} +{ next.extra "b" = + { "a" 'extra.label := } + 'skip$ + if$ + label extra.label * 'label := + label width$ longest.label.width > + { label 'longest.label := + label width$ 'longest.label.width := + } + 'skip$ + if$ + extra.label 'next.extra := +} + +EXECUTE {initialize.longest.label} + +ITERATE {forward.pass} + +REVERSE {reverse.pass} + +FUNCTION {begin.bib} + +{ et.al.char.used + { "\newcommand{\etalchar}[1]{$^{#1}$}" write$ newline$ } + 'skip$ + if$ + preamble$ empty$ + + 'skip$ + { preamble$ write$ newline$ } + if$ + + "\begin{thebibliography}{" "}" * write$ newline$ + +} + +EXECUTE {begin.bib} + +EXECUTE {init.state.consts} + +ITERATE {call.type$} + +FUNCTION {end.bib} +{ newline$ + "\end{thebibliography}" write$ newline$ +} + +EXECUTE {end.bib} + + diff --git a/report/pyp_clustering/acl09-short/common-bak/standard.bib b/report/pyp_clustering/acl09-short/common-bak/standard.bib new file mode 100644 index 00000000..be782326 
--- /dev/null +++ b/report/pyp_clustering/acl09-short/common-bak/standard.bib @@ -0,0 +1,2702 @@ +%% This BibTeX bibliography file was created using BibDesk. +%% http://bibdesk.sourceforge.net/ + + +%% Created for Phil Blunsom at 2009-10-22 14:37:31 +0100 + + +%% Saved with string encoding Unicode (UTF-8) + + +@string{+aaai1980 = {Proc.\ of the 1st Conference on Artificial Intelligence (AAAI-80)}} + +@string{+aaai1991 = {Proc.\ of the 9th Annual Conference on Artificial Intelligence (AAAI-91)}} + +@string{+acl1983 = {Proc.\ of the 21st Annual Meeting of the ACL}} + +@string{+acl1984 = {Proc.\ of the 22nd Annual Meeting of the ACL}} + +@string{+acl1985 = {Proc.\ of the 23rd Annual Meeting of the ACL}} + +@string{+acl1986 = {Proc.\ of the 24th Annual Meeting of the ACL}} + +@string{+acl1987 = {Proc.\ of the 25th Annual Meeting of the ACL}} + +@string{+acl1988 = {Proc.\ of the 26th Annual Meeting of the ACL}} + +@string{+acl1989 = {Proc.\ of the 27th Annual Meeting of the ACL}} + +@string{+acl1990 = {Proc.\ of the 28th Annual Meeting of the ACL}} + +@string{+acl1991 = {Proc.\ of the 29th Annual Meeting of the ACL}} + +@string{+acl1992 = {Proc.\ of the 30th Annual Meeting of the ACL}} + +@string{+acl1992-ss = {Proc.\ of the 30th Annual Meeting of the ACL, Student Session}} + +@string{+acl1993 = {Proc.\ of the 31st Annual Meeting of the ACL}} + +@string{+acl1994 = {Proc.\ of the 32nd Annual Meeting of the ACL}} + +@string{+acl1995 = {Proc.\ of the 33rd Annual Meeting of the ACL}} + +@string{+acl1996 = {Proc.\ of the 34th Annual Meeting of the ACL}} + +@string{+acl1997 = {Proc.\ of the 35th Annual Meeting of the ACL and 8th Conference of the EACL (ACL-EACL'97)}} + +@string{+acl1997ws-ca = {Proc.\ of ACL/EACL'97 Workshop on Natural Language Processing for Communication Aids}} + +@string{+acl1997ws-ie = {Proc.\ of ACL/EACL'97 Workshop on Automatic Information Extraction and Building of Lexical Semantic Resources for NLP Applications}} + +@string{+acl1998 = {Proc.\ of the 36th Annual Meeting of the ACL and 17th International Conference on Computational Linguistics: COLING/ACL-98}} + +@string{+acl1998-ss = {Proc.\ of the 36th Annual Meeting of the ACL and 17th International Conference on Computational Linguistics (COLING/ACL-98), Student Session}} + +@string{+acl1999 = {Proc.\ of the 37th Annual Meeting of the ACL}} + +@string{+acl2000 = {Proc.\ of the 38th Annual Meeting of the ACL}} + +@string{+acl2001 = {Proc.\ of the 39th Annual Meeting of the ACL and 10th Conference of the EACL (ACL-EACL 2001)}} + +@string{+acl2001-ddmt = {Proc.\ of the ACL/EACL 2001 Workshop on Data-Driven Methods in Machine Translation}} + +@string{+acl2001-wscoll = {Proc.\ of the ACL/EACL 2001 Workshop on the Computational Extraction, Analysis and Exploitation of Collocations}} + +@string{+acl2002 = {Proc.\ of the 40th Annual Meeting of the ACL and 3rd Annual Meeting of the NAACL (ACL-2002)}} + +@string{+acl2002-lexacq = {Proc.\ of the ACL-2002 SIGLEX Workshop on Unsupervised Lexical Acquisition}} + +@string{+acl2002-sigphon = {Proc.\ of the 6th Meeting of the ACL Special Interest Group in Computational Phonology}} + +@string{+acl2002-ss = {Proc.\ of the Student Research Workshop, 40th Annual Meeting of the ACL (ACL-2002)}} + +@string{+acl2002-wsd = {Proc.\ of the ACL-2002 Workshop on Word Sense Disambiguation: Recent Successes and Future Directions}} + +@string{+acl2003 = {Proc.\ of the 41st Annual Meeting of the ACL (ACL-2003)}} + +@string{+acl2003-mwe = {Proc.\ of the ACL-2003 Workshop on Multiword Expressions: Analysis, 
Acquisition and Treatment}} + +@string{+acl2004 = {Proc.\ of the 42nd Annual Meeting of the ACL (ACL-2004)}} + +@string{+acl2004-mwe = {Proc.\ of the ACL 2004 Workshop on Multiword Expressions: Integrating Processing}} + +@string{+acl2004-senseval = {Proc.\ of Senseval-3: Third International Workshop on the Evaluation of Systems for the Semantic Analysis of Text}} + +@string{+acl2004-tmi = {Proc.\ of the 2nd Workshop on Text Meaning and Interpretation}} + +@string{+acl2005 = {Proc.\ of the 43rd Annual Meeting of the ACL (ACL-2005)}} + +@string{+acl2005-dla = {Proc.\ of the ACL 2005 Workshop on Deep Lexical Acquisition}} + +@string{+acl2005-pt = {Proc.\ of the ACL 2005 Workshop on Parallel Texts}} + +@string{+acl2006 = {Proc.\ of the 44th Annual Meeting of the ACL and 21st International Conference on Computational Linguistics (COLING/ACL-2006)}} + +@string{+acl2007 = {Proc.\ of the 45th Annual Meeting of the ACL (ACL-2007)}} + +@string{+acl2008 = {Proc.\ of the 46th Annual Conference of the Association for Computational Linguistics: Human Language Technologies (ACL-08:HLT)}} + +@string{+acl2009 = {Proc.\ of the Joint conference of the 47th Annual Meeting of the Association for Computational Linguistics and the 4th International Joint Conference on Natural Language Processing of the Asian Federation of Natural Language Processing (ACL/IJCNLP-09)}} + +@string{+amta1998 = {Proc.\ of AMTA'98: Conference of the Association for Machine Translation in the Americas}} + +@string{+amta2002 = {Proc.\ of the 6th Conference of the Association for Machine Translation in the Americas (AMTA)}} + +@string{+anlp1988 = {Proc.\ of the 2nd Conference on Applied Natural Language Processing (ANLP)}} + +@string{+anlp1992 = {Proc.\ of the 3rd Conference on Applied Natural Language Processing (ANLP)}} + +@string{+anlp1994 = {Proc.\ of the 4th Conference on Applied Natural Language Processing (ANLP)}} + +@string{+anlp1997 = {Proc.\ of the 5th Conference on Applied Natural Language Processing (ANLP)}} + +@string{+anlp2000 = {Proc.\ of the 6th Conference on Applied Natural Language Processing (ANLP)}} + +@string{+arpa1993 = {Proc.\ of the ARPA Human Language Technology Workshop}} + +@string{+arpa1994 = {Proc.\ of the ARPA Human Language Technology Workshop}} + +@string{+asialex2003 = {Proc.\ of the Asian Association for Lexicography (ASIALEX 2003)}} + +@string{+bls1988 = {Proc.\ of the 14th Annual Meeting of the Berkeley Linguistics Society}} + +@string{+bls2001 = {Proc.\ of the 27th Annual Meeting of the Berkeley Linguistics Society}} + +@string{+cicling2002 = {Proc.\ of the 3rd International Conference on Intelligent Text Processing and Computational Linguistics (CICLing-2002)}} + +@string{+cicling2003 = {Proc.\ of the 4th International Conference on Intelligent Text Processing and Computational Linguistics (CICLing-2003)}} + +@string{+cl = {Computational Linguistics}} + +@string{+clef2003 = {Working Notes for the CLEF 2003 Workshop}} + +@string{+clin1994 = {Papers from the 4th CLIN Meeting}} + +@string{+clin2000 = {Computational Linguistics in the Netherlands 2000}} + +@string{+clin2003 = {Papers from the 14th Meeting of Computational Linguistics in the Netherlands}} + +@string{+cls1968 = {Papers of the 4th Regional Meeting of the Chicago Linguistics Society}} + +@string{+cls1982 = {Papers of the 18th Regional Meeting of the Chicago Linguistics Society}} + +@string{+cls1988 = {Papers of the 24th Regional Meeting of the Chicago Linguistics Society}} + +@string{+cls1995 = {Papers of the 31st Regional Meeting of the 
Chicago Linguistics Society}} + +@string{+cluk1999 = {Proc.\ of the 2nd UK Special Interest Group for Computational Linguistics (CLUK2)}} + +@string{+cluk2001 = {Proc.\ of the 4th UK Special Interest Group for Computational Linguistics (CLUK4)}} + +@string{+coe1998 = {Proc.\ of the Kanda University of International Studies Graduate School of Language Sciences Centre of Excellence in Linguistics (COE) International Workshop}} + +@string{+cogsci2000 = {Proc.\ of the 22nd Annual Meeting of the Cognitive Science Society (CogSci 2000)}} + +@string{+coling1980 = {Proc.\ of the 8th International Conference on Computational Linguistics (COLING '80)}} + +@string{+coling1982 = {Proc.\ of the 9th International Conference on Computational Linguistics (COLING '82)}} + +@string{+coling1984 = {Proc.\ of the 10th International Conference on Computational Linguistics (COLING '84)}} + +@string{+coling1986 = {Proc.\ of the 11th International Conference on Computational Linguistics (COLING '86)}} + +@string{+coling1990 = {Proc.\ of the 13th International Conference on Computational Linguistics (COLING '90)}} + +@string{+coling1992 = {Proc.\ of the 14th International Conference on Computational Linguistics (COLING '92)}} + +@string{+coling1994 = {Proc.\ of the 15th International Conference on Computational Linguistics (COLING '94)}} + +@string{+coling1996 = {Proc.\ of the 16th International Conference on Computational Linguistics (COLING '96)}} + +@string{+coling1998 = {Proc.\ of the 36th Annual Meeting of the ACL and 17th International Conference on Computational Linguistics (COLING/ACL-98)}} + +@string{+coling1998-nominals = {Proc.\ of the COLING-ACL'98 Workshop on the Computational Treatment of Nominals}} + +@string{+coling1998-term = {Proc.\ of the COLING-ACL'98 Workshop on Computational Terminology}} + +@string{+coling1998-wordnet = {Proc.\ of the COLING-ACL'98 Workshop on the Usage of {WordNet} in Natural Language Processing Systems}} + +@string{+coling2000 = {Proc.\ of the 18th International Conference on Computational Linguistics (COLING-2000)}} + +@string{+coling2000-semws = {Proc.\ of the COLING 2000 Workshop on Semantic Annotation and Intelligent Content}} + +@string{+coling2002 = {Proc.\ of the 19th International Conference on Computational Linguistics (COLING-2002)}} + +@string{+coling2002-gee = {Proc.\ of the Workshop on Grammar Engineering and Evaluation at the 19th International Conference on Computational Linguistics}} + +@string{+coling2002-mt = {Proc.\ of the COLING-2002 Workshop on Machine Translation in Asia}} + +@string{+coling2004 = {Proc.\ of the 20th International Conference on Computational Linguistics (COLING-2004)}} + +@string{+coling2004-dict = {Proc.\ of the COLING-2004 Workshop on Enhancing and Using Electronic Dictionaries}} + +@string{+coling2004-ml = {Proc.\ of the COLING-2004 Workshop on Multilingual Resources}} + +@string{+coling2006 = {Proc.\ of the 44th Annual Meeting of the ACL and 21st International Conference on Computational Linguistics (COLING/ACL 06)}} + +@string{+coling2008 = {Proc.\ of the 22th International Conference on Computational Linguistics (COLING-2008)}} + +@string{+conll1997 = {Proc.\ of the Conference on Computational Natural Language Learning (CoNLL-97)}} + +@string{+conll1998 = {Proc.\ of the Joint Conference on New Methods in Language Processing and Computational Natural Language Learning (NeMLaP3/CoNLL98)}} + +@string{+conll1999 = {Proc.\ of the 3rd Conference on Computational Natural Language Learning (CoNLL-99)}} + +@string{+conll2000 = {Proc.\ of 
the 4th Conference on Computational Natural Language Learning (CoNLL-2000)}} + +@string{+conll2001 = {Proc.\ of the ACL/EACL-2001 Workshop on Computational Natural Language Learning (CoNLL-2001)}} + +@string{+conll2002 = {Proc.\ of the 6th Conference on Natural Language Learning (CoNLL-2002)}} + +@string{+conll2003 = {Proc.\ of the 7th Conference on Natural Language Learning (CoNLL-2003)}} + +@string{+conll2004 = {Proc.\ of the 8th Conference on Natural Language Learning (CoNLL-2004)}} + +@string{+conll2005 = {Proc.\ of the 9th Conference on Natural Language Learning (CoNLL-2005)}} + +@string{+cpcol = {The International Journal on Computer Processing of Oriental Language}} + +@string{+csl-mwe = {Computer Speech and Language, Special Issue on Multiword Expressions}} + +@string{+darpa1992 = {Proc.\ of the 4th DARPA Speech and Natural Language Workshop}} + +@string{+eacl1993 = {Proc.\ of the 6th Conference of the European Chapter of the Association for Computational Linguistics (EACL-93)}} + +@string{+eacl1995 = {Proc.\ of the 7th Conference of the European Chapter of the Association for Computational Linguistics (EACL-95)}} + +@string{+eacl1997 = {Proc.\ of the 35th Annual Meeting of the ACL and 8th Conference of the EACL (ACL-EACL-97)}} + +@string{+eacl1999 = {Proc.\ of the 9th Conference of the European Chapter of the Association for Computational Linguistics (EACL-99)}} + +@string{+eacl2001 = {Proc.\ of the 39th Annual Meeting of the ACL and 10th Conference of the EACL (ACL-EACL 2001)}} + +@string{+eacl2003 = {Proc.\ of the 10th Conference of the EACL (EACL-2003)}} + +@string{+emnlp1996 = {Proc.\ of the Conference on Empirical Methods in Natural Language Processing (EMNLP-96)}} + +@string{+emnlp1997 = {Proc.\ of the 2nd Conference on Empirical Methods in Natural Language Processing (EMNLP-97)}} + +@string{+emnlp1998 = {Proc.\ of the 3rd Conference on Empirical Methods in Natural Language Processing (EMNLP-98)}} + +@string{+emnlp1999 = {Proc.\ of the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora (EMNLP/VLC-99)}} + +@string{+emnlp2000 = {Proc.\ of the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora (EMNLP/VLC-2000)}} + +@string{+emnlp2001 = {Proc.\ of the 6th Conference on Empirical Methods in Natural Language Processing (EMNLP-2001)}} + +@string{+emnlp2002 = {Proc.\ of the 2002 Conference on Empirical Methods in Natural Language Processing (EMNLP-2002)}} + +@string{+emnlp2003 = {Proc.\ of the 2003 Conference on Empirical Methods in Natural Language Processing (EMNLP-2003)}} + +@string{+emnlp2004 = {Proc.\ of the 2004 Conference on Empirical Methods in Natural Language Processing (EMNLP-2004)}} + +@string{+emnlp2005 = {Proc.\ of the 2005 Conference on Empirical Methods in Natural Language Processing (EMNLP-2005)}} + +@string{+emnlp2006 = {Proc.\ of the 2006 Conference on Empirical Methods in Natural Language Processing (EMNLP-2006)}} + +@string{+emnlp2007 = {Proc.\ of the 2007 Conference on Empirical Methods in Natural Language Processing (EMNLP-2007)}} + +@string{+ewnlg1999 = {Proc.\ of the 7th European Workshop on Natural Language Generation (EWNLG'99)}} + +@string{+gl2002 = {Proc.\ of the 1st International Workshop on Generative Approaches to the Lexicon}} + +@string{+gl2003 = {Proc.\ of the 2nd International Workshop on Generative Approaches to the Lexicon}} + +@string{+hlt2001 = {Proc.\ of Human Language Technology (HLT) 2001}} + +@string{+hpsg2002 = {Proc.\ of the 9th International 
Conference on Head-Driven Phrase Structure Grammar (HPSG-2002)}} + +@string{+hpsg2003 = {Proc.\ of the 10th International Conference on Head-Driven Phrase Structure Grammar (HPSG-2003)}} + +@string{+icml1992 = {Proc.\ of the 9th International Machine Learning Conference}} + +@string{+icml1993 = {Proc.\ of the 10th International Conference on Machine Learning}} + +@string{+icml1994 = {Proc.\ of the 11th International Conference on Machine Learning}} + +@string{+icml1999 = {Proc.\ of the 16th International Conference on Machine Learning}} + +@string{+icslp1992 = {Proc.\ of the 2nd International Conference on Spoken Language Processing (ICSLP'92)}} + +@string{+icslp1996 = {Proc.\ of the 4th International Conference on Spoken Language Processing (ICSLP'96)}} + +@string{+icslp1998 = {Proc.\ of the 5th International Conference on Spoken Language Processing (ICSLP'98)}} + +@string{+ijcai1989 = {Proc.\ of the 11th International Joint Conference on Artificial Intelligence (IJCAI-89)}} + +@string{+ijcai1991 = {Proc.\ of the 12th International Joint Conference on Artificial Intelligence (IJCAI-91)}} + +@string{+ijcai1993 = {Proc.\ of the 13th International Joint Conference on Artificial Intelligence (IJCAI-93)}} + +@string{+ijcai1995 = {Proc.\ of the 14th International Joint Conference on Artificial Intelligence (IJCAI-95)}} + +@string{+ijcai1997 = {Proc.\ of the 15th International Joint Conference on Artificial Intelligence (IJCAI-97)}} + +@string{+ijcai1999 = {Proc.\ of the 16th International Joint Conference on Artificial Intelligence (IJCAI-99)}} + +@string{+ijcnlp2004 = {Proc.\ of the First International Joint Conference on Natural Language Processing (IJCNLP-2004)}} + +@string{+iral2003 = {Proc.\ of the The Sixth International Workshop on Information Retrieval with Asian Languages (IRAL2003)}} + +@string{+iwslt2005 = {Proc.\ of the International Workshop on Spoken Language Translation}} + +@string{+jml = {Journal of Memory and Language}} + +@string{+jnlp = {Journal of Natural Language Processing}} + +@string{+jnlp1996 = {Proc.\ of the 2nd Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+jnlp1998 = {Proc.\ of the 4th Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+jnlp1999 = {Proc.\ of the 5th Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+jnlp2001 = {Proc.\ of the 7th Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+jnlp2002 = {Proc.\ of the 8th Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+jnlp2004 = {Proc.\ of the 10th Annual Meeting of the Association for Natural Language Processing (Japan)}} + +@string{+lrec1998 = {Proc.\ of the 1st International Conference on Language Resources and Evaluation (LREC-98)}} + +@string{+lrec2000 = {Proc.\ of the 2nd International Conference on Language Resources and Evaluation (LREC-2000)}} + +@string{+lrec2002 = {Proc.\ of the 3rd International Conference on Language Resources and Evaluation (LREC-2002)}} + +@string{+lrec2004 = {Proc.\ of the 4th International Conference on Language Resources and Evaluation (LREC-2004)}} + +@string{+lrec2006 = {Proc.\ of the 5th International Conference on Language Resources and Evaluation (LREC-2006)}} + +@string{+lsa = {Proc.\ of the Linguistics Society of America (LSA) Annual Meeting}} + +@string{+ml = {Machine Learning}} + +@string{+mtsummit1997 = {Proc.\ of the Fifth Machine Translation Summit (MT Summit V)}} + 
+@string{+mtsummit2003 = {Proc.\ of the Ninth Machine Translation Summit (MT Summit IX)}} + +@string{+naacl2000 = {Proc.\ of the 1st Annual Meeting of the North American Chapter of Association for Computational Linguistics (NAACL2000)}} + +@string{+naacl2001 = {Proc.\ of the 2nd Annual Meeting of the North American Chapter of Association for Computational Linguistics (NAACL2001)}} + +@string{+naacl2001-wn = {Proc.\ of the NAACL 2001 Workshop on WordNet and Other Lexical Resources: Applications, Extensions and Customizations}} + +@string{+naacl2002 = {Proc.\ of the 40th Annual Meeting of the ACL and 3rd Annual Meeting of the NAACL (ACL-2002)}} + +@string{+naacl2003 = {Proc.\ of the 3rd International Conference on Human Language Technology Research and 4th Annual Meeting of the NAACL (HLT-NAACL 2003)}} + +@string{+naacl2004 = {Proc.\ of the 4th International Conference on Human Language Technology Research and 5th Annual Meeting of the NAACL (HLT-NAACL 2004)}} + +@string{+naacl2005 = {Proc.\ of the 5th International Conference on Human Language Technology Research and 6th Annual Meeting of the NAACL (HLT-NAACL 2005)}} + +@string{+naacl2006 = {Proc.\ of the 6th International Conference on Human Language Technology Research and 7th Annual Meeting of the NAACL (HLT-NAACL 2006)}} + +@string{+naacl2006-smt = {Proc.\ of the HLT-NAACL 2006 Workshop on Statistical Machine Translation}} + +@string{+naacl2007 = {Proc.\ of the 7th International Conference on Human Language Technology Research and 8th Annual Meeting of the NAACL (HLT-NAACL 2007)}} + +@string{+naacl2007-ssst = {Proc.\ of the HLT-NAACL Workshop on Syntax and Structure in Statistical Translation (SSST 2007)}} + +@string{+nemlap1994 = {Proc.\ of the Conference on New Methods in Natural Language Processing}} + +@string{+nemlap1996 = {Proc.\ of the 2nd International Conference on New Methods in Natural Language Processing}} + +@string{+nemlap1998 = {Proc.\ of the Joint Conference on New Methods in Natural Language Processing and Natural Language Learning (NeMLaP3/CoNLL-98)}} + +@string{+nle = {Natural Language Engineering}} + +@string{+nlprs1995 = {Proc.\ of the 3rd Natural Language Processing Pacific Rim Symposium 1995 (NLPRS'95)}} + +@string{+nlprs1997 = {Proc.\ of the 4th Natural Language Processing Pacific Rim Symposium 1997 (NLPRS'97)}} + +@string{+paclic2000 = {Proc.\ of the 14th Pacific Asia Conference on Language, Information and Computation (PACLIC 14)}} + +@string{+paclic2004 = {Proc.\ of the 18th Pacific Asia Conference on Language, Information and Computation (PACLIC 18)}} + +@string{+pacling2005 = {Proc.\ of the 6th Meeting of the Pacific Association for Computational Linguistics (PACLING-2005)}} + +@string{+papillon2003 = {Proc.\ of the Papillon-2003 Workshop}} + +@string{+papillon2004 = {Proc.\ of the Papillon-2004 Workshop on Multilingual Lexical Databases}} + +@string{+riao1988 = {Proc.\ of Recherche d'Informations Assistee par Ordinateur 1988 (RIAO'88)}} + +@string{+riao2000 = {Proc.\ of Recherche d'Informations Assistee par Ordinateur 2000 (RIAO'2000)}} + +@string{+sigir1990 = {Proc.\ of 13th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'90)}} + +@string{+sigir1993 = {Proc.\ of 16th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'93)}} + +@string{+sigir1994 = {Proc.\ of 17th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'94)}} + +@string{+sigir1995 = {Proc.\ of 18th International 
ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'95)}} + +@string{+sigir1996 = {Proc.\ of 19th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'96)}} + +@string{+sigir1997 = {Proc.\ of 20th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'97)}} + +@string{+sigir1998 = {Proc.\ of 21st International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'98)}} + +@string{+sigir1999 = {Proc.\ of 22nd International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR'99)}} + +@string{+sigir2000 = {Proc.\ of 23rd International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR 2000)}} + +@string{+sigir2001 = {Proc.\ of 24th International ACM-SIGIR Conference on Research and Development in Information Retrieval (SIGIR 2001)}} + +@string{+sigsemprep2003 = {Proc.\ of the ACL-SIGSEM Workshop on the Linguistic Dimensions of Prepositions and their Use in Computational Linguistics Formalisms and Applications}} + +@string{+sigsemprep2005 = {Proc.\ of the Second ACL-SIGSEM Workshop on the Linguistic Dimensions of Prepositions and their Use in Computational Linguistics Formalisms and Applications}} + +@string{+taln1999 = {Actes de la 6e Conf\'erence annuelle sur le Traitement Automatique des Langues Naturelles (TALN '99)}} + +@string{+tmi1993 = {Proc.\ of the 5th International Conference on Theoretical and Methodological Issues in Machine Translation (TMI-93)}} + +@string{+tmi1995 = {Proc.\ of the 6th International Conference on Theoretical and Methodological Issues in Machine Translation (TMI-95)}} + +@string{+tmi1997 = {Proc.\ of the 7th International Conference on Theoretical and Methodological Issues in Machine Translation (TMI-97)}} + +@string{+tmi1999 = {Proc.\ of the 8th International Conference on Theoretical and Methodological Issues in Machine Translation (TMI-99)}} + +@string{+tmi2002 = {Proc.\ of the 9th International Conference on Theoretical and Methodological Issues in Machine Translation (TMI 2002)}} + +@string{+trec1999 = {Proc.\ of the 7th Text REtrieval Conference (TREC-7)}} + +@string{+trec2000 = {Proc.\ of the 8th Text REtrieval Conference (TREC-8)}} + +@string{+wcn1994 = {Proc.\ of the Workshop on Compound Nouns: Multilingual Aspects of Nominal Composition}} + +@string{+wmt2008 = {Proc.\ of the Third Workshop on Machine Translation}} + +@string{+wvlc1993 = {Proc.\ of the Workshop on Very Large Corpora: Academic and Industrial Perspectives}} + +@string{+wvlc1994 = {Proc.\ of the 2nd Annual Workshop on Very Large Corpora}} + +@string{+wvlc1995 = {Proc.\ of the 3rd Annual Workshop on Very Large Corpora}} + +@string{+wvlc1996 = {Proc.\ of the 4th Annual Workshop on Very Large Corpora}} + +@string{+wvlc1997 = {Proc.\ of the 5th Annual Workshop on Very Large Corpora}} + +@string{+wvlc1998 = {Proc.\ of the 6th Annual Workshop on Very Large Corpora}} + +@string{ajcl = {American Journal of Computational Linguistics}} + + +@techreport{ALPAC:1966, + Author = {{Automatic Language Processing Advisory Committee}}, + Institution = {National Academy of sciences, U.S. 
National Research Council}, + Title = {Language and Machine: Computers in Translation and Linguistics}, + Year = 1966} + +@inproceedings{AOkumura94, + Address = {Princeton, USA}, + Author = {Akitoshi Okumura and Eduard Hovy}, + Booktitle = +ARPA1994, + Pages = {141--6}, + Title = {Building {Japanese-English} Dictionary based on Ontology for Machine Translation}, + Year = 1994} + +@article{Aarts89a, + Author = {Bas Aarts}, + Journal = {Journal of Linguistics}, + Pages = {277--90}, + Title = {Verb-preposition Constructions and Small Clauses in {English}}, + Volume = 25, + Year = 1989} + +@book{Aarts89b, + Address = {Berlin}, + Author = {Bas Aarts}, + Publisher = {Mouton de Gruyter}, + Title = {Small Clauses in {English}: the Non-verbal Types}, + Year = 1989} + +@inproceedings{Abeille88, + Author = {Anne Abeill\'e}, + Booktitle = +CLS1988, + Title = {Light verb constructions and extraction out of {NP} in a tree adjoining grammar}, + Year = 1988} + +@inproceedings{Abeille90, + Author = {Anne Abeill\'e}, + Booktitle = +ACL1990, + Pages = {292--8}, + Title = {Lexical and Syntactic Rules in a Tree Adjoining Grammar}, + Year = 1990} + +@incollection{Abeille95, + Author = {Anne Abeill\'e}, + Chapter = 1, + Crossref = {_Idioms:StructuralPsychologicalPerspectives}, + Title = {The Flexibility of {French} Idioms: A Representation with {Lexicalised Tree Adjoining Grammar}}} + +@inproceedings{Abekawa:2001, + Address = {Tokyo, Japan}, + Author = {Takeshi Abekawa and Kiyoaki Shirai and Hozumi Tanaka and Takenobu Tokunaga}, + Booktitle = +JNLP2001, + Note = {(in Japanese)}, + Pages = {269--72}, + Title = {\textit{T\=okei-j\=oh\=o-o riy\=o-shita Nihongo-rentai-sh\=ushoku-setsu no kaiseki} (Statistical Analysis of {Japanese} Relative Clause Constructions)}, + Year = 2001} + +@inproceedings{Abney99, + Author = {Steven Abney and Robert E. Schapire and Yoram Singer}, + Booktitle = {Proc.\ of the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}, + Title = {Boosting Applied to Tagging and PP attachment}, + Year = 1999} + +@book{Akimoto89, + Author = {Minoji Akimoto}, + Publisher = {Tokyo: Shinozaki Shorin}, + Title = {A Study of Verbo-Nominal Structures in English}, + Year = 1989} + +@inproceedings{Aha94, + Author = {David W. Aha and Richard L. 
Bankert}, + Booktitle = {Proc.\ of the AAAI-94 Workshop on Case-Based Reasoning}, + Title = {Feature Selection for Case-Based Classification of Cloud Types: An Empirical Comparison}, + Year = 1994} + +@inproceedings{Ahmed+:2004, + Author = {Bashir Ahmed and {Sung-Hyuk} Cha and Charles Tappert}, + Booktitle = {Proc.\ of the Student/Faculty Research Day, CSIS, Pace University}, + Title = {Language Identification from Text Using N-gram Based Cumulative Frequency Addition}, + Year = 2004} + +@inproceedings{Akiba94, + Address = {Kyoto, Japan}, + Author = {Tomoyoshi Akiba and Hozumi Tanaka}, + Booktitle = +COLING1994, + Pages = {1212--8}, + Title = {A {Bayesian} approach to user modeling in dialogue systems}, + Year = 1994} + +@inproceedings{Akiba:Watanabe:Sumita:2002, + Address = {Taipei}, + Author = {Yasuhiro Akiba and Taro Watanabe and Eichiro Sumita}, + Booktitle = coling-2002, + Pages = {8--14}, + Title = {Using Language and Translation Models to Select the Best among Outputs from Multiple Machine Translation Systems}, + Volume = 1, + Year = 2002} + +@inproceedings{Aldinger:2004, + Address = {Lisbon, Portugal}, + Author = {Nadine Aldinger}, + Booktitle = +LREC2004, + Title = {Towards a dynamic lexicon: Predicting the syntactic argument structure of complex verbs}, + Year = 2004} + +@inproceedings{Alegria+:2004, + Address = {Barcelona, Spain}, + Author = {Inaki Alegria and Olatz Ansa and Xabier Artola and Nerea Ezeiza and Koldo Gojenola and Ruben Urizar}, + Booktitle = +ACL2004-MWE, + Title = { Representation and Treatment of Multiword Expressions in Basque}, + Year = 2004} + +@article{Alexander78, + Author = {Richard J. Alexander}, + Journal = {Anglistik und Englischunterricht}, + Pages = {171--88}, + Title = {Fixed Expressions in {English}: A Linguistic, Psycholinguistic, Sociolinguistic and Didactic Study (Part 1)}, + Volume = 6, + Year = 1978} + +@article{Alexander79, + Author = {Richard J. Alexander}, + Journal = {Anglistik und Englischunterricht}, + Pages = {181--202}, + Title = {Fixed Expressions in {English}: A Linguistic, Psycholinguistic, Sociolinguistic and Didactic Study (Part 2)}, + Volume = 7, + Year = 1979} + +@article{Allan:1980, + Author = {Keith Allan}, + Journal = {Language}, + Number = 3, + Pages = {541--67}, + Title = {Nouns and countability}, + Volume = 56, + Year = 1980} + +@book{Allen87, + Address = {Cambridge, UK}, + Author = {Jonathan Allen and M. Sharon Hunnicutt and Dennis Klatt}, + Publisher = {Cambridge University Press}, + Title = {From Text to Speech: The {MITTalk} System}, + Year = 1987} + +@book{Allerton02, + Address = {London, UK}, + Author = {D.J. Allerton}, + Publisher = {Routledge}, + Title = {Stretched Verb Constructions in {English}}, + Year = 2002} + +@article{Allerton84, + Author = {D.J. Allerton}, + Journal = {Lingua}, + Pages = {17--40}, + Title = {Three (or four) levels of word cooccurrence restriction}, + Volume = 63, + Year = 1984} + +@inproceedings{Almuallim91, + Address = {Anaheim, USA}, + Author = {Hussein Almuallim and Thomas G. Dietterich}, + Booktitle = +AAAI1991, + Pages = {547-52}, + Title = {Learning with Many Irrelevant Features}, + Year = 1991} + +@inproceedings{Almuallim92, + Author = {Hussein Almuallim and Thomas G. 
Dietterich}, + Booktitle = {Proc.\ of the 9th Canadian Conference on Artificial Intelligence}, + Pages = {38--45}, + Title = {Efficient Algorithms for Identifying Relevant Features}, + Year = 1992} + +@inproceedings{Almuallim94, + Address = {Kyoto, Japan}, + Author = {Hussein Almuallim and Yasuhiro Akiba and Takefumi Yamazaki}, + Booktitle = +COLING1994, + Pages = {57--63}, + Title = {Two Methods for Learning {ALT-J/E} Rules from Examples and a Semantic Hierarchy}, + Year = 1994} + +@inproceedings{Alshawi92, + Address = {Newark, USA}, + Author = {Hiyan Alshawi and Richard Crouch}, + Booktitle = +ACL1992, + Title = {Monotonic semantic interpretation}, + Year = 1992} + +@article{Alshawi94a, + Author = {Hiyan Alshawi and David Carter}, + Journal = +CL, + Number = 4, + Pages = {635--48}, + Title = {Training and Scaling Preference Functions for Disambiguation}, + Volume = 20, + Year = 1994} + +@misc{AltaVista, + Howpublished = {{\tt http:/\hspace*{-0.3ex}/altavista.digital.com}}, + Key = {Alta Vista}, + Title = {Alta Vista search engine}, + Url = {{http://altavista.digital.com}}, + Bdsk-Url-1 = {%7Bhttp://altavista.digital.com%7D}} + +@book{Altman:1991, + Author = {Douglas G. Altman}, + Publisher = {Chapman and Hall}, + Title = {Practical Statistics for Medical Research}, + Year = 1991} + +@inproceedings{Ananiadou94, + Address = {Kyoto, Japan}, + Author = {Sophia Ananiadou}, + Booktitle = +COLING1994, + Pages = {1034--8}, + Title = {A methodology for automatic term recognition}, + Year = 1994} + +@incollection{Anderson85, + Address = {Cambridge, UK}, + Author = {Susan R. Anderson and Edward L. Keenan}, + Booktitle = {Linguistic typology and syntactic description}, + Editor = {Shopen, Timothy}, + Pages = {259--308}, + Publisher = {Cambridge University Press}, + Title = {Deixis}, + Volume = {III}, + Year = 1985} + +@article{Ando:Lee:2003, + Author = {Rie {Kubota Ando} and Lillian Lee}, + Issue = 2, + Journal = +NLE, + Pages = {127--49}, + Title = {Mostly-Unsupervised Statistical Segmentation of {Japanese} Kanji Sequences}, + Volume = 9, + Year = 2003} + +@article{Aoe:1992, + Author = {J. Aoe and K. Morimoto and T. Sato}, + Journal = {Software Practice \& Experiments}, + Number = 9, + Pages = {695--721}, + Title = {An Efficient Implementation of Trie Structures}, + Volume = 22, + Year = 1992} + +@inproceedings{Apel:Quint:2004, + Address = {Geneva, Switzerland}, + Author = {Ulrich Apel and Julien Quint}, + Booktitle = +COLING2004-ML, + Title = {Building a Graphetic Dictionary for the Description of Japanese Kanji Brush Strokes, Stroke Groups, their Position and Path Data}, + Year = 2004} + +@article{Appelt85, + Author = {Douglas E. Appelt}, + Journal = {Artificial Intelligence}, + Pages = {1--33}, + Title = {Planning {English} referring expressions}, + Volume = 26, + Year = 1985} + +@techreport{Arakawa:1998, + Address = {Kyoto}, + Author = {Naoya Arakawa}, + Institution = {ATR}, + Number = {TR-IT-0280}, + Title = {The Recognition of Noun Usage and Pronominal Anaphora in {Japanese}}, + Year = 1998} + +@inproceedings{Arehart:2003, + Author = {Mark Arehart}, + Booktitle = +LSA, + Title = {Linguistic versus nonlinguistic constraints on noun compound interpretation}, + Year = 2003} + +@book{Arnold+:1994, + Address = {London, UK}, + Author = {Doug J. Arnold and Lorna Balkan and Siety Meijer and R. 
Lee Humphreys and Louisa Sadler}, + Publisher = {Blackwells-NCC}, + Title = {Machine Translation: an Introductory Guide}, + Year = 1994} + +@incollection{Arnold99, + Address = {London, UK}, + Author = {Doug Arnold and Louisa Sadler}, + Booktitle = {Recent Developments and Applications of Natural Language Processing}, + Editor = {J. Peckham}, + Publisher = {Kogan Page}, + Title = {Non-compositionality and Translation}, + Year = 1988} + +@inproceedings{Smith:2005, + Address = {Ann Arbor, USA}, + Author = {Kim Smith}, + Booktitle = +ACL2005, + Pages = {1--8}, + Title = {{LT} Stuff}, + Year = 2005} + +@article{bowman:reasoning, + Author = {Mic Bowman and Saumya K. Debray and Larry L. Peterson}, + Journal = {ACM Trans. Program. Lang. Syst.}, + Month = {November}, + Number = {5}, + Pages = {795-825}, + Title = {Reasoning About Naming Systems}, + Volume = {15}, + Year = {1993}} + +@article{braams:babel, + Author = {Johannes Braams}, + Journal = {TUGboat}, + Month = {June}, + Number = {2}, + Pages = {291-301}, + Title = {Babel, a Multilingual Style-Option System for Use with LaTeX's Standard Document Styles}, + Volume = {12}, + Year = {1991}} + +@inproceedings{clark:pct, + Author = {Malcolm Clark}, + Booktitle = {Proc.\ of TeX90 Conference}, + Month = {March}, + Organization = {TeX Users Group}, + Pages = {84-89}, + Title = {Post Congress Tristesse}, + Year = {1991}} + +@article{herlihy:methodology, + Author = {Maurice Herlihy}, + Journal = {ACM Trans. Program. Lang. Syst.}, + Month = {November}, + Number = {5}, + Pages = {745-770}, + Title = {A Methodology for Implementing Highly Concurrent Data Objects}, + Volume = {15}, + Year = {1993}} + +@book{Lamport:LaTeX, + Address = {Reading, Massachusetts}, + Author = {Leslie Lamport}, + Publisher = {Addison-Wesley Publishing Company}, + Title = {LaTeX User's Guide and Document Reference Manual}, + Year = {1986}} + +@book{salas:calculus, + Address = {New York}, + Author = {S.L. Salas and Einar Hille}, + Publisher = {John Wiley and Sons}, + Title = {Calculus: One and Several Variable}, + Year = {1978}} + +@inproceedings{roark04, + Address = {Barcelona, Spain}, + Author = {Brian Roark and Murat Saraclar and Michael Collins and Mark Johnson}, + Booktitle = {Proc.\ of the 42nd Annual Meeting of the Association for Computational Linguistics}, + Pages = {48--55}, + Title = {Discriminative Language Modeling with Conditional Random Fields and the Perceptron Algorithm}, + Year = 2004} + +@inproceedings{geman02lfg, + Address = {Philadelphia, USA}, + Author = {Stuart Geman and Mark Johnson}, + Booktitle = {Proc.\ of the 40nd Annual Meeting of the Association for Computational Linguistics}, + Pages = {279--286}, + Title = {Dynamic programming for parsing and estimation of stochastic unification-based grammars}, + Year = 2002} + +@inproceedings{johnson99lfg, + Address = {University of Maryland, USA}, + Author = {Mark Johnson and Stuart Geman and Stephen Canon and Zhiyi Chi and Stefan Riezler}, + Booktitle = {Proc.\ of the 37th Annual Meeting of the Association for Computational Linguistics}, + Pages = {535--541}, + Title = {Estimators for stochastic `unification based' grammars}, + Year = 1999} + +@inproceedings{lafferty01, + Address = {Williamstown, USA}, + Author = {J. Lafferty and A. McCallum and F. Pereira}, + Booktitle = {Proceedings of ICML}, + Month = {June}, + Pages = {282--289}, + Title = {Conditional Random Fields: {P}robabilistic models for segmenting and labelling sequence data}, + Year = 2001} + +@inproceedings{ratnaparkhi96, + Author = {A. 
Ratnaparkhi}, + Booktitle = +EMNLP1996, + Title = {A maximum entropy part-of-speech tagger}, + Year = 1996} + +@inproceedings{mccallum04, + Author = {Charles Sutton and Khashayar Rohanimanesh and Andrew McCallum}, + Booktitle = {Proceedings of the 21st International Conference on Machine Learning}, + Title = {Dynamic Conditional Random Fields: {F}actorized Probabilistic Models for Labelling and Segmenting Sequence Data}, + Year = 2004} + +@inproceedings{pinto03, + Author = {David Pinto and Andrew McCallum and Xing Wei and Bruce Croft}, + Booktitle = {Proceedings of the Annual International ACM SIGIR Conference on Research and Development in Information Retrieval}, + Pages = {235--242}, + Title = {Table extraction using conditional random fields}, + Year = 2003} + +@inproceedings{mccallum03ner, + Author = {Andrew McCallum and Wei Li}, + Booktitle = {Proceedings of the 7th Conference on Natural Language Learning}, + Pages = {188--191}, + Title = {Early results for named entity recognition with conditional random fields, feature induction and web-enhanced lexicons}, + Year = 2003} + +@inproceedings{mccallum03induction, + Author = {Andrew McCallum}, + Booktitle = {Proceedings of the Conference on Uncertainty in Artificial Intelligence}, + Pages = {403--410}, + Title = {Efficiently inducing features of Conditional Random Fields}, + Year = 2003} + +@inproceedings{malouf02, + Address = {Taipei, Taiwan}, + Author = {Robert Malouf}, + Booktitle = +CONLL2002, + Month = {August}, + Pages = {49--55}, + Title = {A comparison of algorithms for maximum entropy parameter estimation}, + Year = 2002} + +@mastersthesis{wallach02, + Author = {Hanna Wallach}, + School = {University of Edinburgh}, + Title = {Efficient training of conditional random fields}, + Year = 2002} + +@article{rosenfeld1999, + Author = {S. Chen and R. Rosenfeld}, + Journal = {IEEE Transactions on Speech and Audio Processing}, + Number = 1, + Pages = {37--50}, + Title = {A Survey of Smoothing Techniques for maximum entropy Models}, + Volume = 8, + Year = 1999} + +@inproceedings{berger99, + Author = {Adam Berger}, + Booktitle = {Proceedings of IJCAI: Workshop on machine learning for information filtering}, + Title = {Error-Correcting Output Coding for Text Classification}, + Year = 1999} + +@article{berger96maximum, + Author = {Adam L. Berger and Stephen Della Pietra and Vincent J. Della Pietra}, + Journal = {Computational Linguistics}, + Number = {1}, + Pages = {39-71}, + Title = {A Maximum Entropy Approach to Natural Language Processing}, + Url = {citeseer.ist.psu.edu/berger96maximum.html}, + Volume = {22}, + Year = {1996}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/berger96maximum.html}} + +@article{dietterich95, + Author = {Thomas G. Dietterich and Ghulum Bakiri}, + Journal = {Journal of Artificial Intelligence Reseach}, + Pages = {263--286}, + Title = {Solving Multiclass Learning Problems via Error-Correcting Output Codes}, + Volume = 2, + Year = 1995} + +@inproceedings{kong95, + Author = {Eun Bae Kong and Thomas G. 
Dietterich}, + Booktitle = {Proceedings of the 12th International Conference of Machine Learning}, + Pages = {313--321}, + Title = {Error-correcting output coding corrects bias and variance}, + Year = 1995} + +@book{macwilliams, + Address = {Amsterdam}, + Author = {Florence MacWilliams and Neil Sloane}, + Publisher = {North Holland}, + Title = {The theory of error-correcting codes}, + Year = 1977} + +@book{pearl, + Author = {Judea Pearl}, + Publisher = {Morgan Kaufmann}, + Title = {Probabilistic Reasoning in Intelligent Systems: Networks of Plausible Inference}, + Year = 1988} + +@inproceedings{sang00, + Author = {Erik F. Tjong Kim Sang and Sabine Buchholz}, + Booktitle = +CONLL2000, + Pages = {127--132}, + Title = {Introduction to the {CoNLL}-2000 shared task: Chunking}, + Year = {2000}} + +@inproceedings{sang03, + Address = {Edmonton, Canada}, + Author = {Erik F. Tjong Kim Sang and Fien De Meulder}, + Booktitle = +CONLL2003, + Pages = {142-147}, + Title = {Introduction to the {CoNLL}-2003 Shared Task: Language-Independent Named Entity Recognition}, + Year = {2003}} + +@inproceedings{ghani, + Author = {Rayid Ghani}, + Booktitle = {ICML 2000: Proceedings of the Seventeenth International Conference on Machine Learning}, + Isbn = {1-55860-707-2}, + Pages = {303--310}, + Publisher = {Morgan Kaufmann Publishers Inc.}, + Title = {Using Error-Correcting Codes for Text Classification}, + Year = {2000}} + +@inproceedings{cohn05, + Author = {Trevor Cohn and Andrew Smith and Miles Osborne}, + Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics}, + Note = {To appear}, + Title = {Scaling Conditional Random Fields using Error Correcting Codes}, + Year = {2005}} + +@inproceedings{carreras05, + Author = {Xavier Carreras and Llu{\'\i}s M{\`a}rquez}, + Booktitle = +CONLL2005, + Title = {{Introduction to the CoNLL-2005 Shared Task: Semantic Role Labeling}}, + Year = {2005}} + +@inproceedings{carreras04, + Author = {Xavier Carreras and Llu{\'\i}s M{\`a}rquez}, + Booktitle = +CONLL2004, + Title = {{Introduction to the CoNLL-2004 Shared Task: Semantic Role Labeling}}, + Year = {2004}} + +@inproceedings{pradhan04, + Author = {S. Pradhan, K. Hacioglu, W. Ward, J. Martin and D. Jurafsky}, + Booktitle = +CONLL2004, + Title = {Semantic Role Labeling by Tagging Syntactic Chunks}, + Year = {2004}} + +@inproceedings{gildea02, + Author = {Daniel Gildea and Martha Palmer}, + Title = {The Necessity of Parsing for Predicate Argument Recognition}, + Url = {citeseer.ist.psu.edu/article/gildea02necessity.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/article/gildea02necessity.html}} + +@inproceedings{lim04, + Author = {Joon-Ho Lim and Young-Sook Hwang and So-Young Park and Hae-Chang Rim}, + Booktitle = +CONLL2004, + Title = {Semantic role labeling using maximum entropy model}, + Year = {2004}} + +@inproceedings{xue04, + Author = {Nianwen Xue and Martha Palmer}, + Booktitle = {Proceedings of EMNLP}, + Title = {Calibrating features for semantic role labeling}, + Year = {2004}} + +@inproceedings{pradhan05, + Author = {Sameer Pradhan and Kadri Hacioglu and Valerie Krugler and Wayne Ward and James Martin and Daniel Jurafsky}, + Booktitle = {To appear in Machine Learning journal, Special issue on Speech and Natural Language Processing}, + Title = {Support Vector Learning for Semantic Argument Classification }, + Year = {2005}} + +@inproceedings{scholkopf01, + Author = {B. Sch\"{o}lkopf and R. Herbrich and A. J. 
Smola}, + Booktitle = {Proc.\ of the Annual Conference on Computational Learning Theory}, + Pages = {416--426}, + Title = {A generalized representer theorem}, + Year = {2001}} + +@inproceedings{altun04, + Author = {Y. Altun and T. Hofmann and A.J. Smola}, + Booktitle = {In Uncertainty in Artificial Intelligence UAI}, + Title = {Exponential families for conditional random fields}, + Year = {2004}} + +@inproceedings{altun204, + Author = {Y. Altun and T. Hofmann and A.J. Smola}, + Booktitle = {Proc.\ of International Conference on Machine Learning (ICML)}, + Title = {Gaussian process classification for segmenting and annotating sequences}, + Year = {2004}} + +@inproceedings{smola00, + Author = {A. J. Smola and B. Sch\"{o}lkopf}, + Booktitle = {Proc.\ of the International Conference on Machine Learning ICML}, + Pages = {911--918}, + Title = {Sparse greedy matrix approximation for machine learning}, + Year = {2000}} + +@inproceedings{Li02, + Author = {X. Li and D. Roth}, + Booktitle = {Proc.\ of the 19th International Conference on Computational Linguistics (COLING'02)}, + Title = {Learning Question Classifiers}, + Year = {2002}} + +@inproceedings{zhang03, + Author = {D. Zhang and W.S. Lee}, + Booktitle = {Proceedings of the 26th annual international ACM SIGIR conference on Research and development in informaion retrieval}, + Title = {Question Classification with Support Vector Machines}, + Year = {2003}} + +@inproceedings{hacioglu03, + Address = {Edmonton, Canada}, + Author = {K. Hacioglu and W. Ward}, + Booktitle = +NAACL2003, + Pages = {28--30}, + Title = {Question Classification with Support Vector Machines and Error Correcting Codes}, + Year = {2003}} + +@inproceedings{Harabagiu+00, + Author = {S. Harabagiu and D. Moldovan and M. Pa\c{s}ca and R. Mihalcea and M. Surdeanu and R. Bunescu and R. G\^{\i}rju and V. Rus and P. Mor\u{a}rescu}, + Booktitle = {Proceedings of {Text} {REtrieval} {Conference} (TREC-9)}, + Title = {{FALCON}: Boosting Knowledge for Answer Engines}, + Year = {2000}} + +@article{HirschmanGaizauskas01, + Author = {L. Hirschman and R. Gaizauskas}, + Journal = {Journal of Natural Language Engineering}, + Number = 4, + Pages = {275--300}, + Title = {Natural Language Question Answering: The View from Here}, + Volume = 7, + Year = 2001} + +@book{cristianini00, + Author = {N. Cristianini and J. Shawe-Taylor}, + Publisher = {Cambridge University Press}, + Title = {An Introduction to Support Vector Machines}, + Year = 2000} + +@techreport{Kocik04, + Author = {K. Kocik}, + Institution = {University of Sydney}, + Title = {Question Classification using Maximum Entropy Models}, + Type = {Honours thesis}, + Year = 2004} + +@inproceedings{clark02, + Address = {Venice, Italy}, + Author = {Stephen Clark}, + Booktitle = {Proc.\ of the 6th International Workshop on Tree Adjoining Grammars and Related Frameworks}, + Pages = {19--24}, + Title = {Supertagging for Combinatory Categorial Grammar}, + Year = {2002}} + +@inproceedings{CurranClark03, + Address = {Budapest, Hungary}, + Author = {James R. Curran and Stephen Clark}, + Booktitle = {Proc.\ of the 10th Meeting of the EACL}, + Pages = {91--98}, + Title = {Investigating {G}{I}{S} and Smoothing for Maximum Entropy Taggers}, + Year = {2003}} + +@inproceedings{clark04parsing, + Address = {Barcelona, Spain}, + Author = {Stephen Clark and James R. 
Curran}, + Booktitle = +ACL2004, + Pages = {103--110}, + Title = {Parsing the {WSJ} using {CCG} and Log-Linear Models}, + Year = {2004}} + +@inproceedings{clark04supertagging, + Address = {Geneva, Switzerland}, + Author = {Clark, Stephen and Curran, James R.}, + Booktitle = +COLING2004, + Month = {Aug 23--Aug 27}, + Pages = {282--288}, + Publisher = {COLING}, + Title = {The Importance of Supertagging for Wide-Coverage {CCG} Parsing }, + Year = 2004} + +@inproceedings{Clark+04, + Address = {Barcelona, Spain}, + Author = {Stephen Clark and Mark Steedman and James R. Curran}, + Booktitle = +EMNLP2004, + Pages = {111--118}, + Title = {Object-Extraction and Question-Parsing using {CCG}}, + Year = 2004} + +@article{clark07ccg, + Author = {Stephen Clark and James R. Curran}, + Date-Modified = {2009-10-22 14:36:17 +0100}, + Journal = {Computational Linguistics}, + Number = 4, + Read = {Yes}, + Title = {Wide-Coverage Efficient Statistical Parsing with {CCG} and Log-Linear Models}, + Volume = 33, + Year = 2007, + Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUIJidUJHRvcFgkb2JqZWN0c1gkdmVyc2lvblkkYXJjaGl2ZXLRBgdUcm9vdIABqAkKFRYXGyIjVSRudWxs0wsMDQ4RFFpOUy5vYmplY3RzV05TLmtleXNWJGNsYXNzog8QgASABqISE4ACgAOAB1lhbGlhc0RhdGFccmVsYXRpdmVQYXRo0hgNGRpXTlMuZGF0YU8RAY4AAAAAAY4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarigZIKwAAAAvL1A5jbDA2cGFyc2VyLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC8xFwyBFVgAAAAAAAAAAAAQAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAwyA3RgAAAAEAEAALy9QACTqHAAk6egAAkOcAAgA7TWFjaW50b3NoIEhEOlVzZXJzOnBibHVuc29tOkRvY3VtZW50czpwYXBlcnM6Y2wwNnBhcnNlci5wZGYAAA4AHgAOAGMAbAAwADYAcABhAHIAcwBlAHIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASAC5Vc2Vycy9wYmx1bnNvbS9Eb2N1bWVudHMvcGFwZXJzL2NsMDZwYXJzZXIucGRmABMAAS8AABUAAgAP//8AAIAF0hwdHh9YJGNsYXNzZXNaJGNsYXNzbmFtZaMfICFdTlNNdXRhYmxlRGF0YVZOU0RhdGFYTlNPYmplY3RfECsuLi8uLi8uLi8uLi9Eb2N1bWVudHMvcGFwZXJzL2NsMDZwYXJzZXIucGRm0hwdJCWiJSFcTlNEaWN0aW9uYXJ5EgABhqBfEA9OU0tleWVkQXJjaGl2ZXIACAARABYAHwAoADIANQA6ADwARQBLAFIAXQBlAGwAbwBxAHMAdgB4AHoAfACGAJMAmACgAjICNAI5AkICTQJRAl8CZgJvAp0CogKlArICtwAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALJ}} + +@inproceedings{Hermjakob01, + Author = {U. Hermjakob}, + Booktitle = {Proc.\ of the ACL Workshop on Open-Domain Question Answering}, + Pages = {17--22}, + Title = {Parsing and Question Classification for Question Answering}, + Year = 2001} + +@misc{Gerber01, + Author = {L. Gerber}, + Note = {(in prep)}, + Title = {A \qa Typology for Webclopedia}, + Year = 2001} + +@inproceedings{Hovy+01a, + Author = {E. Hovy and L. Gerber and U. Hermjakob. M. Junk and C. Lin}, + Booktitle = {Proc.\ of the Ninth Text REtrieval Conference (\trec-9)}, + Pages = 655, + Title = {Question Answering in Webclopedia}, + Year = 2001} + +@inproceedings{Hovy+01b, + Author = {E. Hovy and U. Hermjakob and D. Ravichandran}, + Booktitle = {Proc.\ of the DARPA Human Language Technology Conference}, + Title = {A Question/Answer Typology with Surface Text Patterns}, + Year = 2001} + +@article{Minnen+01, + Address = {Cambridge, UK}, + Author = {G. Minnen and J. Carroll and D. Pearce}, + Journal = {Natural Language Engineering}, + Number = {3}, + Pages = {207--223}, + Publisher = {Cambridge University Press}, + Title = {Applied morphological processing of {English}}, + Volume = {7}, + Year = {2001}} + +@techreport{Carlson+99, + Author = {A. Carlson and C. Cumby and J. Rosen and D. 
Roth}, + Institution = {University of Illinois at Urbana-Champaign}, + Number = {UIUCDCS-R-99-2101}, + Title = {The SnoW Learning Architecture}, + Year = 1999} + +@book{wordnet, + Address = {Cambridge, MA USA}, + Editor = {C. Fellbaum}, + Publisher = {MIT Press}, + Title = {{WordNet}: An Electronic Lexical Database}, + Year = 1998} + +@article{och03, + Author = {Franz Josef Och and Hermann Ney}, + Journal = {Computational Linguistics}, + Number = {1}, + Pages = {19--52}, + Title = {A systematic comparison of various statistical alignment models}, + Volume = {29}, + Year = {2003}} + +@inproceedings{taskar05, + Address = {Vancouver, Canada}, + Author = {B. Taskar and S. Lacoste-Julien and D. Klein}, + Booktitle = +EMNLP2005, + Month = {October}, + Pages = {73--80}, + Title = {A Discriminative Matching Approach to Word Alignment}, + Url = {http://www.aclweb.org/anthology/H/H05/H05-1010}, + Year = {2005}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/H/H05/H05-1010}} + +@inproceedings{taskar03max, + Author = {B. Taskar and C. Guestrin and D. Koller}, + Booktitle = {Proc.\ of NIPS}, + Title = {Max margin Markov networks}, + Url = {citeseer.ist.psu.edu/article/taskar03maxmargin.html}, + Year = {2003}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/article/taskar03maxmargin.html}} + +@inproceedings{toutanova02, + Address = {Philadelphia, USA}, + Author = {K. Toutanova and H. Tolga Ilhan and C Manning}, + Booktitle = +EMNLP2002, + Month = {July}, + Pages = {87--94}, + Title = {Extentions to {HMM}-based Statistical Word Alignment Models}, + Year = 2002} + +@inproceedings{vogel96, + Address = {Copenhagen, Denmark}, + Author = {S. Vogel and H. Ney and C. Tillmann}, + Booktitle = +COLING1996, + Month = {August}, + Pages = {836--841}, + Title = {{HMM}-based word alignment in statistical translation}, + Year = 1996} + +@article{tillmann07block, + Address = {New York, NY, USA}, + Author = {Christoph Tillmann and Tong Zhang}, + Doi = {http://doi.acm.org/10.1145/1255171.1255172}, + Issn = {1550-4875}, + Journal = {ACM Transactions Speech Language Processing}, + Number = {3}, + Pages = {6}, + Publisher = {ACM}, + Title = {A block bigram prediction model for statistical machine translation}, + Volume = {4}, + Year = {2007}, + Bdsk-Url-1 = {http://doi.acm.org/10.1145/1255171.1255172}} + +@phdthesis{och02, + Author = {Franz Josef Och}, + School = {Computer Science Department, RWTH}, + Title = {Statistical Machine Translation: From Single-Word Models to Alignment Templates}, + Year = 2002} + +@article{och04, + Author = {Franz Josef Och and Hermann Ney}, + Journal = {Computational Linguistics}, + Number = {4}, + Pages = {417--449}, + Title = {The Alignment Template approach to Statistical Machine Translation}, + Volume = {30}, + Year = {2004}} + +@inproceedings{liu05, + Address = {Ann Arbor, USA}, + Author = {Y. Liu and Q. Liu and S. Lin}, + Booktitle = +ACL2005, + Month = {June}, + Pages = {459--466}, + Title = {Log-linear models for word alignment}, + Year = 2005} + +@inproceedings{mihalcea03, + Address = {Edmonton, Canada}, + Author = {R. Mihalcea and T. Pedersen}, + Booktitle = {Proc.\ of HLT-NAACL 2003 Workshop, Building and Using Parallel Texts: Data Driven Machine Translation and Beyond}, + Month = {May}, + Pages = {1--6}, + Title = {An evaluation exercise for word alignment}, + Year = 2003} + +@inproceedings{moore05, + Address = {Vancouver, Canada}, + Author = {R. C. 
Moore}, + Booktitle = +EMNLP2005, + Month = {October}, + Pages = {81--88}, + Title = {A discriminative framework for bilingual word alignment}, + Year = 2005} + +@article{dice45, + Author = {L. R. Dice}, + Journal = {Journal of Ecology}, + Pages = {297--302}, + Title = {Measures of the amount of ecologic association between species}, + Volume = {26}, + Year = {1945}} + +@inproceedings{koehn03, + Address = {Edmonton, Canada}, + Author = {Philipp Koehn and Franz Josef Och and Daniel Marcu}, + Booktitle = +NAACL2003, + Month = {May}, + Pages = {81--88}, + Title = {Statistical Phrase-Based Translation}, + Year = 2003} + +@inproceedings{ittycheriah05, + Address = {Vancouver, Canada}, + Author = {Abraham Ittycheriah and Salim Roukos}, + Booktitle = +EMNLP2005, + Month = {October}, + Pages = {89--96}, + Title = {A Maximum Entropy Word Aligner for {A}rabic-{E}nglish Machine Translation}, + Url = {http://www.aclweb.org/anthology/H/H05/H05-1012}, + Year = {2005}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/H/H05/H05-1012}} + +@inproceedings{ittycheriah07dtm, + Address = {Rochester, USA}, + Author = {Abraham Ittycheriah and Salim Roukos}, + Booktitle = +NAACL2007, + Pages = {57--64}, + Title = {Direct Translation Model 2}, + Year = {2007}} + +@inproceedings{martin05, + Address = {Ann Arbor, USA}, + Author = {J. Martin and R. Mihalcea and T. Pedersen}, + Booktitle = {Proc.\ of the ACL Workshop on Building and Using Parallel Texts}, + Month = {June}, + Pages = {65--74}, + Title = {Word Alignment for Languages with Scarce Resources}, + Url = {http://www.aclweb.org/anthology/W/W05/W05-0809}, + Year = {2005}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W05/W05-0809}} + +@article{Brown93, + Annote = {\AlsoinArmstrong94ed{pp 223-272}}, + Author = {P. F. Brown and S. A. Della Pietra and V. J. Della Pietra and R. L. Mercer}, + Journal = {Computational Linguistics}, + Number = 2, + Pages = {263-311}, + Title = {The Mathematics of Statistical Machine Translation: Parameter Estimation}, + Url = {http://www.aclweb.org/anthology/J93-2003.pdf}, + Volume = 19, + Year = 1993, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/J93-2003.pdf}} + +@inproceedings{callison04, + Address = {Barcelona, Spain}, + Author = {C. Callison-Burch and D. Talbot and M. Osborne}, + Booktitle = +ACL2004, + Month = {July}, + Pages = {175--182}, + Title = {Statistical Machine Translation with Word- and Sentence-Aligned Parallel Corpora}, + Year = 2004} + +@article{knight99decoding, + Author = {Kevin Knight}, + Journal = {Computational Linguistics}, + Number = {4}, + Pages = {607-615}, + Title = {Decoding Complexity in Word-Replacement Translation Models}, + Url = {citeseer.ist.psu.edu/knight99decoding.html}, + Volume = {25}, + Year = {1999}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/knight99decoding.html}} + +@inproceedings{germann03greedy, + Address = {Edmonton, Canada}, + Author = {Ulrich Germann}, + Booktitle = {Greedy Decoding for Statistical Machine Translation in Almost Linear Time}, + Journal = +NAACL2003, + Year = {2003}} + +@article{brown90statistical, + Author = {Peter F. Brown and John Cocke and Stephen Della Pietra and Vincent J. Della Pietra and Frederick Jelinek and John D. Lafferty and Robert L. Mercer and Paul S. 
Roossin}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {79-85}, + Title = {A Statistical Approach to Machine Translation}, + Url = {citeseer.ist.psu.edu/brown90statistical.html}, + Volume = {16}, + Year = {1990}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/brown90statistical.html}} + +@inproceedings{marcu02phrase, + Author = {Daniel Marcu and William Wong}, + Booktitle = +EMNLP2002, + Title = {A Phrase-Based, Joint Probability Model for Statistical Machine Translation}, + Url = {citeseer.ist.psu.edu/marcu02phrasebased.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/marcu02phrasebased.html}} + +@inproceedings{wu96polynomialtime, + Address = {San Francisco}, + Author = {Dekai Wu}, + Booktitle = +ACL1996, + Pages = {152--158}, + Title = {A Polynomial-Time Algorithm for Statistical Machine Translation}, + Url = {citeseer.ist.psu.edu/wu96polynomialtime.html}, + Year = {1996}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/wu96polynomialtime.html}} + +@article{wu97itg, + Address = {Cambridge, MA, USA}, + Author = {Dekai Wu}, + Issn = {0891-2017}, + Journal = {Computational Linguistics}, + Number = {3}, + Pages = {377--403}, + Publisher = {MIT Press}, + Title = {Stochastic inversion transduction grammars and bilingual parsing of parallel corpora}, + Volume = {23}, + Year = {1997}} + +@inproceedings{yamada01syntaxbased, + Author = {Kenji Yamada and Kevin Knight}, + Booktitle = +ACL2001, + Pages = {523-530}, + Title = {A Syntax-based Statistical Translation Model}, + Url = {citeseer.ist.psu.edu/article/yamada01syntaxbased.html}, + Year = {2001}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/article/yamada01syntaxbased.html}} + +@inproceedings{yamada02decoder, + Address = {Philadelphia, USA}, + Author = {Kenji Yamada and Kevin Knight}, + Booktitle = +ACL2002, + Title = {A Decoder for Syntax-based Statistical {MT}}, + Year = {2002}} + +@inproceedings{galley04rule, + Address = {Boston, USA}, + Author = {Galley, Michel and Hopkins, Mark and Knight, Kevin and Marcu, Daniel}, + Booktitle = +NAACL2004, + Month = May, + Title = {What's in a translation rule?}, + Url = {http://www.isi.edu/natural-language/projects/rewrite/whatsin.pdf}, + Year = 2004, + Bdsk-Url-1 = {http://www.isi.edu/natural-language/projects/rewrite/whatsin.pdf}} + +@inproceedings{galley06scalable, + Address = {Sydney, Australia}, + Author = {Galley, Michel and Graehl, Jonathan and Knight, Kevin and Marcu, Daniel and DeNeefe, Steve and Wang, Wei and Thayer, Ignacio}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {961--968}, + Title = {Scalable Inference and Training of Context-Rich Syntactic Translation Models}, + Year = {2006}} + +@inproceedings{marcu06spmt, + Address = {Sydney, Australia}, + Author = {Marcu, Daniel and Wang, Wei and Echihabi, Abdessamad and Knight, Kevin}, + Booktitle = +EMNLP2006, + Month = {July}, + Pages = {44--52}, + Title = {{SPMT}: Statistical Machine Translation with Syntactified Target Language Phrases}, + Url = {http://www.aclweb.org/anthology/W/W06/W06-1606}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W06/W06-1606}} + +@inproceedings{chiang05hierarchical, + Address = {Ann Arbor, Michigan}, + Author = {Chiang, David}, + Booktitle = +ACL2005, + Month = {June}, + Pages = {263--270}, + Title = {A Hierarchical Phrase-Based Model for Statistical Machine Translation}, + Url = {http://www.aclweb.org/anthology/P/P05/P05-1033}, + Year = {2005}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P05/P05-1033}} + +@article{chiang07hierarchical, + Author = {David Chiang}, + Issn = 
{0891-2017}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {201--228}, + Publisher = {MIT Press}, + Title = {Hierarchical Phrase-Based Translation}, + Volume = {33}, + Year = {2007}} + +@article{ker97classbased, + Author = {Sue J. Ker and Jason S. Chang}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {313-343}, + Title = {A Class-based Approach to Word Alignment}, + Url = {citeseer.ist.psu.edu/ker97classbased.html}, + Volume = {23}, + Year = {1997}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/ker97classbased.html}} + +@article{melamed00models, + Author = {I. Dan Melamed}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {221-249}, + Title = {Models of Translational Equivalence among Words}, + Url = {citeseer.ist.psu.edu/article/melamed00models.html}, + Volume = {26}, + Year = {2000}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/article/melamed00models.html}} + +@inproceedings{wellington06, + Address = {Boston, USA}, + Author = {Benjamin Wellington and Joseph Turian and Chris Pike and I. Dan Melamed }, + Booktitle = {Proc.\ of the 7th Biennial Conference of the Association for Machine Translation in the Americas (AMTA)}, + Title = {Scalable Purely-Discriminative Training for Word and Tree Transducers}, + Year = {2006}} + +@article{darroch72gis, + Author = {J. N. Darroch and D. Ratcliff}, + Journal = {Annals of Mathematical Statistics}, + Pages = {1470-1480}, + Title = {Generalized iterative scaling for log-linear models}, + Volume = {43}, + Year = {1972}} + +@inproceedings{lacostejulien06qap, + Address = {New York City, USA}, + Author = {Lacoste-Julien, Simon and Taskar, Ben and Klein, Dan and Jordan, Michael I.}, + Booktitle = +NAACL2006, + Month = {June}, + Pages = {112--119}, + Title = {Word Alignment via Quadratic Assignment}, + Url = {http://www.aclweb.org/anthology/N/N06/N06-1015}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/N/N06/N06-1015}} + +@inproceedings{liang06alignment, + Address = {New York City, USA}, + Author = {Liang, Percy and Taskar, Ben and Klein, Dan}, + Booktitle = +NAACL2006, + Month = {June}, + Pages = {104--111}, + Title = {Alignment by Agreement}, + Url = {http://www.aclweb.org/anthology/N/N06/N06-1014}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/N/N06/N06-1014}} + +@inproceedings{liang06perceptron, + Address = {Sydney, Australia}, + Author = {Percy Liang and Alexandre Bouchard-C\^{o}t\'{e} and Dan Klein and Ben Taskar}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {761--768}, + Title = {An end-to-end discriminative approach to machine translation}, + Year = {2006}} + +@inproceedings{liang07hdppcfg, + Address = {Prague, Czech Republic}, + Author = {Liang, Percy and Petrov, Slav and Jordan, Michael and Klein, Dan }, + Booktitle = +EMNLP2007, + Pages = {688--697}, + Title = {The Infinite {PCFG} Using Hierarchical {Dirichlet} Processes}, + Year = {2007}} + +@inproceedings{finkel07infinite, + Address = {Prague, Czech Republic}, + Author = {Jenny Rose Finkel and Trond Grenager and Christopher D. Manning}, + Booktitle = +ACL2007, + Title = {The Infinite Tree}, + Year = {2007}} + +@inproceedings{moore06improved, + Address = {Sydney, Australia}, + Author = {Moore, Robert C. 
and Yih, Wen-tau and Bode, Andreas}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {513--520}, + Title = {Improved Discriminative Bilingual Word Alignment}, + Url = {http://www.aclweb.org/anthology/P/P06/P06-1065}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P06/P06-1065}} + +@inproceedings{collins02new, + Address = {Philadelphia, USA}, + Author = {M. Collins and N. Duffy}, + Booktitle = +ACL2002, + Title = {New ranking algorithms for parsing and tagging: Kernels over discrete structures}, + Url = {citeseer.ist.psu.edu/article/collins02new.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/article/collins02new.html}} + +@inproceedings{ayan06cper, + Address = {Sydney, Australia}, + Author = {Ayan, Necip Fazil and Dorr, Bonnie J.}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {9--16}, + Title = {Going Beyond {AER}: An Extensive Analysis of Word Alignments and Their Impact on {MT}}, + Url = {http://www.aclweb.org/anthology/P/P06/P06-1002}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P06/P06-1002}} + +@inproceedings{fraser06, + Address = {Sydney, Australia}, + Author = {Fraser, Alexander and Marcu, Daniel}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {769--776}, + Publisher = {Association for Computational Linguistics}, + Title = {Semi-Supervised Training for Statistical Word Alignment}, + Url = {http://www.aclweb.org/anthology/P/P06/P06-1097}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P06/P06-1097}} + +@techreport{fraser06measure, + Author = {Alexander Fraser and Daniel Marcu}, + Institution = {ISI/University of Southern California}, + Month = May, + Title = {Measuring Word Alignment Quality for Statistical Machine Translation}, + Type = {ISI-TR-616 tecnical report}, + Year = 2006} + +@misc{papineni01bleu, + Author = {K. Papineni and S. Roukos and T. Ward and W. Zhu}, + Text = {Papineni, K.A., Roukos, S., Ward, T., Zhu, W.J.: Bleu: a method for automatic evaluation of machine translation. Technical Report RC22176 (W0109-022), IBM Research Division, Thomas J. Watson Research Center (2001)}, + Title = {Bleu: a method for automatic evaluation of machine translation}, + Url = {citeseer.ist.psu.edu/papineni02bleu.html}, + Year = {2001}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/papineni02bleu.html}} + +@inproceedings{papineni02bleu, + Address = {Philadelphia, Pennsylvania}, + Author = {Kishore Papineni and Salim Roukos and Todd Ward and Wei-Jing Zhu}, + Booktitle = +ACL2002, + Pages = {311--318}, + Title = {BLEU: a method for automatic evaluation of machine translation}, + Year = {2002}} + +@article{papineni98dtm, + Author = {Papineni, K.A. and Roukos, S. and Ward, R.T.}, + Journal = {Acoustics, Speech and Signal Processing, 1998. 
Proceedings of the 1998 IEEE International Conference on}, + Pages = {189-192 vol.1}, + Title = {Maximum likelihood and discriminative training of direct translation models}, + Volume = {1}, + Year = {12-15 May 1998}} + +@inproceedings{koehn04pharaoh, + Author = {Philipp Koehn}, + Booktitle = {Proc.\ of the AMTA-2004}, + Title = {Pharaoh: a Beam Search Decoder for Phrase-Based Statistical Machine Translation Models}, + Year = {2004}} + +@inproceedings{koehn07moses, + Address = {Prague}, + Author = {Philipp Koehn and Hieu Hoang and Alexandra Birch and Chris Callison-Burch and Marcello Federico and Nicola Bertoldi and Brooke Cowan and Wade Shen and Christine Moran and Richard Zens and Chris Dyer and Ondrej Bojar and Alexandra Constantin and Evan Herbst}, + Booktitle = +ACL2007, + Title = {Moses: Open Source Toolkit for Statistical Machine Translation}, + Year = {2007}} + +@article{bangalore99supertagging, + Author = {Srinivas Bangalore and Aravind K. Joshi}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {237-265}, + Title = {Supertagging: An Approach to Almost Parsing}, + Url = {citeseer.ist.psu.edu/bangalore99supertagging.html}, + Volume = {25}, + Year = {1999}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/bangalore99supertagging.html}} + +@inproceedings{hockenmaier02ccgbank, + Address = {Las Palmas, Spain}, + Author = {Julia Hockenmaier and Mark Steedman}, + Booktitle = +LREC2002, + Pages = {1974--1981}, + Title = {Acquiring Compact Lexicalized Grammars from a Cleaner Treebank}, + Url = {citeseer.ist.psu.edu/531192.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/531192.html}} + +@inproceedings{Copestake:Flickinger:2000, + Address = {Athens, Greece}, + Author = {Ann Copestake and Dan Flickinger}, + Booktitle = +LREC2000, + Title = {An open-source grammar development environment and broad-coverage {English} grammar using {HPSG}}, + Year = 2000} + +@incollection{Flickinger:2002, + Author = {Dan Flickinger}, + Crossref = {_CollaborativeLangEng}, + Title = {On building a more efficient grammar by exploiting types}} + +@book{_CollaborativeLangEng, + Address = {Stanford, USA}, + Booktitle = {Collaborative Language Engineering}, + Editor = {Stephan Oepen and Dan Flickinger and Jun'ichi Tsujii and Hans Uszkoreit}, + Publisher = {CSLI Publications}, + Title = {Collaborative Language Engineering}, + Year = 2002} + +@inproceedings{Uszkoreit:2002, + Address = {Taipei, Taiwan}, + Author = {Hans Uszkoreit}, + Booktitle = +COLING2002, + Title = {New Chances for Deep Linguistic Processing}, + Year = {2002}} + +@book{Butt+:1999, + Address = {Stanford, USA}, + Author = {Miriam Butt and Tracy {Holloway King} and Maria-Eugenia Nino and Frederique Segond}, + Publisher = {CSLI Publications}, + Title = {A Grammar Writer's Cookbook}, + Year = 1999} + +@inproceedings{Bos+:2004, + Address = {Geneva, Switzerland}, + Author = {Johan Bos and Stephen Clark and Mark Steedman and James R. 
Curran and Julia Hockenmaier}, + Booktitle = +COLING2004, + Pages = {1240--7}, + Title = {Wide-Coverage Semantic Representations from a {CCG} Parser}, + Year = 2004} + +@inproceedings{vanNoord:2004, + Address = {Barcelona, Spain}, + Author = {Gertjan {van Noord}}, + Booktitle = +ACL2004, + Title = {Error Mining for Wide-Coverage Grammar Engineering}, + Year = 2004} + +@inproceedings{zhang:kordoni:2006, + Address = {Genoa, Italy}, + Author = {Yi Zhang and Valia Kordoni}, + Booktitle = +LREC2006, + Title = {Automated Deep Lexical Acquisition for Robust Open Texts Processing}, + Year = {2006}} + +@inproceedings{zhang-kordoni:2005:ALTA2005, + Address = {Sydney, Australia}, + Author = {Yi Zhang and Valia Kordoni}, + Booktitle = {Proc.\ of the Australasian Language Technology Workshop 2005}, + Pages = {24--31}, + Title = {A Statistical Approach towards Unknown Word Type Prediction for Deep Grammars}, + Year = {2005}} + +@inproceedings{blunsom04srl, + Address = {Sydney, Australia}, + Author = {Phil Blunsom}, + Booktitle = {Proc.\ of the Australasian Language Technology Workshop 2004}, + Pages = {109--116}, + Title = {Maximum Entropy {M}arkov models for semantic role labelling}, + Year = {2005}} + +@inproceedings{blunsom06supertagging, + Address = {Sydney, Australia}, + Author = {Blunsom, Phil and Baldwin, Timothy}, + Booktitle = {Proceedings of the 2006 Conference on Empirical Methods in Natural Language Processing}, + Month = {July}, + Pages = {164--171}, + Title = {Multilingual Deep Lexical Acquisition for {HPSG}s via Supertagging}, + Year = {2006}} + +@inproceedings{Ben:Fli:Oep:02, + Address = {Taipei, Taiwan}, + Author = {Emily M. Bender and Dan Flickinger and Stephan Oepen}, + Booktitle = +COLING2002-GEE, + Title = {The Grammar {M}atrix. {A}n Open-Source Starter-Kit for the Rapid Development of Cross-Linguistically Consistent Broad-Coverage Precision Grammar}, + Year = 2002} + +@inproceedings{Riezler:2002, + Address = {Philadelphia, USA}, + Author = {Stefan Riezler and Tracy H. King and Ronald M. Kaplan and Richard Crouch and John T. {Maxwell III} and Mark Johnson}, + Booktitle = +ACL2002, + Title = {Parsing the {Wall Street Journal} using a {Lexical-Functional Grammar} and Discriminative Estimation Techniques}, + Year = 2002} + +@incollection{Baldwin+:2005b, + Author = {Timothy Baldwin and Emily M. 
Bender and Dan Flickinger and Ara Kim and Stephan Oepen}, + Crossref = {_Kepser:Reis:2005}, + Title = {Beauty and the Beast: What running a broad-coverage precision grammar over the {BNC} taught us about the grammar --- and the corpus}} + +@inproceedings{Baldwin:2005d, + Address = {Ann Arbor, USA}, + Author = {Timothy Baldwin}, + Booktitle = +ACL2005-DLA, + Pages = {67--76}, + Title = {Bootstrapping Deep Lexical Resources: Resources for Courses}, + Year = 2005} + +@inproceedings{Baldwin:2005e, + Address = {Tokyo, Japan}, + Author = {Timothy Baldwin}, + Booktitle = +PACLING2005, + Note = {(Invited Paper)}, + Pages = {23--32}, + Title = {General-Purpose Lexical Acquisition: Procedures, Questions and Results}, + Year = {2005}} + +@phdthesis{Fouvry:2003b, + Author = {Frederik Fouvry}, + School = {University of Essex}, + Title = {Robust Processing for Constraint-based Grammar Formalisms}, + Year = 2003} + +@inproceedings{Lapata:Keller:04, + Address = {Boston, USA}, + Author = {Mirella Lapata and Frank Keller}, + Booktitle = +NAACL2004, + Pages = {121--8}, + Title = {The Web as a Baseline: Evaluating the Performance of Unsupervised Web-based Models for a Range of {NLP} Tasks}, + Year = {2004}} + +@phdthesis{Korhonen:2002, + Author = {Anna Korhonen}, + School = {University of Cambridge}, + Title = {Subcategorization Acquisition}, + Year = 2002} + +@inproceedings{Joanis:2003, + Address = {Budapest, Hungary}, + Author = {Eric Joanis and Suzanne Stevenson}, + Booktitle = +EACL2003, + Pages = {163--70}, + Title = {A general feature space for automatic verb classification}, + Year = 2003} + +@book{Pollard:Sag:1994, + Address = {Chicago, USA}, + Author = {Carl Pollard and Ivan A. Sag }, + Publisher = {The University of Chicago Press}, + Title = {Head-driven Phrase Structure Grammar}, + Year = 1994} + +@inproceedings{Oepen+:2002, + Address = {Sozopol, Bulgaria}, + Author = {Stephan Oepen and Dan Flickinger and Kristina Toutanova and Christoper D. Manning}, + Booktitle = {Proc.\ of The First Workshop on Treebanks and Linguistic Theories (TLT-2002)}, + Title = {{LinGO Redwoods}: A Rich and Dynamic Treebank for {HPSG}}, + Year = 2002} + +@inproceedings{Bond+:2004, + Address = {Hainan Island, China}, + Author = {Francis Bond and Sanae Fujita and Chikara Hashimoto and Kaname Kasahara and Shigeko Nariyama and Eric Nichols and Akira Ohtani and Takaaki Tanaka and Shigeaki Amano}, + Booktitle = +IJCNLP2004, + Pages = {554--9}, + Title = {The {Hinoki} Treebank: A Treebank for Text Understanding}, + Year = 2004} + +@techreport{Matsumoto+:2003, + Author = {Yuji Matsumoto and Akira Kitauchi and Tatsuo Yamashita and Yoshitaka Hirano and Hiroshi Matsuda and Kazuma Takaoka and Masayuki Asahara}, + Institution = {NAIST}, + Title = {{\it Japanese Morphological Analysis System {ChaSen} Version 2.3.3 Manual}}, + Year = 2003} + +@inproceedings{Ngai:Florian:2001, + Address = {Pittsburgh, USA}, + Author = {Grace Ngai and Radu Florian}, + Booktitle = +NAACL2001, + Pages = {40--7}, + Title = {Transformation-based learning in the fast lane}, + Year = 2001} + +@book{vapnik95, + Address = {New York, NY, USA}, + Author = {Vladimir N. 
Vapnik}, + Isbn = {0-387-94559-8}, + Publisher = {Springer-Verlag New York, Inc.}, + Title = {The nature of statistical learning theory}, + Year = {1995}} + +@article{tsochantaridis05, + Address = {Cambridge, MA, USA}, + Author = {Ioannis Tsochantaridis and Thorsten Joachims and Thomas Hofmann and Yasemin Altun}, + Issn = {1533-7928}, + Journal = {Journal of Machine Learning Research}, + Pages = {1453--1484}, + Publisher = {MIT Press}, + Title = {Large Margin Methods for Structured and Interdependent Output Variables}, + Volume = {6}, + Year = {2005}} + +@inproceedings{markov13, + Address = {St. Petersburg}, + Author = {Andrei Markov}, + Booktitle = {Lecture at the physical-mathematical faculty, Royal Academy of Sciences}, + Pages = {7:153--162}, + Title = {An example of statistical investigation in the text of `{E}ugene {O}nyegin' illustrating coupling of tests in chains}, + Year = 1913} + +@article{baum70, + Author = {L. E. Baum and T. Petrie and G. Soules and N. Weiss}, + Journal = {The Annals of Mathematical Statistics}, + Pages = {164--171}, + Title = {A maximization technique occurring in the statistical analysis of probabilistic function of {Markov} chains}, + Volume = {41(1)}, + Year = {1970}} + +@inproceedings{li94markov, + Author = {Stan Z. Li}, + Booktitle = {{ECCV} (2)}, + Pages = {361-370}, + Title = {Markov Random Field Models in Computer Vision}, + Url = {citeseer.ist.psu.edu/li94markov.html}, + Year = {1994}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/li94markov.html}} + +@inproceedings{mccallum00memm, + Author = {Andrew Mc{C}allum and Dayne Freitag and Fernando Pereira}, + Booktitle = {Proc. 17th International Conference on Machine Learning}, + Pages = {591--598}, + Title = {Maximum Entropy {M}arkov Models for Information Extraction and Segmentation}, + Url = {citeseer.ist.psu.edu/mccallum00maximum.html}, + Year = {2000}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/mccallum00maximum.html}} + +@inproceedings{Siegel:Bender:2002, + Address = {Taipei, Taiwan}, + Author = {Melanie Siegel and Emily M. Bender}, + Booktitle = {Proc.\ of the 3rd Workshop on Asian Language Resources and International Standardization}, + Title = {Efficient Deep Processing of {Japanese}}, + Year = 2002} + +@inproceedings{stolcke02srilm, + Author = {A. Stolcke}, + Booktitle = {Proc.\ of the International Conference on Spoken Language Processing}, + Title = {{SRILM} -- an extensible language modeling toolkit}, + Url = {citeseer.ist.psu.edu/stolcke02srilm.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/stolcke02srilm.html}} + +@inproceedings{diab00wsd, + Author = {Mona Diab}, + Booktitle = {Proc.\ of the ACL-2000 Workshop on Word Senses and Multilinguality}, + Title = {An Unsupervised Method for Multilingual Word Sense Tagging Using Parallel Corpora: A preliminary investigation}, + Url = {citeseer.ist.psu.edu/574728.html}, + Year = 2000, + Bdsk-Url-1 = {citeseer.ist.psu.edu/574728.html}} + +@inproceedings{brown97ebmt, + Address = {Santa Fe, New Mexico}, + Author = {Ralf D. 
Brown}, + Booktitle = {Proceedings of the Seventh International Conference on Theoretical and Methodological Issues in Machine Translation {(TMI-97)}}, + Month = {July}, + Pages = {111-118}, + Title = {Automated Dictionary Extraction for ``Knowledge-Free'' Example-Based Translation}, + Url = {citeseer.ist.psu.edu/brown97automated.html}, + Year = {1997}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/brown97automated.html}} + +@book{steedman00syntactic, + Address = {Cambridge, MA, USA}, + Author = {Mark Steedman}, + Isbn = {0-262-19420-1}, + Publisher = {MIT Press}, + Title = {The syntactic process}, + Year = {2000}} + +@inproceedings{toutanova02parse, + Author = {K. Toutanova and C. Manning and S. Shieber and D. Flickinger and S. Oepen}, + Booktitle = {In First Workshop on Treebanks and Linguistic Theories (TLT-2002)}, + Pages = {253--263}, + Title = {Parse disambiguation for a rich {HPSG} grammar}, + Url = {citeseer.ist.psu.edu/toutanova02parse.html}, + Year = {2002}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/toutanova02parse.html}} + +@inproceedings{ninomiya-EtAl:2006:EMNLP, + Address = {Sydney, Australia}, + Author = {Ninomiya, Takashi and Matsuzaki, Takuya and Tsuruoka, Yoshimasa and Miyao, Yusuke and Tsujii, Jun'ichi}, + Booktitle = +EMNLP2006, + Month = {July}, + Pages = {155--163}, + Title = {Extremely Lexicalized Models for Accurate and Fast {HPSG} Parsing}, + Url = {http://www.aclweb.org/anthology/W/W06/W06-1619}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W06/W06-1619}} + +@techreport{nist01, + Author = {NIST}, + Institution = {NIST}, + Title = {Automatic evaluation of machine translation quality using n-gram co-occurrence statistics}, + Type = {technical report}, + Url = {http://www.nist.gov/speech/tests/mt/}, + Year = 2001, + Bdsk-Url-1 = {http://www.nist.gov/speech/tests/mt/}} + +@inproceedings{och02me, + Address = {Philadelphia, USA}, + Author = {Franz Josef Och and Hermann Ney}, + Booktitle = +ACL2002, + Pages = {295--302}, + Title = {Discriminative training and maximum entropy models for statistical machine translation}, + Year = {2002}} + +@inproceedings{och03mert, + Address = {Sapporo, Japan}, + Author = {Franz Josef Och}, + Booktitle = +ACL2003, + Pages = {160--167}, + Title = {Minimum error rate training in statistical machine translation}, + Year = {2003}} + +@inproceedings{joachims06perf, + Address = {Philadelphia, PA, USA}, + Author = {Thorsten Joachims}, + Booktitle = {KDD '06: Proceedings of the 12th ACM SIGKDD international conference on knowledge discovery and data mining}, + Doi = {http://doi.acm.org/10.1145/1150402.1150429}, + Isbn = {1-59593-339-5}, + Pages = {217--226}, + Title = {Training linear {SVM}s in linear time}, + Year = {2006}, + Bdsk-Url-1 = {http://doi.acm.org/10.1145/1150402.1150429}} + +@inproceedings{curran2006multi, + Address = {Sydney, Australia}, + Author = {Curran, James R. and Clark, Stephen and Vadas, David}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {697--704}, + Title = {Multi-Tagging for Lexicalized-Grammar Parsing}, + Url = {http://www.aclweb.org/anthology/P/P06/P06-1088}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P06/P06-1088}} + +@article{rabiner90hmm, + Address = {San Francisco, CA, USA}, + Author = {Lawrence R. 
Rabiner}, + Book = {Readings in speech recognition}, + Isbn = {1-55860-124-4}, + Pages = {267--296}, + Publisher = {Morgan Kaufmann Publishers Inc.}, + Title = {A tutorial on hidden {M}arkov models and selected applications in speech recognition}, + Year = {1990}} + +@inproceedings{koehn04statistical, + Address = {Barcelona, Spain}, + Author = {Philipp Koehn}, + Booktitle = +EMNLP2004, + Month = {July}, + Title = {Statistical significance tests for machine translation evaluation}, + Url = {citeseer.ist.psu.edu/koehn04statistical.html}, + Year = {2004}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/koehn04statistical.html}} + +@inproceedings{mccallum03ne, + Address = {Edmonton, Canada}, + Author = {Andrew McCallum and Wei Li}, + Booktitle = +NAACL2003, + Pages = {188--191}, + Title = {Early results for named entity recognition with conditional random fields, feature induction and web-enhanced lexicons}, + Year = {2003}} + +@inproceedings{sha03shallow, + Address = {Edmonton, Canada}, + Author = {Fei Sha and Fernando Pereira}, + Booktitle = +NAACL2003, + Pages = {134--141}, + Title = {Shallow parsing with conditional random fields}, + Year = {2003}} + +@inproceedings{peng04accurate, + Author = {F. Peng and A. McCallum}, + Booktitle = +NAACL2004, + Pages = {329--336}, + Title = {Accurate information extraction from research papers using conditional random fields}, + Url = {citeseer.ist.psu.edu/peng04accurate.html}, + Year = {2004}, + Bdsk-Url-1 = {citeseer.ist.psu.edu/peng04accurate.html}} + +@inproceedings{cohn05srl, + Address = {Ann Arbor, Michigan}, + Author = {Cohn, Trevor and Blunsom, Philip}, + Booktitle = +CONLL2005, + Month = {June}, + Pages = {169--172}, + Title = {Semantic Role Labelling with Tree Conditional Random Fields}, + Url = {http://www.aclweb.org/anthology/W/W05/W05-0622}, + Year = {2005}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W05/W05-0622}} + +@inproceedings{blunsom06wa, + Address = {Sydney, Australia}, + Author = {Blunsom, Phil and Cohn, Trevor}, + Booktitle = +ACL2006, + Month = {July}, + Pages = {65--72}, + Title = {Discriminative Word Alignment with Conditional Random Fields}, + Url = {http://www.aclweb.org/anthology/P/P06/P06-1009}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P06/P06-1009}} + +@inproceedings{blunsom08latent, + Address = {Columbus, Ohio}, + Author = {Blunsom, Phil and Cohn, Trevor and Osborne, Miles}, + Booktitle = +ACL2008, + Month = {June}, + Pages = {200--208}, + Title = {A Discriminative Latent Variable Model for Statistical Machine Translation}, + Url = {http://www.aclweb.org/anthology/P/P08/P08-1024}, + Year = {2008}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P08/P08-1024}} + +@inproceedings{blunsom08bscfg, + Address = {Vancouver, Canada}, + Author = {Blunsom, Phil and Cohn, Trevor and Osborne, Miles}, + Booktitle = {Proceedings of NIPS 21}, + Month = {December}, + Title = {Bayesian Synchronous Grammar Induction}, + Year = {2008}} + +@inproceedings{blunsom08inference, + Address = {Honolulu, Hawaii}, + Author = {Blunsom, Phil and Osborne, Miles}, + Booktitle = {Proceedings of the 2008 Conference on Empirical Methods in Natural Language Processing}, + Month = {October}, + Pages = {215--223}, + Title = {Probabilistic Inference for Machine Translation}, + Year = {2008}} + +@inproceedings{blunsom09gibbs, + Address = {Singapore}, + Author = {Blunsom, Phil and Cohn, Trevor and Dyer, Chris and Osborne, Miles}, + Booktitle = +ACL2009, + Month = {August}, + Title = {A {G}ibbs Sampler for Phrasal Synchronous Grammar 
Induction}, + Year = {2009}} + +@article{besag75pseudo, + Author = {Besag, J. }, + Journal = {The Statistician}, + Pages = {179--195}, + Priority = {0}, + Title = {Statistical Analysis of Non-Lattice Data}, + Volume = {24:3}, + Year = {1975}} + +@inproceedings{klein02conditional, + Address = {Philadelphia, USA}, + Author = {Dan Klein and Christopher D. Manning}, + Booktitle = +EMNLP2002, + Pages = {9--16}, + Title = {Conditional structure versus conditional estimation in {NLP} models}, + Year = {2002}} + +@inproceedings{petrov07, + Address = {Vancouver, Canada}, + Author = {Petrov, Slav and Pauls, Adam and Klein, Dan}, + Booktitle = {Advances in Neural Information Processing Systems 20 (NIPS)}, + Title = {Discriminative Log-Linear Grammars with Latent Variables}, + Year = {2007}} + +@article{marcus94penn, + Address = {Cambridge, MA, USA}, + Author = {Mitchell P. Marcus and Mary Ann Marcinkiewicz and Beatrice Santorini}, + Issn = {0891-2017}, + Journal = {Computational Linguistics}, + Number = {2}, + Pages = {313--330}, + Publisher = {MIT Press}, + Title = {Building a large annotated corpus of {E}nglish: the {P}enn treebank}, + Volume = {19}, + Year = {1993}} + +@inproceedings{toutanova03pos, + Address = {Edmonton, Canada}, + Author = {Kristina Toutanova and Dan Klein and Christopher D. Manning and Yoram Singer}, + Booktitle = +NAACL2003, + Pages = {173--180}, + Title = {Feature-rich part-of-speech tagging with a cyclic dependency network}, + Year = {2003}} + +@inproceedings{matsuzaki07hpsg, + Address = {Hyderabad, India}, + Author = {Matsuzaki, Takuya and Yusuke Miyao and Jun'ichi Tsujii}, + Booktitle = {Proc.\ of the Twentieth International Joint Conference on Artificial Intelligence}, + Month = {January}, + Title = {Efficient {HPSG} Parsing with Supertagging and {CFG}-filtering}, + Year = {2007}} + +@article{callmeier00pet, + Address = {New York, NY, USA}, + Author = {Ulrich Callmeier}, + Issn = {1351-3249}, + Journal = {Natural Language Engineering}, + Number = {1}, + Pages = {99--107}, + Publisher = {Cambridge University Press}, + Title = {P{ET} a platform for experimentation with efficient {HPSG} processing techniques}, + Volume = {6}, + Year = {2000}} + +@article{platt99sv, + Author = {John C. 
Platt}, + Journal = {Advances in Large Margin Classifiers}, + Pages = {61--74}, + Publisher = {MIT Press}, + Title = {Probabilities for SV Machines and Comparisons to Regularized Likelihood Methods}, + Year = {1999}} + +@article{tong01active, + Author = {Simon Tong and Daphne Koller}, + Journal = {Journal of Machine Learning Research}, + Month = {November}, + Pages = {45--66}, + Title = {Support Vector Machine Active Learning with Applications to Text Classification}, + Year = {2001}} + +@article{sutton07crf, + Address = {Cambridge, MA, USA}, + Author = {Charles Sutton and Andrex McCallum}, + Editor = {Lise Getoor and Ben Taskar}, + Issn = {0-262-07288-2}, + Journal = {Introduction to Statistical Relational Learning}, + Publisher = {MIT Press}, + Title = {An Introduction to Conditional Random Fields for Relational Learning}, + Year = {2007}} + +@inproceedings{sutton07piecewise, + Author = {Charles Sutton and Andrew McCallum}, + Booktitle = {Proc.\ of the International Conference on Machine Learning}, + Title = {Piecewise Pseudolikelihood for Efficient {CRF} Training}, + Year = {2007}} + +@inproceedings{sutton05piecewise, + Author = {Charles Sutton and Andrew McCallum}, + Booktitle = {Proc.\ of the Conference on Uncertainty in Artificial Intelligence}, + Title = {Piecewise Training for Undirected Models}, + Year = {2005}} + +@inproceedings{yarowsky01inducing, + Address = {Pittsburgh, Pennsylvania}, + Author = {David Yarowsky and Grace Ngai}, + Booktitle = +NAACL2001, + Pages = {1--8}, + Title = {Inducing multilingual POS taggers and NP bracketers via robust projection across aligned corpora}, + Year = {2001}} + +@inproceedings{drabek05induction, + Address = {Ann Arbor, Michigan}, + Author = {Elliot Franco Drabek and David Yarowsky}, + Booktitle = +ACL2005-PT, + Pages = {49--56}, + Title = {Induction of Fine-Grained Part-of-Speech Taggers via Classifier Combination and Crosslingual Projection}, + Year = {2005}} + +@inproceedings{tuffis05combined, + Address = {Ann Arbor, Michigan}, + Author = {D. Tuffis and R. Ion and A.Ceausu and D. Stefanescu}, + Booktitle = +ACL2005-PT, + Title = {Combined word alignments}, + Year = {2005}} + +@article{levine06hpsg, + Author = {Robert D. Levine and Detmar Meurers}, + Editor = {Keith Brown}, + Journal = {Encyclopedia of Language and Linguistics}, + Publisher = {Oxford: Elsevier}, + Title = {Head-Driven Phrase Structure Grammar: Linguistic Approach, Formal Foundations and Computational Realization}, + Year = {2006}} + +@article{steedman07ccg, + Author = {Mark Steedman and Jason Baldridge}, + Editor = {Robert Borsley and Kersti Borjars}, + Journal = {To appear in Non-transformational Syntax: A Guide to Current Models}, + Publisher = {Oxford: Blackwell}, + Title = {Combinatory Categorial Grammar}, + Year = {2007}} + +@article{white06efficient, + Author = {Michael White}, + Journal = {To appear in Research on Language and Computation}, + Number = {1}, + Pages = {39--75}, + Title = {Efficient Realization of Coordinate Structures in Combinatory Categorial Grammar}, + Volume = {4}, + Year = 2006} + +@inproceedings{kruijff05context, + Address = {Aberdeen, Scotland}, + Author = {Geert-Jan M. 
Kruijff}, + Journal = {Proceedings of the Tenth European Workshop on Natural Language Generation (ENLG-05)}, + Title = {Context-sensitive utterance planning for {CCG}}, + Year = {2005}} + +@article{hockenmaier04extending, + Author = {Julia Hockenmaier and Gann Bierner and Jason Baldridge}, + Journal = {Research on Language and Computation}, + Number = {2}, + Pages = {165--208}, + Publisher = {Springer Netherlands}, + Title = {Extending the Coverage of a {CCG} System}, + Volume = {2}, + Year = {2004}} + +@inproceedings{hockenmaier03parsing, + Address = {Sapporo, Japan}, + Author = {Julia Hockenmaier}, + Booktitle = +ACL2003, + Pages = {359--366}, + Title = {Parsing with generative models of predicate-argument structure}, + Year = {2003}} + +@inproceedings{hockenmaier01generative, + Address = {Philadelphia, Pennsylvania}, + Author = {Julia Hockenmaier and Mark Steedman}, + Booktitle = +ACL2001, + Pages = {335--342}, + Title = {Generative models for statistical parsing with Combinatory Categorial Grammar}, + Year = {2001}} + +@inproceedings{hockenmaier06creating, + Address = {Sydney, Australia}, + Author = {Julia Hockenmaier}, + Booktitle = +ACL2006, + Pages = {505--512}, + Title = {Creating a {CCGbank} and a wide-coverage {CCG} lexicon for {G}erman}, + Year = {2006}} + +@article{hockenmaier07ccgbank, + Author = {Julia Hockenmaier and Mark Steedman}, + Journal = {To appear in Computational Linguistics}, + Publisher = {MIT press}, + Title = {C{CGbank}: a corpus of {CCG} derivations and dependency structures extracted from the {P}enn Treebank}, + Year = {2007}} + +@article{jaynes57, + Author = {Jaynes, E. T.}, + Doi = {10.1103/PhysRev.106.620}, + Journal = {Phys. Rev.}, + Month = {May}, + Number = {4}, + Numpages = {10}, + Pages = {620--630}, + Publisher = {American Physical Society}, + Title = {Information Theory and Statistical Mechanics}, + Volume = {106}, + Year = {1957}, + Bdsk-Url-1 = {http://dx.doi.org/10.1103/PhysRev.106.620}} + +@article{crammer01algorithmic, + Author = {Koby Crammer and Yoram Singer}, + Journal = {Journal of Machine Learning Research}, + Pages = {265--292}, + Title = {On the algorithmic implementation of multiclass kernel-based vector machines}, + Volume = {2}, + Year = {2001}} + +@article{lewis68scfg, + Address = {New York, NY, USA}, + Author = {Philip M. {Lewis II} and Richard E. Stearns}, + Doi = {http://doi.acm.org/10.1145/321466.321477}, + Issn = {0004-5411}, + Journal = {J. 
ACM}, + Number = {3}, + Pages = {465--488}, + Publisher = {ACM}, + Title = {Syntax-Directed Transduction}, + Volume = {15}, + Year = {1968}, + Bdsk-Url-1 = {http://doi.acm.org/10.1145/321466.321477}} + +@inproceedings{watanabe06gnf, + Address = {Sydney, Australia}, + Author = {Taro Watanabe and Hajime Tsukada and Hideki Isozaki}, + Booktitle = +ACL2006, + Pages = {777--784}, + Title = {Left-to-right target generation for hierarchical phrase-based translation}, + Year = {2006}} + +@inproceedings{watanabe07online, + Address = {Prague, Czech Republic}, + Author = {Watanabe, Taro and Suzuki, Jun and Tsukada, Hajime and Isozaki, Hideki}, + Booktitle = +EMNLP2007, + Pages = {764--773}, + Title = {Online Large-Margin Training for Statistical Machine Translation}, + Year = {2007}} + +@inproceedings{carpuat07wsd, + Address = {Prague, Czech Republic}, + Author = {Carpuat, Marine and Wu, Dekai}, + Booktitle = +EMNLP2007, + Pages = {61--72}, + Title = {Improving Statistical Machine Translation Using Word Sense Disambiguation}, + Year = {2007}} + +@inproceedings{wang07, + Address = {Prague, Czech Republic}, + Author = {Wang, Chao and Collins, Michael and Koehn, Philipp}, + Booktitle = +EMNLP2007, + Pages = {737--745}, + Title = {{C}hinese Syntactic Reordering for Statistical Machine Translation}, + Url = {http://www.aclweb.org/anthology/D/D07/D07-1077}, + Year = {2007}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/D/D07/D07-1077}} + +@inproceedings{denero06generative, + Address = {New York City}, + Author = {DeNero, John and Gillick, Dan and Zhang, James and Klein, Dan}, + Booktitle = +NAACL2006-SMT, + Date-Modified = {2009-10-22 14:34:55 +0100}, + Month = {June}, + Pages = {31--38}, + Read = {Yes}, + Title = {Why Generative Phrase Models Underperform Surface Heuristics}, + Year = {2006}, + Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUIJidUJHRvcFgkb2JqZWN0c1gkdmVyc2lvblkkYXJjaGl2ZXLRBgdUcm9vdIABqAkKFRYXGyIjVSRudWxs0wsMDQ4RFFpOUy5vYmplY3RzV05TLmtleXNWJGNsYXNzog8QgASABqISE4ACgAOAB1lhbGlhc0RhdGFccmVsYXRpdmVQYXRo0hgNGRpXTlMuZGF0YU8RAa4AAAAAAa4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarigZIKwAAAAvL1BZOQUFDTDA2X1NNVF9EZU5lcm8ucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC8yWwmVUEVBERiBwcnZ3AAQAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAwmVGAQAAAAEAEAALy9QACTqHAAk6egAAkOcAAgBDTWFjaW50b3NoIEhEOlVzZXJzOnBibHVuc29tOkRvY3VtZW50czpwYXBlcnM6TkFBQ0wwNl9TTVRfRGVOZXJvLnBkZgAADgAuABYATgBBAEEAQwBMADAANgBfAFMATQBUAF8ARABlAE4AZQByAG8ALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADZVc2Vycy9wYmx1bnNvbS9Eb2N1bWVudHMvcGFwZXJzL05BQUNMMDZfU01UX0RlTmVyby5wZGYAEwABLwAAFQACAA///wAAgAXSHB0eH1gkY2xhc3Nlc1okY2xhc3NuYW1lox8gIV1OU011dGFibGVEYXRhVk5TRGF0YVhOU09iamVjdF8QMy4uLy4uLy4uLy4uL0RvY3VtZW50cy9wYXBlcnMvTkFBQ0wwNl9TTVRfRGVOZXJvLnBkZtIcHSQloiUhXE5TRGljdGlvbmFyeRIAAYagXxAPTlNLZXllZEFyY2hpdmVyAAgAEQAWAB8AKAAyADUAOgA8AEUASwBSAF0AZQBsAG8AcQBzAHYAeAB6AHwAhgCTAJgAoAJSAlQCWQJiAm0CcQJ/AoYCjwLFAsoCzQLaAt8AAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC8Q==}} + +@inproceedings{zollmann06syntax, + Address = {New York City}, + Author = {Andreas Zollmann and Ashish Venugopal}, + Booktitle = +NAACL2006-SMT, + Month = {June}, + Title = {Syntax augmented machine translation via chart parsing}, + Year = {2006}} + +@article{johnson02dop, + Author = {Mark Johnson}, + Journal = {Computational Linguistics}, + Number = {1}, + Pages = {71--76}, + Title = {The {DOP} Estimation Method Is Biased and Inconsistent}, + Volume = {28}, + Year = {2002}} + +@phdthesis{beal03, + Author = {Matthew Beal}, + School = {The Gatsby Computational 
Neuroscience Unit, University College London}, + Title = {Variational Algorithms for Approximate Bayesian Inference}, + Year = {2003}} + +@inproceedings{eck05iwslt, + Address = {Pittsburgh}, + Author = {Matthias Eck and Chiori Hori}, + Booktitle = +IWSLT2005, + Month = {October}, + Title = {Overview of the {IWSLT} 2005 Evaluation Campaign}, + Year = {2005}} + +@inproceedings{cherry07itg, + Address = {Rochester, USA}, + Author = {Colin Cherry and Dekany Lin}, + Booktitle = +NAACL2007-SSST, + Title = {Inversion Transduction Grammar for Joint Phrasal Translation Modeling}, + Year = {2007}} + +@article{teh06hdp, + Author = {Y. W. Teh and M. I. Jordan and M. J. Beal and D. M. Blei}, + Journal = {Journal of the American Statistical Association}, + Number = {476}, + Pages = {1566-1581}, + Title = {Hierarchical {D}irichlet Processes}, + Volume = {101}, + Year = {2006}} + +@inproceedings{marcu02joint, + Address = {Philadelphia}, + Author = {Daniel Marcu and William Wong}, + Booktitle = +EMNLP2002, + Month = {July}, + Pages = {133--139}, + Publisher = {Association for Computational Linguistics}, + Title = {A Phrase-Based, Joint Probability Model for Statistical Machine Translation}, + Year = 2002} + +@inproceedings{chappelier00monte, + Address = {London, UK}, + Author = {Jean-C\'{e}dric Chappelier and Martin Rajman}, + Booktitle = {NLP '00: Proceedings of the Second International Conference on Natural Language Processing}, + Isbn = {3-540-67605-8}, + Pages = {106--117}, + Title = {Monte-Carlo Sampling for NP-Hard Maximization Problems in the Framework of Weighted Parsing}, + Year = {2000}} + +@phdthesis{goodman98thesis, + Address = {Cambridge, MA, USA}, + Author = {Joshua T. Goodman}, + Isbn = {0-591-85426-0}, + Note = {Adviser-Stuart Shieber}, + Order_No = {AAI9832377}, + Publisher = {Harvard University}, + Title = {Parsing inside-out}, + Year = {1998}} + +@inproceedings{mi08forest, + Address = {Columbus, Ohio}, + Author = {Mi, Haitao and Huang, Liang and Liu, Qun}, + Booktitle = +ACL2008, + Month = {June}, + Pages = {192--199}, + Title = {Forest-Based Translation}, + Url = {http://www.aclweb.org/anthology/P/P08/P08-1023}, + Year = {2008}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P08/P08-1023}} + +@inproceedings{huang06syntax, + Address = {Boston, MA}, + Author = {Huang, Liang and Knight, Kevin and Joshi, Aravind}, + Booktitle = {In Proceedings of the 7th Biennial Conference of the Association for Machine Translation in the Americas ({AMTA})}, + Title = {Statistical Syntax-Directed Translation with Extended Domain of Locality}, + Year = {2006}} + +@inproceedings{kumar04mbr, + Author = {Shankar Kumar and William Byrne}, + Booktitle = +NAACL2004, + Pages = {169--176}, + Title = {Minimum bayes-risk decoding for statistical machine translation}, + Year = {2004}} + +@inproceedings{zhang08bitg, + Address = {Columbus, Ohio}, + Author = {Zhang, Hao and Quirk, Chris and Moore, Robert C. 
and Gildea, Daniel}, + Booktitle = +ACL2008, + Month = {June}, + Pages = {97--105}, + Title = {{Bayesian} Learning of Non-Compositional Phrases with Synchronous Parsing}, + Year = {2008}} + +@inproceedings{newman07distributed, + Author = {David Newman and Arthur Asuncion and Padhraic Smyth and Max Welling}, + Booktitle = {NIPS}, + Publisher = {MIT Press}, + Title = {Distributed Inference for Latent Dirichlet Allocation.}, + Year = {2007}} + +@inproceedings{asuncion08asynchronous, + Author = {Arthur Asuncion and Padhraic Smyth and Max Welling}, + Booktitle = {NIPS}, + Publisher = {MIT Press}, + Title = {Asynchronous Distributed Learning of Topic Models.}, + Year = {2008}} + +@inproceedings{johnson07, + Address = {Rochester, New York}, + Author = {Johnson, Mark and Griffiths, Thomas and Goldwater, Sharon}, + Booktitle = +NAACL2007, + Month = {April}, + Pages = {139--146}, + Title = {{Bayesian} Inference for {PCFG}s via {Markov} Chain {Monte} {Carlo}}, + Url = {http://www.aclweb.org/anthology/N/N07/N07-1018}, + Year = {2007}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/N/N07/N07-1018}} + +@inproceedings{johnson07pos, + Address = {Prague, Czech Republic}, + Author = {Johnson, Mark}, + Booktitle = +EMNLP2007, + Pages = {296--305}, + Title = {Why Doesnt {EM} Find Good {HMM} {POS}-Taggers?}, + Year = {2007}} + +@inproceedings{denero08complexity, + Address = {Columbus, Ohio}, + Author = {DeNero, John and Klein, Dan}, + Booktitle = {Proceedings of ACL-08: HLT, Short Papers}, + Month = {June}, + Pages = {25--28}, + Publisher = {Association for Computational Linguistics}, + Title = {The Complexity of Phrase Alignment Problems}, + Url = {http://www.aclweb.org/anthology/P/P08/P08-2007}, + Year = {2008}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P08/P08-2007}} + +@inproceedings{denero08sampling, + Address = {Honolulu, Hawaii}, + Author = {DeNero, John and Bouchard-C\^{o}t\'{e}, Alexandre and Klein, Dan}, + Booktitle = {Proceedings of the 2008 Conference on Empirical Methods in Natural Language Processing}, + Month = {October}, + Pages = {314--323}, + Publisher = {Association for Computational Linguistics}, + Title = {Sampling Alignment Structure under a {B}ayesian Translation Model}, + Url = {http://www.aclweb.org/anthology/D08-1033}, + Year = {2008}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/D08-1033}} + +@inproceedings{birch06constraining, + Address = {New York City}, + Author = {Birch, Alexandra and Callison-Burch, Chris and Osborne, Miles and Koehn, Philipp}, + Booktitle = {Proceedings on the Workshop on Statistical Machine Translation}, + Month = {June}, + Pages = {154--157}, + Publisher = {Association for Computational Linguistics}, + Title = {Constraining the Phrase-Based, Joint Probability Statistical Translation Model}, + Url = {http://www.aclweb.org/anthology/W/W06/W06-3123}, + Year = {2006}, + Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W06/W06-3123}} + +@inproceedings{chang08chinese, + Address = {Prague, Czech Republic}, + Author = {Chang, Pi-Chuan and Jurafsky, Dan and Manning, Christopher D.}, + Booktitle = +WMT2008, + Month = {June}, + Title = {Optimizing {C}hinese Word Segmentation for Machine Translation Performance}, + Year = {2008}} + +@inproceedings{habash06preprocessing, + Address = {New York City}, + Author = {Habash, Nizar and Sadat, Fatiha}, + Booktitle = +NAACL2006, + Publisher = {Association for Computational Linguistics}, + Title = {Arabic Preprocessing Schemes for Statistical Machine Translation}, + Year = {2006}} + +@inproceedings{goldwater07, + Address 
+	Author = {Goldwater, Sharon and Griffiths, Tom},
+	Booktitle = ACL2007,
+	Month = {June},
+	Pages = {744--751},
+	Title = {A fully Bayesian approach to unsupervised part-of-speech tagging},
+	Url = {http://www.aclweb.org/anthology/P/P07/P07-1094},
+	Year = {2007},
+	Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P07/P07-1094}}
+
+@inproceedings{goldwater06,
+	Address = {Sydney},
+	Author = {Goldwater, Sharon and Griffiths, Tom and Johnson, Mark},
+	Booktitle = ACL2006,
+	Title = {Contextual Dependencies in Unsupervised Word Segmentation},
+	Year = {2006}}
+
+@incollection{goldwater06interpolating,
+	Address = {Cambridge, MA},
+	Author = {Sharon Goldwater and Tom Griffiths and Mark Johnson},
+	Booktitle = {Advances in Neural Information Processing Systems 18},
+	Editor = {Y. Weiss and B. Sch\"{o}lkopf and J. Platt},
+	Pages = {459--466},
+	Publisher = {MIT Press},
+	Title = {Interpolating between types and tokens by estimating power-law generators},
+	Year = {2006}}
+
+@inproceedings{zhang08factorisation,
+	Address = {Manchester, UK},
+	Author = {Zhang, Hao and Gildea, Daniel and Chiang, David},
+	Booktitle = COLING2008,
+	Pages = {1081--1088},
+	Title = {Extracting Synchronous Grammar Rules From Word-Level Alignments in Linear Time},
+	Year = {2008}}
+
+@article{antoniak74,
+	Author = {Antoniak, Charles E.},
+	Journal = {The Annals of Statistics},
+	Number = {6},
+	Pages = {1152--1174},
+	Title = {Mixtures of Dirichlet Processes with Applications to Bayesian Nonparametric Problems},
+	Volume = {2},
+	Year = {1974}}
+
+@phdthesis{blunsom07thesis,
+	Author = {Blunsom, Phil},
+	School = {Department of Computer Science and Software Engineering, The University of Melbourne},
+	Title = {Structured Classification for Multilingual Natural Language Processing},
+	Year = {2007}}
+
+@phdthesis{RL:1,
+	Address = {Stanford, California},
+	Author = {R. E. Larson},
+	School = {Stanford University},
+	Title = {Title of Dissertation},
+	Year = 1964}
+
+@incollection{aldous85,
+	Author = {Aldous, D.},
+	Booktitle = {\'{E}cole d'\'{E}t\'{e} de Probabilit\'{e}s de Saint-Flour XIII 1983},
+	Pages = {1--198},
+	Publisher = {Springer},
+	Title = {Exchangeability and related topics},
+	Year = {1985}}
+
+@incollection{blunsom09scfg,
+	Author = {Phil Blunsom and Trevor Cohn and Miles Osborne},
+	Booktitle = {Advances in Neural Information Processing Systems 21},
+	Editor = {D. Koller and D. Schuurmans and Y. Bengio and L. Bottou},
+	Pages = {161--168},
+	Title = {Bayesian Synchronous Grammar Induction},
+	Year = {2009}}
+
+@inproceedings{zhang06synchronous,
+	Author = {Zhang, Hao and Huang, Liang and Gildea, Daniel and Knight, Kevin},
+	Booktitle = NAACL2006,
+	Location = {New York, New York},
+	Pages = {256--263},
+	Title = {Synchronous binarization for machine translation},
+	Year = {2006}}
+
+@article{fraser07measuring,
+	Address = {Cambridge, MA, USA},
+	Author = {Fraser, Alexander and Marcu, Daniel},
+	Issn = {0891-2017},
+	Journal = {Computational Linguistics},
+	Number = {3},
+	Pages = {293--303},
+	Publisher = {MIT Press},
+	Title = {Measuring Word Alignment Quality for Statistical Machine Translation},
+	Volume = {33},
+	Year = {2007}}
+
+@inproceedings{teh06,
+	Author = {Y. Teh},
+	Booktitle = {Proceedings of the 21st International Conference on Computational Linguistics and 44th Annual Meeting of the Association for Computational Linguistics},
+	Pages = {985--992},
+	Title = {A Hierarchical {B}ayesian Language Model based on {P}itman-{Y}or Processes},
+	Year = {2006}}
+
+@article{ferguson73,
+	Author = {Thomas S. Ferguson},
+	Journal = {The Annals of Statistics},
+	Pages = {209--230},
+	Title = {A {Bayesian} Analysis of Some Nonparametric Problems},
+	Volume = 1,
+	Year = 1973}
--
cgit v1.2.3
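
Editor's note: the bare Booktitle values in the entries above (ACL2008, NAACL2007, EMNLP2007, WMT2008, ACL2007, ACL2006, COLING2008, NAACL2006) are BibTeX @string macro references, not missing braces; their definitions are presumably supplied earlier in standard.bib, outside this hunk. A minimal sketch of how such an abbreviation is declared and consumed -- the macro text and the example entry below are hypothetical, for illustration only, not the project's actual definitions:

@string{ACL2008 = {Proceedings of ACL-08: HLT}}   % hypothetical expansion text; the real definition lives elsewhere in standard.bib

@inproceedings{example08,                          % hypothetical entry for illustration
	Author    = {Doe, Jane},
	Booktitle = ACL2008,                           % unbraced token: BibTeX substitutes the @string value at run time
	Year      = {2008}}

Keeping venue names in @string macros lets the whole bibliography switch between long and abbreviated proceedings titles by editing a single definition.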