Ergun Biçici.
Machine Translation with parfda, Moses, kenlm, nplm, and PRO.
In Proc. of the Fourth Conf. on Machine Translation (WMT19),
Florence, Italy,
pages 122–128,
August 2019.
[PDF]
[doi:10.18653/v1/W19-5306]
Keyword(s): Machine Translation.
Abstract:
We build parfda Moses statistical machine translation (SMT) models for most language pairs in the news translation task. We experiment with a hybrid approach using neural language models integrated into Moses. We obtain the constrained data statistics on the machine translation task, the coverage of the test sets, and the upper bounds on the translation results. We also contribute a new testsuite for the German-English language pair and a new automated key phrase extraction technique for the evaluation of the testsuite translations.
@InProceedings{Bicici:parfda:WMT2019,
  author    = {Bi{\c{c}}ici, Ergun},
  title     = {Machine Translation with parfda, {Moses}, kenlm, nplm, and {PRO}},
  booktitle = {Proc. of the {Fourth Conf. on Machine Translation} ({WMT19})},
  month     = aug,
  year      = {2019},
  address   = {Florence, Italy},
  pages     = {122--128},
  doi       = {10.18653/v1/W19-5306},
  keywords  = {Machine Translation},
  abstract  = {We build \texttt{parfda} Moses statistical machine translation (SMT) models for most language pairs in the news translation task. We experiment with a hybrid approach using neural language models integrated into Moses. We obtain the constrained data statistics on the machine translation task, the coverage of the test sets, and the upper bounds on the translation results. We also contribute a new testsuite for the German-English language pair and a new automated key phrase extraction technique for the evaluation of the testsuite translations.},
  pdf       = {http://bicici.github.io/publications/2019/parfda_WMT.pdf},
}
Ergun Biçici.
RTM Stacking Results for Machine Translation Performance Prediction.
In Proc. of the Fourth Conf. on Machine Translation (WMT19),
Florence, Italy,
August 2019.
[doi:10.18653/v1/W19-5405]
Keyword(s): Machine Translation,
Machine Learning,
Performance Prediction.
Abstract:
We obtain new results using referential translation machines with increased number of learning models in the set of results that are stacked to obtain a better mixture of experts prediction. We combine features extracted from the word-level predictions with the sentence- or document-level features, which significantly improve the results on the training sets but decrease the test set results.
@InProceedings{Bicici:RTM:WMT2019,
  author    = {Bi{\c{c}}ici, Ergun},
  title     = {{RTM} Stacking Results for Machine Translation Performance Prediction},
  booktitle = {Proc. of the {Fourth Conf. on Machine Translation} ({WMT19})},
  month     = aug,
  year      = {2019},
  address   = {Florence, Italy},
  doi       = {10.18653/v1/W19-5405},
  keywords  = {Machine Translation, Machine Learning, Performance Prediction},
  abstract  = {We obtain new results using referential translation machines with increased number of learning models in the set of results that are stacked to obtain a better mixture of experts prediction. We combine features extracted from the word-level predictions with the sentence- or document-level features, which significantly improve the results on the training sets but decrease the test set results.},
}