Ergun Biçici.
RTM results for Predicting Translation Performance.
In Proc. of the Third Conf. on Machine Translation (WMT18),
Brussels, Belgium,
pages 765-769,
10 2018.
[WWW]
Keyword(s): Machine Translation,
Machine Learning,
Performance Prediction.
Abstract:
With improved prediction combination using weights based on their training performance and stacking and multilayer perceptrons to build deeper prediction models, RTMs become the 3rd system in general at the sentence-level prediction of translation scores and achieve the lowest RMSE in English to German NMT QET results. For the document-level task, we compare document-level RTM models with sentence-level RTM models obtained with the concatenation of document sentences and obtain similar results.
% WMT18 shared-task paper (quality estimation / performance prediction).
% month uses the predefined BibTeX macro (oct), not a quoted number,
% so styles can render/abbreviate it correctly.
@InProceedings{Bicici:RTM:WMT2018,
  author    = {Ergun Bi\c{c}ici},
  title     = {{RTM} results for Predicting Translation Performance},
  booktitle = {Proc. of the {T}hird {C}onf. on {M}achine {T}ranslation ({WMT18})},
  month     = oct,
  year      = {2018},
  pages     = {765--769},
  address   = {Brussels, Belgium},
  keywords  = {Machine Translation, Machine Learning, Performance Prediction},
  abstract  = {With improved prediction combination using weights based on their training performance and stacking and multilayer perceptrons to build deeper prediction models, RTMs become the 3rd system in general at the sentence-level prediction of translation scores and achieve the lowest RMSE in English to German NMT QET results. For the document-level task, we compare document-level RTM models with sentence-level RTM models obtained with the concatenation of document sentences and obtain similar results.},
  url       = {https://aclweb.org/anthology/papers/W/W18/W18-6458/},
}
Ergun Biçici.
Robust parfda Statistical Machine Translation Results.
In Proc. of the Third Conf. on Machine Translation (WMT18),
Brussels, Belgium,
pages 345-354,
10 2018.
[WWW]
Keyword(s): Machine Translation.
Abstract:
We build parallel feature decay algorithms (parfda) Moses statistical machine translation (SMT) models for language pairs in the translation task. parfda obtains results close to the top constrained phrase-based SMT with an average of $2.252$ BLEU points difference on WMT 2017 datasets using significantly less computation for building SMT systems than that would be spent using all available corpora. We obtain BLEU upper bounds based on target coverage to identify which systems used additional data. We use PRO for tuning to decrease fluctuations in the results and post-process translation outputs to decrease translation errors due to the casing of words. $F_1$ scores on the key phrases of the English to Turkish testsuite that we prepared reveal that parfda achieves $2nd$ best results. Truecasing translations before scoring obtained the best results overall.
% WMT18 shared-task paper (parfda SMT system submission).
% Fixes: restored the mangled \texttt{parfda} commands in the abstract
% (the leading "\t" was lost in extraction, leaving " exttt{parfda}"),
% braced {parfda} in the title so sentence-casing styles keep it lowercase,
% and month uses the predefined oct macro instead of a quoted number.
@InProceedings{Bicici:parfda:WMT2018,
  author    = {Ergun Bi\c{c}ici},
  title     = {Robust {parfda} Statistical Machine Translation Results},
  booktitle = {Proc. of the {T}hird {C}onf. on {M}achine {T}ranslation ({WMT18})},
  month     = oct,
  year      = {2018},
  pages     = {345--354},
  address   = {Brussels, Belgium},
  keywords  = {Machine Translation},
  abstract  = {We build parallel feature decay algorithms (\texttt{parfda}) Moses statistical machine translation (SMT) models for language pairs in the translation task. \texttt{parfda} obtains results close to the top constrained phrase-based SMT with an average of $2.252$ BLEU points difference on WMT 2017 datasets using significantly less computation for building SMT systems than that would be spent using all available corpora. We obtain BLEU upper bounds based on target coverage to identify which systems used additional data. We use PRO for tuning to decrease fluctuations in the results and post-process translation outputs to decrease translation errors due to the casing of words. $F_1$ scores on the key phrases of the English to Turkish testsuite that we prepared reveal that \texttt{parfda} achieves $2nd$ best results. Truecasing translations before scoring obtained the best results overall.},
  url       = {https://aclweb.org/anthology/papers/W/W18/W18-6405/},
}