@article{Tang2015,
  author   = "FengZhen Tang and Peter Tino and Pedro Antonio Guti{\'e}rrez and Huanhuan Chen",
  title    = "{T}he {B}enefits of {M}odelling {S}lack {V}ariables in {SVM}s",
  journal  = "Neural Computation",
  year     = "2015",
  volume   = "27",
  number   = "4",
  pages    = "954--981",
  month    = "April",
  doi      = "10.1162/NECO_a_00714",
  url      = "http://dx.doi.org/10.1162/NECO_a_00714",
  issn     = "0899-7667",
  keywords = "learning using privileged information, ordinal regression, slack variable modelling, support vector ordinal regression",
  abstract = "In this letter, we explore the idea of modeling slack variables in support vector machine (SVM) approaches. The study is motivated by SVM+, which models the slacks through a smooth correcting function that is determined by additional (privileged) information about the training examples not available in the test phase. We take a closer look at the meaning and consequences of smooth modeling of slacks, as opposed to determining them in an unconstrained manner through the SVM optimization program. To better understand this difference, we only allow the determination and modeling of slack values on the same information -- that is, using the same training input in the original input space. We also explore whether it is possible to improve classification performance by combining (in a convex combination) the original SVM slacks with the modeled ones. We show experimentally that this approach not only leads to improved generalization performance but also yields more compact, lower-complexity models. Finally, we extend this idea to the context of ordinal regression, where a natural order among the classes exists. The experimental results confirm principal findings from the binary case.",
  note     = "JCR (2015): 1.626 Position: 57/130 (Q2) Category: COMPUTER SCIENCE, ARTIFICIAL INTELLIGENCE",
}