return (invisible(NULL))
}
V <- length(private$pmodels)
- oneLineX <- as.data.frame(t(as.matrix(X[1,])))
+ oneLineX <- t(as.matrix(X[1,]))
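+ # Predicting a single row is enough to detect whether the model returns
+ # one value per observation or a vector of class probabilities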
if (length(private$pmodels[[1]]$model(oneLineX)) >= 2)
# Soft classification:
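+ # (average the predicted class probabilities over the V fold models)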
return (Reduce("+", lapply(private$pmodels, function(m) m$model(X))) / V)
if (is.null(params))
# Here, gmodel is a string (= its family),
# because a custom model must be given with its parameters.
- params <- as.list(private$getParams(gmodel, data, target))
+ params <- as.list(private$getParams(gmodel, data, target, task))
private$params <- params
if (is.character(gmodel))
gmodel <- private$getGmodel(gmodel, task)
}
},
- # Return a default list of parameters, given a gmodel family
+ # Return a default list of parameters, given a gmodel family and a task
- getParams = function(family, data, target) {
+ getParams = function(family, data, target, task) {
if (family == "tree") {
# Run rpart once to obtain a CV grid for parameter cp
require(rpart)
minsplit = 2,
minbucket = 1,
xval = 0)
- r <- rpart(target ~ ., df, method="class", control=ctrl)
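+ # rpart's method argument: "class" grows a classification tree, "anova" a regression tree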
+ method <- if (task == "classification") "class" else "anova"
+ r <- rpart(target ~ ., df, method=method, control=ctrl)
cps <- r$cptable[-1,1]
- if (length(cps) <= 11) {
- if (length(cps == 0))
- stop("No cross-validation possible: select another model")
+ if (length(cps) <= 1)
+ stop("No cross-validation possible: select another model")
+ if (length(cps) <= 11)
return (cps)
- }
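+ # More than 11 candidates: keep about 11 evenly spaced cp values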
step <- (length(cps) - 1) / 10
cps[unique(round(seq(1, length(cps), step)))]
}
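
For reference, the cp-grid construction of the "tree" branch can be reproduced outside the class. The sketch below is illustrative only: the helper name tree_cp_grid, the construction of df from data and target, and the cp=0 setting in rpart.control are assumptions, since the corresponding lines are not shown in the hunk above.

library(rpart)

# Illustrative standalone helper (not part of the package): build a grid of
# candidate cp values the way the "tree" branch of getParams does.
tree_cp_grid <- function(data, target, task) {
  # Assumed: df is the predictors plus a 'target' column, and cp=0 forces a deep tree
  df <- data.frame(data, target=target)
  ctrl <- rpart.control(minsplit=2, minbucket=1, cp=0, xval=0)
  method <- if (task == "classification") "class" else "anova"
  r <- rpart(target ~ ., df, method=method, control=ctrl)
  cps <- r$cptable[-1,1]  # drop the first (largest) cp value
  if (length(cps) <= 1)
    stop("No cross-validation possible: select another model")
  if (length(cps) <= 11)
    return (cps)
  # keep about 11 evenly spaced values
  step <- (length(cps) - 1) / 10
  cps[unique(round(seq(1, length(cps), step)))]
}

# Example on iris (classification): at most 11 candidate cp values
tree_cp_grid(iris[,1:4], iris$Species, "classification")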