# NOTE(review): presumably the list of per-parameter model objects, filled in
# elsewhere in the class — confirm against the initializer (NULL until then).
pmodels = NULL,
# Return the observation indices forming the test set.
#
# For V-fold CV, fold v is the v-th contiguous slice of 1:n, with fold
# boundaries obtained by rounding v * n / V (so sizes differ by at most 1);
# the slice is optionally mapped through a pre-computed permutation.
# For any other CV$type (Monte-Carlo CV), a fresh random subset of size
# round(n * CV$test_size) is drawn on each call.
#
# @param CV List describing the scheme: $type ("vfold" or Monte-Carlo),
#   $V number of folds, $test_size Monte-Carlo test proportion.
# @param v Fold number (1-based); used only when CV$type == "vfold".
# @param n Total number of observations.
# @param shuffle_inds Optional permutation of 1:n applied to v-fold slices.
get_testIndices = function(CV, v, n, shuffle_inds) {
  if (CV$type != "vfold") {
    # Monte-Carlo cross-validation: independent random draw every time
    return(sample(n, round(n * CV$test_size)))
  }
  # v-fold: take the v-th slice of the (optionally shuffled) index vector
  slice <- (round((v - 1) * n / CV$V) + 1):round(v * n / CV$V)
  if (is.null(shuffle_inds)) slice else shuffle_inds[slice]
},
# Default loss between predictions y1 and targets y2.
#
# Classification (private$task == "classification"):
#   - "hard" predictions (y1 is a label vector): misclassification rate.
#   - "soft" predictions (y1 is a probability matrix):
#       * y2 a matrix of matching shape: mean row-wise L1 distance;
#       * y2 a factor/character vector: each target is first converted to
#         the one-hot indicator of its class column, so this branch
#         computes the same row-wise L1 distance as the matrix case.
# Regression (any other task): mean absolute error.
#
# Bug fix: the factor-target branch used to compute
# sum(abs(y1[idx,] - positions[[ y2[idx] ]])), subtracting the integer
# COLUMN INDEX of the true class from every probability — a meaningless
# value inconsistent with the matrix-target branch. It now compares
# against the one-hot indicator instead.
defaultLoss = function(y1, y2) {
  if (private$task == "classification") {
    if (is.null(dim(y1)))
      # Standard case: "hard" classification
      mean(y1 != y2)
    else {
      # "Soft" classification: predict() outputs a probability matrix
      if (!is.null(dim(y2)))
        # Target already in matrix form: mean row-wise L1 distance
        mean(rowSums(abs(y1 - y2)))
      else {
        # Target is a factor: map each label to its column in y1.
        # NOTE: the user should provide target in matrix form because
        # matching y2 with columns is rather inefficient!
        y2 <- as.character(y2)
        cols <- colnames(y1)  # 'cols', not 'names': avoid shadowing base::names
        positions <- list()
        for (idx in seq_along(cols))
          positions[[ cols[idx] ]] <- idx
        nc <- ncol(y1)
        mean(vapply(
          seq_along(y2),
          function(idx) {
            # One-hot indicator of the true class for observation idx
            target <- numeric(nc)
            target[ positions[[ y2[idx] ]] ] <- 1
            sum(abs(y1[idx, ] - target))
          },
          numeric(1)))
      }
    }
  }
  else
    # Regression
    mean(abs(y1 - y2))
}