###############################################################################
# One-class SVM (novelty detection) demos using e1071::svm()
#
# Trains type = "one-classification" SVMs on Gaussian data and visualizes
# which points become support vectors (plotted in red) as `nu` and `gamma`
# vary, then checks predictions on held-out inlier/outlier points.
#
# Author: mike-bowles
###############################################################################

library(e1071)  # hard dependency: library() errors loudly if missing

# --- Demo 1: 2-dim standard normal (rho = 0), moderate nu --------------------
X <- data.frame(a = rnorm(1000), b = rnorm(1000))

m <- svm(X, gamma = 0.1, cost = 1, type = "one-classification", nu = 0.5)

# Test on one inlier (0, 0) and one clear outlier (4, 4).
newdata <- data.frame(a = c(0, 4), b = c(0, 4))
predict(m, newdata)

# Visualize: support vectors in red (color 2), others black (color 1).
# Note the parentheses: %in% binds tighter than +, but be explicit.
plot(X, col = (seq_len(1000) %in% m$index) + 1,
     xlim = c(-5, 5), ylim = c(-5, 5))
points(newdata, pch = "+", col = 2, cex = 5)

# --- Demo 2: same data, tiny nu (almost no points allowed outside) -----------
m <- svm(X, gamma = 0.1, cost = 1, type = "one-classification", nu = 0.001)

newdata <- data.frame(a = c(0, 4), b = c(0, 4))
predict(m, newdata)

plot(X, col = (seq_len(1000) %in% m$index) + 1,
     xlim = c(-5, 5), ylim = c(-5, 5))
points(newdata, pch = "+", col = 2, cex = 5)

# --- Demo 3: 3-dim data, 80 inliers + 10 outliers centered at (5, 5, 5) ------
set.seed(134)
x <- cbind(rnorm(80), rnorm(80), rnorm(80))
y <- cbind(rnorm(10, 5, 1), rnorm(10, 5, 1), rnorm(10, 5, 1))
X <- rbind(x, y)
plot(X)

m <- svm(X, gamma = 0.1, cost = 1, type = "one-classification", nu = 0.1)
plot(X, col = (seq_len(90) %in% m$index) + 1)

# Larger gamma, smaller nu: tighter decision surface, fewer support vectors.
m <- svm(X, gamma = 1, cost = 1, type = "one-classification", nu = 0.01)
plot(X, col = (seq_len(90) %in% m$index) + 1)

# --- Demo 4: more data — 1000 inliers + the same 10 outliers -----------------
set.seed(134)
x <- cbind(rnorm(1000), rnorm(1000), rnorm(1000))
y <- cbind(rnorm(10, 5, 1), rnorm(10, 5, 1), rnorm(10, 5, 1))
X <- rbind(x, y)

m <- svm(X, gamma = 0.01, cost = 1, type = "one-classification", nu = 0.1)
plot(X, col = (seq_len(1010) %in% m$index) + 1)

# --- Demo 5: train on clean data only, predict on outliers -------------------
X <- cbind(rnorm(1000), rnorm(1000), rnorm(1000))

# (Original had a duplicated assignment `m <- m <- svm(...)` — fixed.)
m <- svm(X, gamma = 0.1, cost = 1, type = "one-classification", nu = 0.001)
plot(X, col = (seq_len(1000) %in% m$index) + 1)

# All 10 points are drawn from the outlier cluster; expect FALSE predictions.
newdata <- cbind(rnorm(10, 5, 1), rnorm(10, 5, 1), rnorm(10, 5, 1))
predict(m, newdata)