forked from bkrai/Top-10-Machine-Learning-Methods-With-R
-
Notifications
You must be signed in to change notification settings - Fork 0
/
RandomForest
63 lines (52 loc) · 1.44 KB
/
RandomForest
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# Load Data ----
# Cardiotocography measurements; NSP is the outcome class (1/2/3)
csv_url <- "https://raw.githubusercontent.com/bkrai/Statistical-Modeling-and-Graphs-with-R/main/Cardiotocographic.csv"
data <- read.csv(csv_url)
str(data)
# Model the outcome as a categorical class, not a numeric code
data$NSP <- as.factor(data$NSP)
table(data$NSP)
# Data Partition ----
# Randomly tag each row: 1 -> training (~70%), 2 -> testing (~30%)
set.seed(123)
split_id <- sample(2, nrow(data), replace = TRUE, prob = c(0.7, 0.3))
train <- data[split_id == 1, ]
test  <- data[split_id == 2, ]
# Random Forest Model ----
library(randomForest)
set.seed(222)
# 300 trees, 8 candidate predictors per split; keep importance scores
# and the proximity matrix so the later plots (varImpPlot, MDSplot) work
rf <- randomForest(
  NSP ~ .,
  data = train,
  ntree = 300,
  mtry = 8,
  importance = TRUE,
  proximity = TRUE
)
# Prediction & Confusion Matrix ----
library(caret)

# In-sample (training-set) performance
p1 <- predict(rf, train)
cm <- confusionMatrix(p1, train$NSP)
cm$byClass[3, 1]  # row 3 = third class, column 1 = sensitivity

# Out-of-sample (test-set) performance
p2 <- predict(rf, test)
confusionMatrix(p2, test$NSP)
# Error Rate of Random Forest ----
# OOB error (overall and per class) as a function of the number of trees
plot(rf)

# Tune mtry ----
# Select predictors/response by NAME instead of the original hard-coded
# column index 22, which silently breaks if the CSV's column order ever
# changes. Also avoid assigning to `t`, which shadows base::t().
predictors <- train[, setdiff(names(train), "NSP")]
set.seed(222)  # tuneRF resamples internally; seed makes the search reproducible
tuned <- tuneRF(predictors, train$NSP,
                stepFactor = 0.5,   # shrink/grow mtry by this factor each step
                plot = TRUE,
                ntreeTry = 300,
                trace = TRUE,
                improve = 0.05)     # minimum relative OOB improvement to continue
# No. of Nodes for the Trees ----
# Distribution of terminal-node counts across the 300 trees
hist(treesize(rf),
     main = "No. of Nodes for the Trees",
     col = "green")

# Variable Importance ----
varImpPlot(rf,
           sort = TRUE,  # was `T`: T/F are reassignable aliases, TRUE/FALSE are not
           n.var = 10,
           main = "Top 10 - Variable Importance")
importance(rf)  # importance scores for every predictor
varUsed(rf)     # how often each predictor appears across the forest's trees
# Partial Dependence Plot ----
# Marginal effect of ASTV on the model's vote for class "2"
partialPlot(rf, pred.data = train, x.var = "ASTV", which.class = "2")

# Extract Single Tree ----
# Human-readable dump of the first tree in the forest
getTree(rf, k = 1, labelVar = TRUE)

# Multi-dimensional Scaling Plot of Proximity Matrix ----
MDSplot(rf, train$NSP)