::p_load("data.table",
pacman"tidyverse",
"dplyr", "tidyr",
"ggplot2", "GGally",
"caret",
"xgboost", # For xgb.importance
"doParallel", "parallel") # For 병렬 처리
registerDoParallel(cores=detectCores()) # 사용할 Core 개수 지정
<- fread("../Titanic.csv") # 데이터 불러오기
titanic
%>%
titanic as_tibble
13 XGBoost
Advantages of XGBoost
- Parallel processing makes it faster than Gradient Boosting.
- It is more accurate than Gradient Boosting.
- It is highly flexible.
- It supports early stopping (see the sketch below).
- Overfitting can be prevented through regularization.
Disadvantages of XGBoost
- It is only fast relative to Gradient Boosting; compared with other techniques it is still slow.
- It has many hyperparameters.
- Assigning inappropriate hyperparameter values can actually degrade performance.
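To make the last two advantages concrete, below is a minimal sketch of early stopping and regularization using the native xgboost interface; the toy data and all object names (x, y, dtrain, dvalid, fit) are hypothetical and exist only so the sketch runs on its own.
# Minimal sketch: early stopping and regularization in xgboost
library(xgboost)
set.seed(1)
x <- matrix(rnorm(200 * 4), ncol = 4)    # Toy predictors (hypothetical)
y <- as.numeric(x[, 1] + rnorm(200) > 0) # Toy binary target (hypothetical)
dtrain <- xgb.DMatrix(x[1:150, ], label = y[1:150])
dvalid <- xgb.DMatrix(x[151:200, ], label = y[151:200])
params <- list(objective = "binary:logistic",
               eta = 0.3,   # Learning rate
               max_depth = 2,
               lambda = 1,  # L2 regularization on leaf weights (guards against overfitting)
               alpha = 0)   # L1 regularization on leaf weights
fit <- xgb.train(params, dtrain, nrounds = 500,
                 watchlist = list(valid = dvalid),
                 early_stopping_rounds = 10, # Stop once validation error stalls for 10 rounds
                 verbose = 0)
fit$best_iteration # Number of boosting rounds actually kept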
Practice data: a dataset recording information on the passengers aboard the Titanic when it sank on April 15, 1912, containing 11 variables in total. The Target in this data is Survived.


13.1 Loading the Data
# A tibble: 891 × 11
Survived Pclass Name Sex Age SibSp Parch Ticket Fare Cabin Embarked
<int> <int> <chr> <chr> <dbl> <int> <int> <chr> <dbl> <chr> <chr>
1 0 3 Braund, Mr. Owen Harris male 22 1 0 A/5 21171 7.25 "" S
2 1 1 Cumings, Mrs. John Bradley (Florence Briggs Thayer) female 38 1 0 PC 17599 71.3 "C85" C
3 1 3 Heikkinen, Miss. Laina female 26 0 0 STON/O2. 3101282 7.92 "" S
4 1 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35 1 0 113803 53.1 "C123" S
5 0 3 Allen, Mr. William Henry male 35 0 0 373450 8.05 "" S
6 0 3 Moran, Mr. James male NA 0 0 330877 8.46 "" Q
7 0 1 McCarthy, Mr. Timothy J male 54 0 0 17463 51.9 "E46" S
8 0 3 Palsson, Master. Gosta Leonard male 2 3 1 349909 21.1 "" S
9 1 3 Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg) female 27 0 2 347742 11.1 "" S
10 1 2 Nasser, Mrs. Nicholas (Adele Achem) female 14 1 0 237736 30.1 "" C
# ℹ 881 more rows
13.2 Data Preprocessing I
%<>%
titanic data.frame() %>% # Data Frame 형태로 변환
mutate(Survived = ifelse(Survived == 1, "yes", "no")) # Target을 문자형 변수로 변환
# 1. Convert to Factor
<- c("Pclass", "Sex",
fac.col # Target
"Survived")
<- titanic %>%
titanic mutate_at(fac.col, as.factor) # 범주형으로 변환
glimpse(titanic) # 데이터 구조 확인
Rows: 891
Columns: 11
$ Survived <fct> no, yes, yes, yes, no, no, no, no, yes, yes, yes, yes, no, no, no, yes, no, yes, no, yes, no, yes, yes, yes, no, yes, no, no, yes, no, no, yes, yes, no, no, no, yes, no, no, yes, no…
$ Pclass <fct> 3, 1, 3, 1, 3, 3, 1, 3, 3, 2, 3, 1, 3, 3, 3, 2, 3, 2, 3, 3, 2, 2, 3, 1, 3, 3, 3, 1, 3, 3, 1, 1, 3, 2, 1, 1, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 1, 2, 1, 1, 2, 3, 2, 3, 3…
$ Name <chr> "Braund, Mr. Owen Harris", "Cumings, Mrs. John Bradley (Florence Briggs Thayer)", "Heikkinen, Miss. Laina", "Futrelle, Mrs. Jacques Heath (Lily May Peel)", "Allen, Mr. William Henry…
$ Sex <fct> male, female, female, female, male, male, male, male, female, female, female, female, male, male, female, female, male, male, female, female, male, male, female, male, female, femal…
$ Age <dbl> 22.0, 38.0, 26.0, 35.0, 35.0, NA, 54.0, 2.0, 27.0, 14.0, 4.0, 58.0, 20.0, 39.0, 14.0, 55.0, 2.0, NA, 31.0, NA, 35.0, 34.0, 15.0, 28.0, 8.0, 38.0, NA, 19.0, NA, NA, 40.0, NA, NA, 66.…
$ SibSp <int> 1, 1, 0, 1, 0, 0, 0, 3, 0, 1, 1, 0, 0, 1, 0, 0, 4, 0, 1, 0, 0, 0, 0, 0, 3, 1, 0, 3, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 2, 1, 1, 1, 0, 1, 0, 0, 1, 0, 2, 1, 4, 0, 1, 1, 0, 0, 0, 0, 1, 5, 0…
$ Parch <int> 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 1, 0, 0, 5, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 5, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 2, 0…
$ Ticket <chr> "A/5 21171", "PC 17599", "STON/O2. 3101282", "113803", "373450", "330877", "17463", "349909", "347742", "237736", "PP 9549", "113783", "A/5. 2151", "347082", "350406", "248706", "38…
$ Fare <dbl> 7.2500, 71.2833, 7.9250, 53.1000, 8.0500, 8.4583, 51.8625, 21.0750, 11.1333, 30.0708, 16.7000, 26.5500, 8.0500, 31.2750, 7.8542, 16.0000, 29.1250, 13.0000, 18.0000, 7.2250, 26.0000,…
$ Cabin <chr> "", "C85", "", "C123", "", "", "E46", "", "", "", "G6", "C103", "", "", "", "", "", "", "", "", "", "D56", "", "A6", "", "", "", "C23 C25 C27", "", "", "", "B78", "", "", "", "", ""…
$ Embarked <chr> "S", "C", "S", "S", "S", "Q", "S", "S", "S", "C", "S", "S", "S", "S", "S", "S", "Q", "S", "S", "C", "S", "S", "Q", "S", "S", "S", "C", "S", "Q", "S", "C", "C", "Q", "S", "C", "S", "…
# 2. Generate New Variable
titanic <- titanic %>%
  mutate(FamSize = SibSp + Parch) # New variable FamSize = number of siblings/spouses + number of parents/children, i.e., family size

glimpse(titanic) # Check the data structure
Rows: 891
Columns: 12
$ Survived <fct> no, yes, yes, yes, no, no, no, no, yes, yes, yes, yes, no, no, no, yes, no, yes, no, yes, no, yes, yes, yes, no, yes, no, no, yes, no, no, yes, yes, no, no, no, yes, no, no, yes, no…
$ Pclass <fct> 3, 1, 3, 1, 3, 3, 1, 3, 3, 2, 3, 1, 3, 3, 3, 2, 3, 2, 3, 3, 2, 2, 3, 1, 3, 3, 3, 1, 3, 3, 1, 1, 3, 2, 1, 1, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 1, 2, 1, 1, 2, 3, 2, 3, 3…
$ Name <chr> "Braund, Mr. Owen Harris", "Cumings, Mrs. John Bradley (Florence Briggs Thayer)", "Heikkinen, Miss. Laina", "Futrelle, Mrs. Jacques Heath (Lily May Peel)", "Allen, Mr. William Henry…
$ Sex <fct> male, female, female, female, male, male, male, male, female, female, female, female, male, male, female, female, male, male, female, female, male, male, female, male, female, femal…
$ Age <dbl> 22.0, 38.0, 26.0, 35.0, 35.0, NA, 54.0, 2.0, 27.0, 14.0, 4.0, 58.0, 20.0, 39.0, 14.0, 55.0, 2.0, NA, 31.0, NA, 35.0, 34.0, 15.0, 28.0, 8.0, 38.0, NA, 19.0, NA, NA, 40.0, NA, NA, 66.…
$ SibSp <int> 1, 1, 0, 1, 0, 0, 0, 3, 0, 1, 1, 0, 0, 1, 0, 0, 4, 0, 1, 0, 0, 0, 0, 0, 3, 1, 0, 3, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 2, 1, 1, 1, 0, 1, 0, 0, 1, 0, 2, 1, 4, 0, 1, 1, 0, 0, 0, 0, 1, 5, 0…
$ Parch <int> 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 1, 0, 0, 5, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 5, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 2, 0…
$ Ticket <chr> "A/5 21171", "PC 17599", "STON/O2. 3101282", "113803", "373450", "330877", "17463", "349909", "347742", "237736", "PP 9549", "113783", "A/5. 2151", "347082", "350406", "248706", "38…
$ Fare <dbl> 7.2500, 71.2833, 7.9250, 53.1000, 8.0500, 8.4583, 51.8625, 21.0750, 11.1333, 30.0708, 16.7000, 26.5500, 8.0500, 31.2750, 7.8542, 16.0000, 29.1250, 13.0000, 18.0000, 7.2250, 26.0000,…
$ Cabin <chr> "", "C85", "", "C123", "", "", "E46", "", "", "", "G6", "C103", "", "", "", "", "", "", "", "", "", "D56", "", "A6", "", "", "", "C23 C25 C27", "", "", "", "B78", "", "", "", "", ""…
$ Embarked <chr> "S", "C", "S", "S", "S", "Q", "S", "S", "S", "C", "S", "S", "S", "S", "S", "S", "Q", "S", "S", "C", "S", "S", "Q", "S", "S", "S", "C", "S", "Q", "S", "C", "C", "Q", "S", "C", "S", "…
$ FamSize <int> 1, 1, 0, 1, 0, 0, 0, 4, 2, 1, 2, 0, 0, 6, 0, 0, 5, 0, 1, 0, 0, 0, 0, 0, 4, 6, 0, 5, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 2, 1, 1, 1, 0, 3, 0, 0, 1, 0, 2, 1, 5, 0, 1, 1, 1, 0, 0, 0, 3, 7, 0…
# 3. Select Variables used for Analysis
titanic1 <- titanic %>%
  dplyr::select(Survived, Pclass, Sex, Age, Fare, FamSize) # Select the variables used for analysis

glimpse(titanic1) # Check the data structure
Rows: 891
Columns: 6
$ Survived <fct> no, yes, yes, yes, no, no, no, no, yes, yes, yes, yes, no, no, no, yes, no, yes, no, yes, no, yes, yes, yes, no, yes, no, no, yes, no, no, yes, yes, no, no, no, yes, no, no, yes, no…
$ Pclass <fct> 3, 1, 3, 1, 3, 3, 1, 3, 3, 2, 3, 1, 3, 3, 3, 2, 3, 2, 3, 3, 2, 2, 3, 1, 3, 3, 3, 1, 3, 3, 1, 1, 3, 2, 1, 1, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 1, 2, 1, 1, 2, 3, 2, 3, 3…
$ Sex <fct> male, female, female, female, male, male, male, male, female, female, female, female, male, male, female, female, male, male, female, female, male, male, female, male, female, femal…
$ Age <dbl> 22.0, 38.0, 26.0, 35.0, 35.0, NA, 54.0, 2.0, 27.0, 14.0, 4.0, 58.0, 20.0, 39.0, 14.0, 55.0, 2.0, NA, 31.0, NA, 35.0, 34.0, 15.0, 28.0, 8.0, 38.0, NA, 19.0, NA, NA, 40.0, NA, NA, 66.…
$ Fare <dbl> 7.2500, 71.2833, 7.9250, 53.1000, 8.0500, 8.4583, 51.8625, 21.0750, 11.1333, 30.0708, 16.7000, 26.5500, 8.0500, 31.2750, 7.8542, 16.0000, 29.1250, 13.0000, 18.0000, 7.2250, 26.0000,…
$ FamSize <int> 1, 1, 0, 1, 0, 0, 0, 4, 2, 1, 2, 0, 0, 6, 0, 0, 5, 0, 1, 0, 0, 0, 0, 0, 4, 6, 0, 5, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 2, 1, 1, 1, 0, 3, 0, 0, 1, 0, 2, 1, 5, 0, 1, 1, 1, 0, 0, 0, 3, 7, 0…
13.3 Data Exploration
ggpairs(titanic1,
        aes(colour = Survived)) + # Color by the levels of the Target
  theme_bw()

ggpairs(titanic1,
        aes(colour = Survived, alpha = 0.8)) + # Color by the levels of the Target
  scale_colour_manual(values = c("purple", "cyan4")) + # Specify particular colors
  scale_fill_manual(values = c("purple", "cyan4")) +   # Specify particular colors
  theme_bw()
13.4 Data Partitioning
# Partition (Training Dataset : Test Dataset = 7:3)
y <- titanic1$Survived # Target

set.seed(200)
ind <- createDataPartition(y, p = 0.7, list = T) # Split 7:3 using indices
titanic.trd <- titanic1[ind$Resample1,]  # Training Dataset
titanic.ted <- titanic1[-ind$Resample1,] # Test Dataset
13.5 Data Preprocessing II
# Imputation
titanic.trd.Imp <- titanic.trd %>%
  mutate(Age = replace_na(Age, mean(Age, na.rm = TRUE))) # Replace missing values with the mean

titanic.ted.Imp <- titanic.ted %>%
  mutate(Age = replace_na(Age, mean(titanic.trd$Age, na.rm = TRUE))) # Impute using the Training Dataset
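The Test Dataset is imputed with the mean computed on the Training Dataset rather than its own mean; this keeps information from the test data from leaking into the preprocessing step.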
glimpse(titanic.trd.Imp) # Check the data structure
Rows: 625
Columns: 6
$ Survived <fct> no, yes, yes, no, no, no, yes, yes, yes, yes, no, no, yes, no, yes, no, yes, no, no, no, yes, no, no, yes, yes, no, no, no, no, no, yes, no, no, no, yes, no, yes, no, no, no, yes, n…
$ Pclass <fct> 3, 3, 1, 3, 3, 3, 3, 2, 3, 1, 3, 3, 2, 3, 3, 2, 1, 3, 3, 1, 3, 3, 1, 1, 3, 2, 1, 1, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 3, 3, 1, 3, 1, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 1, 2, 3…
$ Sex <fct> male, female, female, male, male, male, female, female, female, female, male, female, male, female, female, male, male, female, male, male, female, male, male, female, female, male,…
$ Age <dbl> 22.00000, 26.00000, 35.00000, 35.00000, 29.93737, 2.00000, 27.00000, 14.00000, 4.00000, 58.00000, 39.00000, 14.00000, 29.93737, 31.00000, 29.93737, 35.00000, 28.00000, 8.00000, 29.9…
$ Fare <dbl> 7.2500, 7.9250, 53.1000, 8.0500, 8.4583, 21.0750, 11.1333, 30.0708, 16.7000, 26.5500, 31.2750, 7.8542, 13.0000, 18.0000, 7.2250, 26.0000, 35.5000, 21.0750, 7.2250, 263.0000, 7.8792,…
$ FamSize <int> 1, 0, 1, 0, 0, 4, 2, 1, 2, 0, 6, 0, 0, 1, 0, 0, 0, 4, 0, 5, 0, 0, 0, 1, 0, 0, 1, 1, 0, 2, 1, 1, 1, 0, 0, 1, 0, 2, 1, 5, 1, 1, 0, 7, 0, 0, 5, 0, 2, 7, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 3…
glimpse(titanic.ted.Imp) # Check the data structure
Rows: 266
Columns: 6
$ Survived <fct> yes, no, no, yes, no, yes, yes, yes, yes, yes, no, no, yes, yes, no, yes, no, yes, yes, no, yes, no, no, no, no, no, no, yes, yes, no, no, no, no, no, no, no, no, no, no, yes, no, n…
$ Pclass <fct> 1, 1, 3, 2, 3, 2, 3, 3, 3, 2, 3, 3, 2, 2, 3, 2, 1, 3, 2, 3, 3, 2, 2, 3, 3, 3, 3, 1, 2, 2, 3, 3, 3, 3, 3, 2, 3, 2, 2, 2, 3, 3, 2, 1, 3, 1, 3, 2, 1, 3, 3, 3, 3, 3, 3, 3, 3, 1, 3, 1, 3…
$ Sex <fct> female, male, male, female, male, male, female, female, male, female, male, male, female, female, male, female, male, male, female, male, female, male, male, male, male, male, male,…
$ Age <dbl> 38.00000, 54.00000, 20.00000, 55.00000, 2.00000, 34.00000, 15.00000, 38.00000, 29.93737, 3.00000, 29.93737, 21.00000, 29.00000, 21.00000, 28.50000, 5.00000, 45.00000, 29.93737, 29.0…
$ Fare <dbl> 71.2833, 51.8625, 8.0500, 16.0000, 29.1250, 13.0000, 8.0292, 31.3875, 7.2292, 41.5792, 8.0500, 7.8000, 26.0000, 10.5000, 7.2292, 27.7500, 83.4750, 15.2458, 10.5000, 8.1583, 7.9250, …
$ FamSize <int> 1, 0, 0, 0, 5, 0, 0, 6, 0, 3, 0, 0, 1, 0, 0, 3, 1, 2, 0, 0, 6, 0, 0, 0, 0, 4, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 6, 2, 1, 0, 0, 1, 0, 2, 0, 0, 0, 0, 1, 0, 0, 1, 5, 2, 5, 0, 5, 0, 4, 0, 6…
13.6 Model Training
Boosting is an ensemble technique that trains a sequence of weak learners (simple prediction models with low individual performance). Its defining feature is that each new model is built to reflect the errors of the previous one, so learning proceeds in the direction that reduces those errors.
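To illustrate the error-correcting mechanism, here is a conceptual sketch on a hypothetical toy regression problem (not this chapter's classification task), where each weak learner is a depth-1 rpart tree fit to the residuals of the current ensemble.
# Conceptual sketch of boosting: each weak learner is fit to the current errors
library(rpart)
set.seed(1)
x <- runif(100)                             # Toy data (hypothetical)
y <- sin(2 * pi * x) + rnorm(100, sd = 0.2)
pred <- rep(mean(y), 100)                   # Initial model: the overall mean
eta  <- 0.3                                 # Learning rate
for (m in 1:50) {
  res   <- y - pred                                   # Errors of the current ensemble
  stump <- rpart(res ~ x, data = data.frame(x, res),  # Weak learner: a depth-1 tree
                 control = rpart.control(maxdepth = 1))
  pred  <- pred + eta * predict(stump, data.frame(x)) # Update in the error-reducing direction
}
mean((y - pred)^2) # The training error shrinks as weak learners are added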

XGBoost, short for Extreme Gradient Boosting, was proposed to overcome the shortcomings of Gradient Boosting.
Package "caret" provides a very practical way to run machine learning in R through a unified API. Package "caret" offers grid search, random search, and manually specified search ranges as ways to find the optimal combination of hyperparameters. Here, a grid search was performed to find the optimal combination of the hyperparameters eta, max_depth, gamma, colsample_bytree, min_child_weight, subsample, and nrounds, and a manual search range was then set based on its result. Below is the output of the grid search.

<- trainControl(method = "cv", number = 5, # 5-Fold Cross Validation (5-Fold CV)
fitControl allowParallel = TRUE) # 병렬 처리
set.seed(200) # For CV
<- train(Survived ~ ., data = titanic.trd.Imp,
xgb.fit trControl = fitControl ,
method = "xgbTree")
Caution!
When "xgbTree" is run through Package "caret", calling the function train(Target ~ predictors, data) automatically dummy-encodes the categorical predictors. If you do not want the categorical predictors to be dummy-encoded, use the function train(x = dataset containing only the predictors, y = dataset containing only the Target).
xgb.fit
eXtreme Gradient Boosting
625 samples
5 predictor
2 classes: 'no', 'yes'
No pre-processing
Resampling: Cross-Validated (5 fold)
Summary of sample sizes: 500, 500, 500, 500, 500
Resampling results across tuning parameters:
eta max_depth colsample_bytree subsample nrounds Accuracy Kappa
0.3 1 0.6 0.50 50 0.7936 0.5532288
0.3 1 0.6 0.50 100 0.7984 0.5670283
0.3 1 0.6 0.50 150 0.8016 0.5750364
0.3 1 0.6 0.75 50 0.8000 0.5683679
0.3 1 0.6 0.75 100 0.7920 0.5530621
0.3 1 0.6 0.75 150 0.7952 0.5595167
0.3 1 0.6 1.00 50 0.8048 0.5824528
0.3 1 0.6 1.00 100 0.7936 0.5567690
0.3 1 0.6 1.00 150 0.7936 0.5583194
0.3 1 0.8 0.50 50 0.7952 0.5588203
0.3 1 0.8 0.50 100 0.7952 0.5606791
0.3 1 0.8 0.50 150 0.7984 0.5678404
0.3 1 0.8 0.75 50 0.8064 0.5838573
0.3 1 0.8 0.75 100 0.7952 0.5598180
0.3 1 0.8 0.75 150 0.7984 0.5680711
0.3 1 0.8 1.00 50 0.8032 0.5776070
0.3 1 0.8 1.00 100 0.7936 0.5567448
0.3 1 0.8 1.00 150 0.7952 0.5608127
0.3 2 0.6 0.50 50 0.7984 0.5659624
0.3 2 0.6 0.50 100 0.8000 0.5681795
0.3 2 0.6 0.50 150 0.7984 0.5655375
0.3 2 0.6 0.75 50 0.8080 0.5840128
0.3 2 0.6 0.75 100 0.8160 0.6017267
0.3 2 0.6 0.75 150 0.8208 0.6148647
0.3 2 0.6 1.00 50 0.7984 0.5642618
0.3 2 0.6 1.00 100 0.8160 0.6050330
0.3 2 0.6 1.00 150 0.8144 0.6035449
0.3 2 0.8 0.50 50 0.8112 0.5928300
0.3 2 0.8 0.50 100 0.8224 0.6163725
0.3 2 0.8 0.50 150 0.8192 0.6108362
0.3 2 0.8 0.75 50 0.8144 0.5988434
0.3 2 0.8 0.75 100 0.8064 0.5841983
0.3 2 0.8 0.75 150 0.8064 0.5869076
0.3 2 0.8 1.00 50 0.8048 0.5784530
0.3 2 0.8 1.00 100 0.8080 0.5916713
0.3 2 0.8 1.00 150 0.8160 0.6074775
0.3 3 0.6 0.50 50 0.8080 0.5855842
0.3 3 0.6 0.50 100 0.7984 0.5677822
0.3 3 0.6 0.50 150 0.7936 0.5622879
0.3 3 0.6 0.75 50 0.8000 0.5690285
0.3 3 0.6 0.75 100 0.8176 0.6067952
0.3 3 0.6 0.75 150 0.8032 0.5768480
0.3 3 0.6 1.00 50 0.8064 0.5846185
0.3 3 0.6 1.00 100 0.8176 0.6114240
0.3 3 0.6 1.00 150 0.8000 0.5742391
0.3 3 0.8 0.50 50 0.7856 0.5416109
0.3 3 0.8 0.50 100 0.8080 0.5909694
0.3 3 0.8 0.50 150 0.8048 0.5833752
0.3 3 0.8 0.75 50 0.8080 0.5848742
0.3 3 0.8 0.75 100 0.8096 0.5922924
0.3 3 0.8 0.75 150 0.8096 0.5944031
0.3 3 0.8 1.00 50 0.8112 0.5955304
0.3 3 0.8 1.00 100 0.8160 0.6095351
0.3 3 0.8 1.00 150 0.8080 0.5923806
0.4 1 0.6 0.50 50 0.7984 0.5654428
0.4 1 0.6 0.50 100 0.7856 0.5417550
0.4 1 0.6 0.50 150 0.7904 0.5519082
0.4 1 0.6 0.75 50 0.7904 0.5527988
0.4 1 0.6 0.75 100 0.7936 0.5588007
0.4 1 0.6 0.75 150 0.8048 0.5850564
0.4 1 0.6 1.00 50 0.8000 0.5708883
0.4 1 0.6 1.00 100 0.7904 0.5508317
0.4 1 0.6 1.00 150 0.7952 0.5638414
0.4 1 0.8 0.50 50 0.7952 0.5596631
0.4 1 0.8 0.50 100 0.7904 0.5501620
0.4 1 0.8 0.50 150 0.7984 0.5681207
0.4 1 0.8 0.75 50 0.7920 0.5567217
0.4 1 0.8 0.75 100 0.8016 0.5713368
0.4 1 0.8 0.75 150 0.7984 0.5700493
0.4 1 0.8 1.00 50 0.8000 0.5706676
0.4 1 0.8 1.00 100 0.7952 0.5627966
0.4 1 0.8 1.00 150 0.7872 0.5463291
0.4 2 0.6 0.50 50 0.8000 0.5686933
0.4 2 0.6 0.50 100 0.8304 0.6363294
0.4 2 0.6 0.50 150 0.8240 0.6250907
0.4 2 0.6 0.75 50 0.8096 0.5875135
0.4 2 0.6 0.75 100 0.8064 0.5837448
0.4 2 0.6 0.75 150 0.8064 0.5859264
0.4 2 0.6 1.00 50 0.7952 0.5598551
0.4 2 0.6 1.00 100 0.8160 0.6065912
0.4 2 0.6 1.00 150 0.8048 0.5836471
0.4 2 0.8 0.50 50 0.7840 0.5373904
0.4 2 0.8 0.50 100 0.8080 0.5909284
0.4 2 0.8 0.50 150 0.7904 0.5520807
0.4 2 0.8 0.75 50 0.8176 0.6082601
0.4 2 0.8 0.75 100 0.8240 0.6230251
0.4 2 0.8 0.75 150 0.8096 0.5938767
0.4 2 0.8 1.00 50 0.8064 0.5852386
0.4 2 0.8 1.00 100 0.8160 0.6091437
0.4 2 0.8 1.00 150 0.8064 0.5882986
0.4 3 0.6 0.50 50 0.8016 0.5800843
0.4 3 0.6 0.50 100 0.8160 0.6076108
0.4 3 0.6 0.50 150 0.7952 0.5644753
0.4 3 0.6 0.75 50 0.8064 0.5851191
0.4 3 0.6 0.75 100 0.8144 0.6048904
0.4 3 0.6 0.75 150 0.8096 0.5969159
0.4 3 0.6 1.00 50 0.8112 0.5967624
0.4 3 0.6 1.00 100 0.8096 0.5948556
0.4 3 0.6 1.00 150 0.8080 0.5925659
0.4 3 0.8 0.50 50 0.8080 0.5919065
0.4 3 0.8 0.50 100 0.7984 0.5685959
0.4 3 0.8 0.50 150 0.7904 0.5575082
0.4 3 0.8 0.75 50 0.8016 0.5746439
0.4 3 0.8 0.75 100 0.8176 0.6118326
0.4 3 0.8 0.75 150 0.8176 0.6124456
0.4 3 0.8 1.00 50 0.8144 0.6005889
0.4 3 0.8 1.00 100 0.7936 0.5584222
0.4 3 0.8 1.00 150 0.7872 0.5478133
Tuning parameter 'gamma' was held constant at a value of 0
Tuning parameter 'min_child_weight' was held constant at a value of 1
Accuracy was used to select the optimal model using the largest value.
The final values used for the model were nrounds = 100, max_depth = 2, eta = 0.4, gamma = 0, colsample_bytree = 0.6, min_child_weight = 1 and subsample = 0.5.
plot(xgb.fit) # Plot
Result!
The output shows the accuracy for each of the 108 hyperparameter combinations (eta, max_depth, gamma, colsample_bytree, min_child_weight, subsample, nrounds) built from randomly determined candidate values, and accuracy is highest at (eta = 0.4, max_depth = 2, gamma = 0, colsample_bytree = 0.6, min_child_weight = 1, subsample = 0.5, nrounds = 100). Training can therefore be performed again with the search range set to values near this optimal combination found by the grid search.
<- expand.grid(eta = seq(0.3, 0.5, by = 0.1), # eta의 탐색 범위
customGrid max_depth = seq(1, 3, by = 1), # max_depth의 탐색 범위
gamma = seq(0.1, 1, by = 0.5), # gamma의 탐색 범위
colsample_bytree = seq(0.5, 0.7, by = 0.1), # colsample_bytree의 탐색 범위
min_child_weight = seq(1, 2, by = 1), # min_child_weight의 탐색 범위
subsample = seq(0.45, 0.55, by = 0.1), # subsample의 탐색 범위
nrounds = seq(99, 101, by = 1)) # nrounds의 탐색 범위
set.seed(200) # For CV
<- train(Survived ~ ., data = titanic.trd.Imp,
xgb.tune.fit trControl = fitControl ,
method = "xgbTree",
tuneGrid = customGrid)
xgb.tune.fit
eXtreme Gradient Boosting
625 samples
5 predictor
2 classes: 'no', 'yes'
No pre-processing
Resampling: Cross-Validated (5 fold)
Summary of sample sizes: 500, 500, 500, 500, 500
Resampling results across tuning parameters:
eta max_depth gamma colsample_bytree min_child_weight subsample nrounds Accuracy Kappa
0.3 1 0.1 0.5 1 0.45 99 0.8064 0.5838129
0.3 1 0.1 0.5 1 0.45 100 0.8048 0.5814521
0.3 1 0.1 0.5 1 0.45 101 0.8032 0.5774806
0.3 1 0.1 0.5 1 0.55 99 0.8032 0.5775456
0.3 1 0.1 0.5 1 0.55 100 0.8080 0.5881485
0.3 1 0.1 0.5 1 0.55 101 0.8048 0.5801318
0.3 1 0.1 0.5 2 0.45 99 0.8080 0.5877026
0.3 1 0.1 0.5 2 0.45 100 0.8064 0.5837763
0.3 1 0.1 0.5 2 0.45 101 0.8128 0.5982911
0.3 1 0.1 0.5 2 0.55 99 0.8000 0.5711714
0.3 1 0.1 0.5 2 0.55 100 0.8016 0.5743475
0.3 1 0.1 0.5 2 0.55 101 0.7968 0.5657900
0.3 1 0.1 0.6 1 0.45 99 0.8048 0.5834690
0.3 1 0.1 0.6 1 0.45 100 0.8032 0.5813071
0.3 1 0.1 0.6 1 0.45 101 0.8016 0.5772513
0.3 1 0.1 0.6 1 0.55 99 0.8048 0.5792279
0.3 1 0.1 0.6 1 0.55 100 0.8080 0.5846818
0.3 1 0.1 0.6 1 0.55 101 0.8048 0.5800795
0.3 1 0.1 0.6 2 0.45 99 0.7952 0.5604895
0.3 1 0.1 0.6 2 0.45 100 0.7936 0.5553880
0.3 1 0.1 0.6 2 0.45 101 0.7920 0.5536102
0.3 1 0.1 0.6 2 0.55 99 0.7920 0.5529005
0.3 1 0.1 0.6 2 0.55 100 0.7888 0.5460309
0.3 1 0.1 0.6 2 0.55 101 0.7872 0.5438934
0.3 1 0.1 0.7 1 0.45 99 0.8032 0.5753638
0.3 1 0.1 0.7 1 0.45 100 0.8000 0.5679317
0.3 1 0.1 0.7 1 0.45 101 0.8032 0.5744805
0.3 1 0.1 0.7 1 0.55 99 0.7968 0.5628348
0.3 1 0.1 0.7 1 0.55 100 0.7952 0.5588770
0.3 1 0.1 0.7 1 0.55 101 0.7968 0.5619139
0.3 1 0.1 0.7 2 0.45 99 0.7920 0.5530767
0.3 1 0.1 0.7 2 0.45 100 0.7920 0.5527731
0.3 1 0.1 0.7 2 0.45 101 0.7920 0.5525992
0.3 1 0.1 0.7 2 0.55 99 0.7936 0.5540954
0.3 1 0.1 0.7 2 0.55 100 0.8000 0.5666202
0.3 1 0.1 0.7 2 0.55 101 0.8032 0.5739548
0.3 1 0.6 0.5 1 0.45 99 0.7936 0.5564740
0.3 1 0.6 0.5 1 0.45 100 0.7952 0.5581319
0.3 1 0.6 0.5 1 0.45 101 0.7968 0.5605810
0.3 1 0.6 0.5 1 0.55 99 0.7968 0.5633042
0.3 1 0.6 0.5 1 0.55 100 0.8000 0.5719791
0.3 1 0.6 0.5 1 0.55 101 0.7968 0.5641198
0.3 1 0.6 0.5 2 0.45 99 0.8016 0.5737968
0.3 1 0.6 0.5 2 0.45 100 0.7952 0.5585167
0.3 1 0.6 0.5 2 0.45 101 0.7920 0.5504370
0.3 1 0.6 0.5 2 0.55 99 0.8048 0.5790185
0.3 1 0.6 0.5 2 0.55 100 0.8080 0.5852328
0.3 1 0.6 0.5 2 0.55 101 0.8000 0.5681826
0.3 1 0.6 0.6 1 0.45 99 0.7936 0.5563138
0.3 1 0.6 0.6 1 0.45 100 0.7936 0.5569457
0.3 1 0.6 0.6 1 0.45 101 0.7904 0.5508814
0.3 1 0.6 0.6 1 0.55 99 0.7824 0.5305113
0.3 1 0.6 0.6 1 0.55 100 0.7856 0.5374017
0.3 1 0.6 0.6 1 0.55 101 0.7904 0.5474332
0.3 1 0.6 0.6 2 0.45 99 0.8032 0.5756352
0.3 1 0.6 0.6 2 0.45 100 0.8080 0.5844591
0.3 1 0.6 0.6 2 0.45 101 0.8032 0.5747264
0.3 1 0.6 0.6 2 0.55 99 0.7904 0.5503540
0.3 1 0.6 0.6 2 0.55 100 0.7920 0.5527563
0.3 1 0.6 0.6 2 0.55 101 0.7920 0.5511523
0.3 1 0.6 0.7 1 0.45 99 0.7984 0.5650223
0.3 1 0.6 0.7 1 0.45 100 0.8048 0.5805166
0.3 1 0.6 0.7 1 0.45 101 0.8016 0.5747020
0.3 1 0.6 0.7 1 0.55 99 0.7936 0.5594105
0.3 1 0.6 0.7 1 0.55 100 0.7984 0.5674121
0.3 1 0.6 0.7 1 0.55 101 0.7952 0.5596154
0.3 1 0.6 0.7 2 0.45 99 0.8064 0.5827225
0.3 1 0.6 0.7 2 0.45 100 0.8032 0.5757635
0.3 1 0.6 0.7 2 0.45 101 0.7968 0.5641691
0.3 1 0.6 0.7 2 0.55 99 0.8032 0.5760319
0.3 1 0.6 0.7 2 0.55 100 0.8032 0.5759375
0.3 1 0.6 0.7 2 0.55 101 0.8064 0.5836489
0.3 2 0.1 0.5 1 0.45 99 0.7984 0.5659478
0.3 2 0.1 0.5 1 0.45 100 0.8000 0.5700399
0.3 2 0.1 0.5 1 0.45 101 0.7984 0.5646069
0.3 2 0.1 0.5 1 0.55 99 0.8032 0.5767185
0.3 2 0.1 0.5 1 0.55 100 0.8048 0.5810878
0.3 2 0.1 0.5 1 0.55 101 0.8016 0.5721704
0.3 2 0.1 0.5 2 0.45 99 0.8128 0.5953302
0.3 2 0.1 0.5 2 0.45 100 0.8176 0.6047191
0.3 2 0.1 0.5 2 0.45 101 0.8144 0.5969698
0.3 2 0.1 0.5 2 0.55 99 0.8016 0.5733767
0.3 2 0.1 0.5 2 0.55 100 0.8112 0.5946178
0.3 2 0.1 0.5 2 0.55 101 0.8096 0.5907259
0.3 2 0.1 0.6 1 0.45 99 0.8096 0.5936793
0.3 2 0.1 0.6 1 0.45 100 0.8032 0.5783268
0.3 2 0.1 0.6 1 0.45 101 0.8048 0.5820366
0.3 2 0.1 0.6 1 0.55 99 0.8144 0.5984934
0.3 2 0.1 0.6 1 0.55 100 0.8080 0.5845359
0.3 2 0.1 0.6 1 0.55 101 0.8144 0.5978417
0.3 2 0.1 0.6 2 0.45 99 0.8096 0.5899417
0.3 2 0.1 0.6 2 0.45 100 0.8048 0.5774758
0.3 2 0.1 0.6 2 0.45 101 0.8064 0.5823657
0.3 2 0.1 0.6 2 0.55 99 0.8032 0.5752657
0.3 2 0.1 0.6 2 0.55 100 0.8032 0.5759264
0.3 2 0.1 0.6 2 0.55 101 0.8080 0.5847341
0.3 2 0.1 0.7 1 0.45 99 0.8064 0.5824642
0.3 2 0.1 0.7 1 0.45 100 0.8016 0.5732246
0.3 2 0.1 0.7 1 0.45 101 0.8048 0.5791015
0.3 2 0.1 0.7 1 0.55 99 0.8144 0.6017616
0.3 2 0.1 0.7 1 0.55 100 0.8096 0.5906118
0.3 2 0.1 0.7 1 0.55 101 0.8112 0.5930176
0.3 2 0.1 0.7 2 0.45 99 0.8016 0.5705171
0.3 2 0.1 0.7 2 0.45 100 0.8016 0.5693442
0.3 2 0.1 0.7 2 0.45 101 0.7984 0.5639016
0.3 2 0.1 0.7 2 0.55 99 0.8032 0.5785788
0.3 2 0.1 0.7 2 0.55 100 0.8048 0.5823144
0.3 2 0.1 0.7 2 0.55 101 0.8112 0.5972521
0.3 2 0.6 0.5 1 0.45 99 0.8112 0.5959995
0.3 2 0.6 0.5 1 0.45 100 0.8016 0.5752524
0.3 2 0.6 0.5 1 0.45 101 0.8048 0.5823796
0.3 2 0.6 0.5 1 0.55 99 0.8048 0.5797549
0.3 2 0.6 0.5 1 0.55 100 0.8064 0.5841294
0.3 2 0.6 0.5 1 0.55 101 0.8032 0.5767271
0.3 2 0.6 0.5 2 0.45 99 0.8048 0.5787174
0.3 2 0.6 0.5 2 0.45 100 0.8064 0.5823871
0.3 2 0.6 0.5 2 0.45 101 0.8144 0.6004399
0.3 2 0.6 0.5 2 0.55 99 0.8224 0.6144061
0.3 2 0.6 0.5 2 0.55 100 0.8176 0.6050864
0.3 2 0.6 0.5 2 0.55 101 0.8144 0.5971932
0.3 2 0.6 0.6 1 0.45 99 0.8080 0.5874888
0.3 2 0.6 0.6 1 0.45 100 0.8048 0.5812470
0.3 2 0.6 0.6 1 0.45 101 0.8048 0.5812451
0.3 2 0.6 0.6 1 0.55 99 0.8112 0.5957263
0.3 2 0.6 0.6 1 0.55 100 0.8064 0.5839784
0.3 2 0.6 0.6 1 0.55 101 0.8048 0.5799662
0.3 2 0.6 0.6 2 0.45 99 0.8080 0.5881105
0.3 2 0.6 0.6 2 0.45 100 0.8160 0.6046260
0.3 2 0.6 0.6 2 0.45 101 0.8192 0.6111008
0.3 2 0.6 0.6 2 0.55 99 0.8160 0.6028799
0.3 2 0.6 0.6 2 0.55 100 0.8160 0.6018224
0.3 2 0.6 0.6 2 0.55 101 0.8048 0.5770912
0.3 2 0.6 0.7 1 0.45 99 0.8096 0.5903734
0.3 2 0.6 0.7 1 0.45 100 0.8096 0.5895973
0.3 2 0.6 0.7 1 0.45 101 0.8080 0.5859832
0.3 2 0.6 0.7 1 0.55 99 0.8000 0.5726832
0.3 2 0.6 0.7 1 0.55 100 0.8016 0.5755417
0.3 2 0.6 0.7 1 0.55 101 0.8016 0.5761515
0.3 2 0.6 0.7 2 0.45 99 0.8192 0.6111638
0.3 2 0.6 0.7 2 0.45 100 0.8240 0.6204912
0.3 2 0.6 0.7 2 0.45 101 0.8128 0.5987658
0.3 2 0.6 0.7 2 0.55 99 0.8128 0.5956023
0.3 2 0.6 0.7 2 0.55 100 0.8096 0.5894324
0.3 2 0.6 0.7 2 0.55 101 0.8160 0.6027735
0.3 3 0.1 0.5 1 0.45 99 0.8048 0.5839084
0.3 3 0.1 0.5 1 0.45 100 0.8112 0.5978148
0.3 3 0.1 0.5 1 0.45 101 0.8096 0.5960775
0.3 3 0.1 0.5 1 0.55 99 0.8096 0.5926541
0.3 3 0.1 0.5 1 0.55 100 0.8096 0.5950085
0.3 3 0.1 0.5 1 0.55 101 0.8128 0.6005444
0.3 3 0.1 0.5 2 0.45 99 0.8208 0.6162723
0.3 3 0.1 0.5 2 0.45 100 0.8080 0.5888067
0.3 3 0.1 0.5 2 0.45 101 0.8096 0.5919595
0.3 3 0.1 0.5 2 0.55 99 0.8144 0.6041075
0.3 3 0.1 0.5 2 0.55 100 0.8176 0.6115186
0.3 3 0.1 0.5 2 0.55 101 0.8160 0.6075260
0.3 3 0.1 0.6 1 0.45 99 0.8112 0.5932933
0.3 3 0.1 0.6 1 0.45 100 0.8144 0.5997478
0.3 3 0.1 0.6 1 0.45 101 0.8080 0.5858388
0.3 3 0.1 0.6 1 0.55 99 0.8160 0.6042108
0.3 3 0.1 0.6 1 0.55 100 0.8224 0.6173801
0.3 3 0.1 0.6 1 0.55 101 0.8208 0.6146349
0.3 3 0.1 0.6 2 0.45 99 0.8096 0.5937302
0.3 3 0.1 0.6 2 0.45 100 0.8064 0.5869546
0.3 3 0.1 0.6 2 0.45 101 0.8032 0.5790146
0.3 3 0.1 0.6 2 0.55 99 0.8208 0.6175102
0.3 3 0.1 0.6 2 0.55 100 0.8192 0.6143484
0.3 3 0.1 0.6 2 0.55 101 0.8208 0.6182049
0.3 3 0.1 0.7 1 0.45 99 0.8144 0.6017376
0.3 3 0.1 0.7 1 0.45 100 0.8096 0.5918062
0.3 3 0.1 0.7 1 0.45 101 0.8096 0.5915797
0.3 3 0.1 0.7 1 0.55 99 0.8048 0.5841298
0.3 3 0.1 0.7 1 0.55 100 0.8080 0.5911333
0.3 3 0.1 0.7 1 0.55 101 0.8032 0.5807817
0.3 3 0.1 0.7 2 0.45 99 0.8048 0.5795917
0.3 3 0.1 0.7 2 0.45 100 0.8048 0.5816915
0.3 3 0.1 0.7 2 0.45 101 0.8064 0.5855839
0.3 3 0.1 0.7 2 0.55 99 0.8064 0.5860909
0.3 3 0.1 0.7 2 0.55 100 0.8080 0.5893588
0.3 3 0.1 0.7 2 0.55 101 0.8080 0.5898876
0.3 3 0.6 0.5 1 0.45 99 0.8112 0.5979589
0.3 3 0.6 0.5 1 0.45 100 0.8112 0.5987583
0.3 3 0.6 0.5 1 0.45 101 0.8080 0.5910979
0.3 3 0.6 0.5 1 0.55 99 0.8016 0.5724942
0.3 3 0.6 0.5 1 0.55 100 0.8144 0.6017634
0.3 3 0.6 0.5 1 0.55 101 0.8112 0.5947893
0.3 3 0.6 0.5 2 0.45 99 0.8224 0.6170823
0.3 3 0.6 0.5 2 0.45 100 0.8272 0.6281251
0.3 3 0.6 0.5 2 0.45 101 0.8256 0.6256626
0.3 3 0.6 0.5 2 0.55 99 0.8144 0.6007998
0.3 3 0.6 0.5 2 0.55 100 0.8064 0.5833309
0.3 3 0.6 0.5 2 0.55 101 0.7984 0.5666751
0.3 3 0.6 0.6 1 0.45 99 0.8048 0.5810873
0.3 3 0.6 0.6 1 0.45 100 0.8080 0.5917270
0.3 3 0.6 0.6 1 0.45 101 0.8112 0.5953732
0.3 3 0.6 0.6 1 0.55 99 0.8032 0.5777434
0.3 3 0.6 0.6 1 0.55 100 0.8032 0.5800519
0.3 3 0.6 0.6 1 0.55 101 0.8048 0.5843351
0.3 3 0.6 0.6 2 0.45 99 0.8160 0.6065282
0.3 3 0.6 0.6 2 0.45 100 0.8208 0.6165114
0.3 3 0.6 0.6 2 0.45 101 0.8160 0.6058498
0.3 3 0.6 0.6 2 0.55 99 0.8048 0.5828182
0.3 3 0.6 0.6 2 0.55 100 0.8160 0.6055006
0.3 3 0.6 0.6 2 0.55 101 0.8080 0.5882109
0.3 3 0.6 0.7 1 0.45 99 0.8080 0.5888975
0.3 3 0.6 0.7 1 0.45 100 0.8016 0.5762706
0.3 3 0.6 0.7 1 0.45 101 0.8032 0.5794309
0.3 3 0.6 0.7 1 0.55 99 0.8048 0.5821583
0.3 3 0.6 0.7 1 0.55 100 0.8048 0.5811598
0.3 3 0.6 0.7 1 0.55 101 0.8112 0.5944848
0.3 3 0.6 0.7 2 0.45 99 0.8064 0.5824673
0.3 3 0.6 0.7 2 0.45 100 0.8096 0.5918120
0.3 3 0.6 0.7 2 0.45 101 0.8080 0.5913199
0.3 3 0.6 0.7 2 0.55 99 0.8080 0.5889894
0.3 3 0.6 0.7 2 0.55 100 0.8016 0.5747475
0.3 3 0.6 0.7 2 0.55 101 0.8048 0.5802493
0.4 1 0.1 0.5 1 0.45 99 0.7968 0.5648428
0.4 1 0.1 0.5 1 0.45 100 0.7968 0.5665363
0.4 1 0.1 0.5 1 0.45 101 0.8016 0.5755251
0.4 1 0.1 0.5 1 0.55 99 0.8000 0.5712934
0.4 1 0.1 0.5 1 0.55 100 0.7968 0.5633200
0.4 1 0.1 0.5 1 0.55 101 0.7984 0.5667448
0.4 1 0.1 0.5 2 0.45 99 0.8064 0.5874486
0.4 1 0.1 0.5 2 0.45 100 0.8112 0.5960756
0.4 1 0.1 0.5 2 0.45 101 0.8160 0.6047659
0.4 1 0.1 0.5 2 0.55 99 0.7888 0.5478920
0.4 1 0.1 0.5 2 0.55 100 0.7936 0.5580187
0.4 1 0.1 0.5 2 0.55 101 0.7888 0.5488473
0.4 1 0.1 0.6 1 0.45 99 0.7984 0.5661084
0.4 1 0.1 0.6 1 0.45 100 0.8032 0.5749515
0.4 1 0.1 0.6 1 0.45 101 0.7968 0.5617222
0.4 1 0.1 0.6 1 0.55 99 0.7984 0.5681274
0.4 1 0.1 0.6 1 0.55 100 0.8000 0.5734568
0.4 1 0.1 0.6 1 0.55 101 0.8032 0.5793127
0.4 1 0.1 0.6 2 0.45 99 0.7952 0.5575754
0.4 1 0.1 0.6 2 0.45 100 0.7856 0.5373404
0.4 1 0.1 0.6 2 0.45 101 0.7840 0.5339368
0.4 1 0.1 0.6 2 0.55 99 0.7888 0.5471573
0.4 1 0.1 0.6 2 0.55 100 0.7888 0.5465656
0.4 1 0.1 0.6 2 0.55 101 0.7904 0.5513458
0.4 1 0.1 0.7 1 0.45 99 0.7888 0.5451314
0.4 1 0.1 0.7 1 0.45 100 0.7888 0.5477034
0.4 1 0.1 0.7 1 0.45 101 0.7936 0.5564137
0.4 1 0.1 0.7 1 0.55 99 0.7888 0.5441428
0.4 1 0.1 0.7 1 0.55 100 0.7888 0.5430693
0.4 1 0.1 0.7 1 0.55 101 0.7952 0.5585235
0.4 1 0.1 0.7 2 0.45 99 0.8096 0.5879311
0.4 1 0.1 0.7 2 0.45 100 0.8080 0.5836482
0.4 1 0.1 0.7 2 0.45 101 0.8064 0.5812550
0.4 1 0.1 0.7 2 0.55 99 0.8048 0.5825363
0.4 1 0.1 0.7 2 0.55 100 0.8096 0.5932939
0.4 1 0.1 0.7 2 0.55 101 0.8112 0.5960429
0.4 1 0.6 0.5 1 0.45 99 0.8032 0.5791064
0.4 1 0.6 0.5 1 0.45 100 0.8064 0.5855774
0.4 1 0.6 0.5 1 0.45 101 0.8000 0.5717159
0.4 1 0.6 0.5 1 0.55 99 0.8016 0.5710198
0.4 1 0.6 0.5 1 0.55 100 0.7920 0.5480072
0.4 1 0.6 0.5 1 0.55 101 0.7920 0.5502826
0.4 1 0.6 0.5 2 0.45 99 0.7936 0.5640744
0.4 1 0.6 0.5 2 0.45 100 0.7968 0.5681072
0.4 1 0.6 0.5 2 0.45 101 0.7968 0.5704825
0.4 1 0.6 0.5 2 0.55 99 0.8096 0.5930286
0.4 1 0.6 0.5 2 0.55 100 0.8080 0.5897734
0.4 1 0.6 0.5 2 0.55 101 0.8064 0.5879978
0.4 1 0.6 0.6 1 0.45 99 0.8064 0.5861577
0.4 1 0.6 0.6 1 0.45 100 0.8048 0.5815993
0.4 1 0.6 0.6 1 0.45 101 0.7984 0.5657947
0.4 1 0.6 0.6 1 0.55 99 0.7968 0.5624708
0.4 1 0.6 0.6 1 0.55 100 0.7984 0.5658045
0.4 1 0.6 0.6 1 0.55 101 0.7936 0.5576578
0.4 1 0.6 0.6 2 0.45 99 0.8032 0.5763327
0.4 1 0.6 0.6 2 0.45 100 0.7968 0.5628419
0.4 1 0.6 0.6 2 0.45 101 0.7936 0.5553905
0.4 1 0.6 0.6 2 0.55 99 0.8112 0.5930763
0.4 1 0.6 0.6 2 0.55 100 0.8048 0.5764562
0.4 1 0.6 0.6 2 0.55 101 0.8080 0.5844435
0.4 1 0.6 0.7 1 0.45 99 0.7872 0.5397521
0.4 1 0.6 0.7 1 0.45 100 0.7872 0.5404324
0.4 1 0.6 0.7 1 0.45 101 0.7936 0.5536214
0.4 1 0.6 0.7 1 0.55 99 0.7936 0.5583798
0.4 1 0.6 0.7 1 0.55 100 0.7920 0.5563395
0.4 1 0.6 0.7 1 0.55 101 0.8000 0.5725743
0.4 1 0.6 0.7 2 0.45 99 0.8048 0.5785592
0.4 1 0.6 0.7 2 0.45 100 0.8000 0.5696905
0.4 1 0.6 0.7 2 0.45 101 0.8000 0.5692470
0.4 1 0.6 0.7 2 0.55 99 0.8032 0.5770293
0.4 1 0.6 0.7 2 0.55 100 0.7984 0.5663165
0.4 1 0.6 0.7 2 0.55 101 0.8000 0.5709361
0.4 2 0.1 0.5 1 0.45 99 0.8144 0.5982400
0.4 2 0.1 0.5 1 0.45 100 0.8208 0.6150196
0.4 2 0.1 0.5 1 0.45 101 0.8256 0.6255417
0.4 2 0.1 0.5 1 0.55 99 0.8240 0.6220474
0.4 2 0.1 0.5 1 0.55 100 0.8256 0.6249393
0.4 2 0.1 0.5 1 0.55 101 0.8224 0.6173992
0.4 2 0.1 0.5 2 0.45 99 0.8208 0.6127408
0.4 2 0.1 0.5 2 0.45 100 0.8160 0.6039160
0.4 2 0.1 0.5 2 0.45 101 0.8096 0.5890128
0.4 2 0.1 0.5 2 0.55 99 0.8048 0.5773155
0.4 2 0.1 0.5 2 0.55 100 0.8208 0.6129043
0.4 2 0.1 0.5 2 0.55 101 0.8160 0.6023447
0.4 2 0.1 0.6 1 0.45 99 0.8080 0.5899733
0.4 2 0.1 0.6 1 0.45 100 0.8048 0.5817091
0.4 2 0.1 0.6 1 0.45 101 0.8016 0.5742258
0.4 2 0.1 0.6 1 0.55 99 0.8144 0.6012797
0.4 2 0.1 0.6 1 0.55 100 0.8192 0.6116334
0.4 2 0.1 0.6 1 0.55 101 0.8128 0.5997723
0.4 2 0.1 0.6 2 0.45 99 0.8096 0.5919074
0.4 2 0.1 0.6 2 0.45 100 0.8112 0.5950910
0.4 2 0.1 0.6 2 0.45 101 0.8128 0.5988078
0.4 2 0.1 0.6 2 0.55 99 0.8032 0.5767340
0.4 2 0.1 0.6 2 0.55 100 0.8096 0.5904993
0.4 2 0.1 0.6 2 0.55 101 0.8128 0.5967108
0.4 2 0.1 0.7 1 0.45 99 0.7984 0.5715282
0.4 2 0.1 0.7 1 0.45 100 0.7936 0.5603396
0.4 2 0.1 0.7 1 0.45 101 0.8000 0.5733208
0.4 2 0.1 0.7 1 0.55 99 0.8080 0.5900799
0.4 2 0.1 0.7 1 0.55 100 0.8032 0.5788321
0.4 2 0.1 0.7 1 0.55 101 0.8048 0.5818428
0.4 2 0.1 0.7 2 0.45 99 0.7936 0.5570264
0.4 2 0.1 0.7 2 0.45 100 0.7968 0.5604493
0.4 2 0.1 0.7 2 0.45 101 0.7952 0.5579014
0.4 2 0.1 0.7 2 0.55 99 0.8112 0.5972013
0.4 2 0.1 0.7 2 0.55 100 0.8096 0.5934287
0.4 2 0.1 0.7 2 0.55 101 0.8032 0.5803223
0.4 2 0.6 0.5 1 0.45 99 0.7888 0.5469905
0.4 2 0.6 0.5 1 0.45 100 0.7872 0.5441030
0.4 2 0.6 0.5 1 0.45 101 0.7904 0.5501000
0.4 2 0.6 0.5 1 0.55 99 0.8112 0.5916252
0.4 2 0.6 0.5 1 0.55 100 0.8128 0.5968697
0.4 2 0.6 0.5 1 0.55 101 0.8112 0.5917534
0.4 2 0.6 0.5 2 0.45 99 0.8032 0.5797886
0.4 2 0.6 0.5 2 0.45 100 0.8160 0.6050679
0.4 2 0.6 0.5 2 0.45 101 0.8112 0.5937308
0.4 2 0.6 0.5 2 0.55 99 0.8144 0.6012854
0.4 2 0.6 0.5 2 0.55 100 0.8144 0.5998335
0.4 2 0.6 0.5 2 0.55 101 0.8112 0.5938388
0.4 2 0.6 0.6 1 0.45 99 0.8000 0.5725380
0.4 2 0.6 0.6 1 0.45 100 0.8048 0.5831854
0.4 2 0.6 0.6 1 0.45 101 0.8048 0.5830149
0.4 2 0.6 0.6 1 0.55 99 0.8032 0.5765950
0.4 2 0.6 0.6 1 0.55 100 0.7968 0.5649878
0.4 2 0.6 0.6 1 0.55 101 0.8000 0.5707581
0.4 2 0.6 0.6 2 0.45 99 0.8144 0.5992945
0.4 2 0.6 0.6 2 0.45 100 0.8112 0.5918309
0.4 2 0.6 0.6 2 0.45 101 0.8128 0.5972461
0.4 2 0.6 0.6 2 0.55 99 0.8048 0.5762582
0.4 2 0.6 0.6 2 0.55 100 0.8144 0.5979076
0.4 2 0.6 0.6 2 0.55 101 0.8224 0.6136496
0.4 2 0.6 0.7 1 0.45 99 0.8128 0.5978036
0.4 2 0.6 0.7 1 0.45 100 0.7984 0.5679823
0.4 2 0.6 0.7 1 0.45 101 0.8016 0.5727478
0.4 2 0.6 0.7 1 0.55 99 0.8160 0.6089290
0.4 2 0.6 0.7 1 0.55 100 0.8160 0.6083179
0.4 2 0.6 0.7 1 0.55 101 0.8064 0.5864786
0.4 2 0.6 0.7 2 0.45 99 0.8080 0.5879538
0.4 2 0.6 0.7 2 0.45 100 0.8160 0.6046354
0.4 2 0.6 0.7 2 0.45 101 0.8144 0.6030495
0.4 2 0.6 0.7 2 0.55 99 0.8256 0.6236890
0.4 2 0.6 0.7 2 0.55 100 0.8256 0.6254991
0.4 2 0.6 0.7 2 0.55 101 0.8304 0.6359992
0.4 3 0.1 0.5 1 0.45 99 0.8000 0.5739537
0.4 3 0.1 0.5 1 0.45 100 0.7952 0.5667440
0.4 3 0.1 0.5 1 0.45 101 0.8048 0.5855250
0.4 3 0.1 0.5 1 0.55 99 0.8144 0.6046439
0.4 3 0.1 0.5 1 0.55 100 0.8128 0.6012938
0.4 3 0.1 0.5 1 0.55 101 0.8064 0.5870635
0.4 3 0.1 0.5 2 0.45 99 0.8224 0.6157011
0.4 3 0.1 0.5 2 0.45 100 0.8192 0.6088574
0.4 3 0.1 0.5 2 0.45 101 0.8256 0.6234331
0.4 3 0.1 0.5 2 0.55 99 0.8112 0.5966845
0.4 3 0.1 0.5 2 0.55 100 0.8032 0.5825946
0.4 3 0.1 0.5 2 0.55 101 0.8080 0.5945848
0.4 3 0.1 0.6 1 0.45 99 0.7984 0.5700555
0.4 3 0.1 0.6 1 0.45 100 0.7984 0.5691527
0.4 3 0.1 0.6 1 0.45 101 0.7984 0.5711980
0.4 3 0.1 0.6 1 0.55 99 0.7936 0.5576469
0.4 3 0.1 0.6 1 0.55 100 0.7984 0.5700712
0.4 3 0.1 0.6 1 0.55 101 0.7984 0.5697263
0.4 3 0.1 0.6 2 0.45 99 0.7952 0.5613850
0.4 3 0.1 0.6 2 0.45 100 0.7984 0.5695710
0.4 3 0.1 0.6 2 0.45 101 0.8080 0.5884539
0.4 3 0.1 0.6 2 0.55 99 0.8000 0.5756429
0.4 3 0.1 0.6 2 0.55 100 0.7920 0.5581939
0.4 3 0.1 0.6 2 0.55 101 0.7952 0.5640903
0.4 3 0.1 0.7 1 0.45 99 0.8080 0.5867722
0.4 3 0.1 0.7 1 0.45 100 0.8096 0.5901813
0.4 3 0.1 0.7 1 0.45 101 0.8080 0.5894819
0.4 3 0.1 0.7 1 0.55 99 0.7952 0.5616378
0.4 3 0.1 0.7 1 0.55 100 0.7984 0.5702454
0.4 3 0.1 0.7 1 0.55 101 0.7984 0.5692306
0.4 3 0.1 0.7 2 0.45 99 0.8192 0.6144148
0.4 3 0.1 0.7 2 0.45 100 0.8144 0.6045889
0.4 3 0.1 0.7 2 0.45 101 0.8176 0.6116084
0.4 3 0.1 0.7 2 0.55 99 0.8016 0.5733535
0.4 3 0.1 0.7 2 0.55 100 0.7968 0.5634136
0.4 3 0.1 0.7 2 0.55 101 0.7984 0.5674154
0.4 3 0.6 0.5 1 0.45 99 0.7920 0.5543971
0.4 3 0.6 0.5 1 0.45 100 0.7920 0.5539615
0.4 3 0.6 0.5 1 0.45 101 0.8064 0.5862700
0.4 3 0.6 0.5 1 0.55 99 0.7984 0.5698990
0.4 3 0.6 0.5 1 0.55 100 0.7968 0.5668926
0.4 3 0.6 0.5 1 0.55 101 0.8000 0.5728400
0.4 3 0.6 0.5 2 0.45 99 0.8080 0.5864349
0.4 3 0.6 0.5 2 0.45 100 0.8048 0.5801866
0.4 3 0.6 0.5 2 0.45 101 0.7920 0.5536591
0.4 3 0.6 0.5 2 0.55 99 0.8000 0.5712036
0.4 3 0.6 0.5 2 0.55 100 0.8080 0.5865977
0.4 3 0.6 0.5 2 0.55 101 0.8064 0.5840325
0.4 3 0.6 0.6 1 0.45 99 0.7904 0.5543360
0.4 3 0.6 0.6 1 0.45 100 0.7888 0.5471223
0.4 3 0.6 0.6 1 0.45 101 0.8016 0.5767752
0.4 3 0.6 0.6 1 0.55 99 0.8000 0.5701150
0.4 3 0.6 0.6 1 0.55 100 0.7968 0.5624274
0.4 3 0.6 0.6 1 0.55 101 0.7984 0.5650349
0.4 3 0.6 0.6 2 0.45 99 0.7984 0.5649638
0.4 3 0.6 0.6 2 0.45 100 0.7936 0.5564593
0.4 3 0.6 0.6 2 0.45 101 0.8000 0.5711640
0.4 3 0.6 0.6 2 0.55 99 0.8048 0.5835703
0.4 3 0.6 0.6 2 0.55 100 0.8144 0.6044045
0.4 3 0.6 0.6 2 0.55 101 0.8096 0.5950353
0.4 3 0.6 0.7 1 0.45 99 0.8016 0.5772039
0.4 3 0.6 0.7 1 0.45 100 0.8048 0.5851602
0.4 3 0.6 0.7 1 0.45 101 0.7936 0.5619635
0.4 3 0.6 0.7 1 0.55 99 0.8064 0.5895548
0.4 3 0.6 0.7 1 0.55 100 0.8096 0.5966714
0.4 3 0.6 0.7 1 0.55 101 0.8096 0.5970981
0.4 3 0.6 0.7 2 0.45 99 0.8000 0.5720680
0.4 3 0.6 0.7 2 0.45 100 0.7952 0.5613857
0.4 3 0.6 0.7 2 0.45 101 0.8000 0.5705116
0.4 3 0.6 0.7 2 0.55 99 0.8000 0.5739845
0.4 3 0.6 0.7 2 0.55 100 0.7936 0.5613344
0.4 3 0.6 0.7 2 0.55 101 0.7968 0.5670451
0.5 1 0.1 0.5 1 0.45 99 0.8032 0.5763495
0.5 1 0.1 0.5 1 0.45 100 0.7936 0.5551323
0.5 1 0.1 0.5 1 0.45 101 0.7840 0.5338263
0.5 1 0.1 0.5 1 0.55 99 0.7968 0.5661053
0.5 1 0.1 0.5 1 0.55 100 0.7968 0.5662083
0.5 1 0.1 0.5 1 0.55 101 0.8048 0.5832938
0.5 1 0.1 0.5 2 0.45 99 0.7936 0.5567226
0.5 1 0.1 0.5 2 0.45 100 0.7952 0.5629058
0.5 1 0.1 0.5 2 0.45 101 0.7968 0.5654417
0.5 1 0.1 0.5 2 0.55 99 0.8064 0.5850504
0.5 1 0.1 0.5 2 0.55 100 0.8080 0.5890217
0.5 1 0.1 0.5 2 0.55 101 0.7984 0.5692429
0.5 1 0.1 0.6 1 0.45 99 0.7968 0.5628308
0.5 1 0.1 0.6 1 0.45 100 0.7968 0.5635390
0.5 1 0.1 0.6 1 0.45 101 0.7904 0.5492666
0.5 1 0.1 0.6 1 0.55 99 0.8064 0.5848651
0.5 1 0.1 0.6 1 0.55 100 0.8096 0.5908818
0.5 1 0.1 0.6 1 0.55 101 0.8096 0.5909583
0.5 1 0.1 0.6 2 0.45 99 0.8144 0.5998746
0.5 1 0.1 0.6 2 0.45 100 0.8112 0.5942978
0.5 1 0.1 0.6 2 0.45 101 0.8128 0.5990446
0.5 1 0.1 0.6 2 0.55 99 0.7904 0.5435422
0.5 1 0.1 0.6 2 0.55 100 0.7936 0.5547012
0.5 1 0.1 0.6 2 0.55 101 0.7952 0.5572543
0.5 1 0.1 0.7 1 0.45 99 0.8048 0.5867834
0.5 1 0.1 0.7 1 0.45 100 0.8032 0.5816360
0.5 1 0.1 0.7 1 0.45 101 0.8032 0.5814908
0.5 1 0.1 0.7 1 0.55 99 0.7856 0.5393567
0.5 1 0.1 0.7 1 0.55 100 0.7792 0.5255469
0.5 1 0.1 0.7 1 0.55 101 0.7808 0.5293043
0.5 1 0.1 0.7 2 0.45 99 0.8128 0.5976354
0.5 1 0.1 0.7 2 0.45 100 0.8144 0.5986222
0.5 1 0.1 0.7 2 0.45 101 0.8144 0.6011537
0.5 1 0.1 0.7 2 0.55 99 0.8032 0.5783321
0.5 1 0.1 0.7 2 0.55 100 0.7968 0.5642749
0.5 1 0.1 0.7 2 0.55 101 0.7952 0.5634869
0.5 1 0.6 0.5 1 0.45 99 0.8000 0.5725969
0.5 1 0.6 0.5 1 0.45 100 0.8000 0.5744279
0.5 1 0.6 0.5 1 0.45 101 0.8032 0.5826629
0.5 1 0.6 0.5 1 0.55 99 0.7840 0.5400600
0.5 1 0.6 0.5 1 0.55 100 0.7888 0.5497872
0.5 1 0.6 0.5 1 0.55 101 0.7872 0.5444276
0.5 1 0.6 0.5 2 0.45 99 0.7984 0.5672880
0.5 1 0.6 0.5 2 0.45 100 0.7968 0.5626342
0.5 1 0.6 0.5 2 0.45 101 0.8000 0.5714996
0.5 1 0.6 0.5 2 0.55 99 0.7920 0.5531878
0.5 1 0.6 0.5 2 0.55 100 0.7984 0.5700373
0.5 1 0.6 0.5 2 0.55 101 0.7904 0.5533853
0.5 1 0.6 0.6 1 0.45 99 0.8000 0.5698651
0.5 1 0.6 0.6 1 0.45 100 0.7872 0.5435998
0.5 1 0.6 0.6 1 0.45 101 0.8064 0.5832235
0.5 1 0.6 0.6 1 0.55 99 0.7968 0.5675827
0.5 1 0.6 0.6 1 0.55 100 0.8000 0.5731900
0.5 1 0.6 0.6 1 0.55 101 0.8016 0.5752530
0.5 1 0.6 0.6 2 0.45 99 0.8048 0.5859595
0.5 1 0.6 0.6 2 0.45 100 0.8048 0.5871449
0.5 1 0.6 0.6 2 0.45 101 0.8080 0.5920804
0.5 1 0.6 0.6 2 0.55 99 0.8096 0.5921209
0.5 1 0.6 0.6 2 0.55 100 0.8064 0.5822818
0.5 1 0.6 0.6 2 0.55 101 0.8080 0.5858828
0.5 1 0.6 0.7 1 0.45 99 0.7920 0.5532564
0.5 1 0.6 0.7 1 0.45 100 0.7952 0.5614666
0.5 1 0.6 0.7 1 0.45 101 0.8000 0.5733456
0.5 1 0.6 0.7 1 0.55 99 0.8192 0.6140360
0.5 1 0.6 0.7 1 0.55 100 0.8144 0.6036613
0.5 1 0.6 0.7 1 0.55 101 0.8192 0.6146661
0.5 1 0.6 0.7 2 0.45 99 0.8048 0.5786991
0.5 1 0.6 0.7 2 0.45 100 0.8080 0.5849150
0.5 1 0.6 0.7 2 0.45 101 0.7952 0.5598728
0.5 1 0.6 0.7 2 0.55 99 0.8016 0.5749519
0.5 1 0.6 0.7 2 0.55 100 0.7968 0.5653279
0.5 1 0.6 0.7 2 0.55 101 0.7968 0.5645514
0.5 2 0.1 0.5 1 0.45 99 0.8128 0.5989748
0.5 2 0.1 0.5 1 0.45 100 0.8096 0.5935491
0.5 2 0.1 0.5 1 0.45 101 0.8144 0.6031455
0.5 2 0.1 0.5 1 0.55 99 0.8144 0.6027193
0.5 2 0.1 0.5 1 0.55 100 0.8064 0.5853639
0.5 2 0.1 0.5 1 0.55 101 0.8112 0.5950285
0.5 2 0.1 0.5 2 0.45 99 0.8048 0.5845094
0.5 2 0.1 0.5 2 0.45 100 0.7984 0.5713613
0.5 2 0.1 0.5 2 0.45 101 0.8096 0.5956802
0.5 2 0.1 0.5 2 0.55 99 0.8048 0.5799867
0.5 2 0.1 0.5 2 0.55 100 0.8048 0.5800400
0.5 2 0.1 0.5 2 0.55 101 0.8032 0.5769708
0.5 2 0.1 0.6 1 0.45 99 0.7968 0.5632408
0.5 2 0.1 0.6 1 0.45 100 0.8016 0.5732302
0.5 2 0.1 0.6 1 0.45 101 0.8096 0.5909158
0.5 2 0.1 0.6 1 0.55 99 0.7968 0.5649963
0.5 2 0.1 0.6 1 0.55 100 0.8032 0.5814555
0.5 2 0.1 0.6 1 0.55 101 0.8096 0.5939723
0.5 2 0.1 0.6 2 0.45 99 0.8000 0.5687592
0.5 2 0.1 0.6 2 0.45 100 0.8000 0.5683392
0.5 2 0.1 0.6 2 0.45 101 0.8032 0.5736847
0.5 2 0.1 0.6 2 0.55 99 0.8176 0.6083516
0.5 2 0.1 0.6 2 0.55 100 0.8176 0.6090571
0.5 2 0.1 0.6 2 0.55 101 0.8192 0.6131141
0.5 2 0.1 0.7 1 0.45 99 0.8080 0.5868709
0.5 2 0.1 0.7 1 0.45 100 0.8032 0.5758353
0.5 2 0.1 0.7 1 0.45 101 0.8032 0.5771500
0.5 2 0.1 0.7 1 0.55 99 0.8048 0.5822481
0.5 2 0.1 0.7 1 0.55 100 0.8032 0.5804910
0.5 2 0.1 0.7 1 0.55 101 0.8000 0.5730032
0.5 2 0.1 0.7 2 0.45 99 0.7984 0.5707673
0.5 2 0.1 0.7 2 0.45 100 0.7952 0.5629595
0.5 2 0.1 0.7 2 0.45 101 0.7920 0.5529894
0.5 2 0.1 0.7 2 0.55 99 0.8080 0.5901246
0.5 2 0.1 0.7 2 0.55 100 0.8064 0.5860440
0.5 2 0.1 0.7 2 0.55 101 0.8000 0.5730932
0.5 2 0.6 0.5 1 0.45 99 0.7968 0.5673774
0.5 2 0.6 0.5 1 0.45 100 0.7968 0.5681916
0.5 2 0.6 0.5 1 0.45 101 0.7984 0.5725797
0.5 2 0.6 0.5 1 0.55 99 0.7952 0.5640566
0.5 2 0.6 0.5 1 0.55 100 0.7936 0.5585354
0.5 2 0.6 0.5 1 0.55 101 0.7888 0.5467925
0.5 2 0.6 0.5 2 0.45 99 0.7936 0.5542431
0.5 2 0.6 0.5 2 0.45 100 0.7952 0.5608133
0.5 2 0.6 0.5 2 0.45 101 0.7952 0.5575078
0.5 2 0.6 0.5 2 0.55 99 0.8160 0.6039908
0.5 2 0.6 0.5 2 0.55 100 0.8096 0.5920139
0.5 2 0.6 0.5 2 0.55 101 0.8208 0.6171648
0.5 2 0.6 0.6 1 0.45 99 0.8112 0.5949845
0.5 2 0.6 0.6 1 0.45 100 0.8000 0.5693836
0.5 2 0.6 0.6 1 0.45 101 0.8064 0.5845865
0.5 2 0.6 0.6 1 0.55 99 0.8016 0.5750297
0.5 2 0.6 0.6 1 0.55 100 0.8000 0.5733590
0.5 2 0.6 0.6 1 0.55 101 0.8032 0.5804208
0.5 2 0.6 0.6 2 0.45 99 0.8032 0.5786274
0.5 2 0.6 0.6 2 0.45 100 0.8096 0.5930928
0.5 2 0.6 0.6 2 0.45 101 0.8096 0.5933705
0.5 2 0.6 0.6 2 0.55 99 0.8000 0.5716383
0.5 2 0.6 0.6 2 0.55 100 0.7968 0.5658482
0.5 2 0.6 0.6 2 0.55 101 0.7952 0.5640643
0.5 2 0.6 0.7 1 0.45 99 0.7952 0.5666841
0.5 2 0.6 0.7 1 0.45 100 0.7936 0.5628605
0.5 2 0.6 0.7 1 0.45 101 0.7888 0.5512151
0.5 2 0.6 0.7 1 0.55 99 0.8064 0.5841222
0.5 2 0.6 0.7 1 0.55 100 0.8096 0.5910420
0.5 2 0.6 0.7 1 0.55 101 0.8032 0.5792084
0.5 2 0.6 0.7 2 0.45 99 0.8160 0.6048872
0.5 2 0.6 0.7 2 0.45 100 0.8144 0.5980613
0.5 2 0.6 0.7 2 0.45 101 0.8176 0.6065533
0.5 2 0.6 0.7 2 0.55 99 0.8176 0.6101146
0.5 2 0.6 0.7 2 0.55 100 0.8048 0.5840427
0.5 2 0.6 0.7 2 0.55 101 0.8080 0.5914939
0.5 3 0.1 0.5 1 0.45 99 0.8128 0.5978386
0.5 3 0.1 0.5 1 0.45 100 0.8160 0.6035074
0.5 3 0.1 0.5 1 0.45 101 0.8224 0.6181951
0.5 3 0.1 0.5 1 0.55 99 0.7888 0.5512073
0.5 3 0.1 0.5 1 0.55 100 0.7888 0.5504228
0.5 3 0.1 0.5 1 0.55 101 0.7872 0.5471532
0.5 3 0.1 0.5 2 0.45 99 0.8064 0.5876166
0.5 3 0.1 0.5 2 0.45 100 0.8160 0.6055529
0.5 3 0.1 0.5 2 0.45 101 0.8080 0.5872798
0.5 3 0.1 0.5 2 0.55 99 0.7936 0.5607528
0.5 3 0.1 0.5 2 0.55 100 0.7984 0.5723670
0.5 3 0.1 0.5 2 0.55 101 0.7888 0.5500899
0.5 3 0.1 0.6 1 0.45 99 0.8016 0.5758417
0.5 3 0.1 0.6 1 0.45 100 0.8032 0.5784385
0.5 3 0.1 0.6 1 0.45 101 0.8000 0.5717967
0.5 3 0.1 0.6 1 0.55 99 0.8064 0.5873364
0.5 3 0.1 0.6 1 0.55 100 0.7984 0.5706933
0.5 3 0.1 0.6 1 0.55 101 0.7984 0.5695325
0.5 3 0.1 0.6 2 0.45 99 0.7952 0.5638030
0.5 3 0.1 0.6 2 0.45 100 0.8032 0.5799274
0.5 3 0.1 0.6 2 0.45 101 0.8016 0.5775043
0.5 3 0.1 0.6 2 0.55 99 0.8000 0.5715343
0.5 3 0.1 0.6 2 0.55 100 0.8000 0.5710062
0.5 3 0.1 0.6 2 0.55 101 0.7936 0.5599672
0.5 3 0.1 0.7 1 0.45 99 0.7920 0.5584895
0.5 3 0.1 0.7 1 0.45 100 0.7904 0.5534373
0.5 3 0.1 0.7 1 0.45 101 0.7888 0.5496257
0.5 3 0.1 0.7 1 0.55 99 0.8080 0.5961297
0.5 3 0.1 0.7 1 0.55 100 0.8128 0.6040817
0.5 3 0.1 0.7 1 0.55 101 0.8128 0.6031082
0.5 3 0.1 0.7 2 0.45 99 0.7952 0.5607323
0.5 3 0.1 0.7 2 0.45 100 0.8016 0.5749702
0.5 3 0.1 0.7 2 0.45 101 0.7936 0.5572030
0.5 3 0.1 0.7 2 0.55 99 0.8080 0.5865582
0.5 3 0.1 0.7 2 0.55 100 0.8112 0.5932810
0.5 3 0.1 0.7 2 0.55 101 0.8144 0.5996192
0.5 3 0.6 0.5 1 0.45 99 0.8048 0.5821933
0.5 3 0.6 0.5 1 0.45 100 0.8000 0.5697399
0.5 3 0.6 0.5 1 0.45 101 0.8064 0.5828413
0.5 3 0.6 0.5 1 0.55 99 0.8096 0.5928874
0.5 3 0.6 0.5 1 0.55 100 0.8128 0.6006056
0.5 3 0.6 0.5 1 0.55 101 0.8096 0.5915135
0.5 3 0.6 0.5 2 0.45 99 0.8048 0.5814272
0.5 3 0.6 0.5 2 0.45 100 0.8016 0.5748128
0.5 3 0.6 0.5 2 0.45 101 0.8096 0.5905967
0.5 3 0.6 0.5 2 0.55 99 0.8000 0.5719155
0.5 3 0.6 0.5 2 0.55 100 0.8048 0.5844191
0.5 3 0.6 0.5 2 0.55 101 0.8016 0.5773891
0.5 3 0.6 0.6 1 0.45 99 0.7904 0.5521494
0.5 3 0.6 0.6 1 0.45 100 0.7984 0.5687816
0.5 3 0.6 0.6 1 0.45 101 0.8048 0.5799408
0.5 3 0.6 0.6 1 0.55 99 0.7952 0.5695903
0.5 3 0.6 0.6 1 0.55 100 0.8016 0.5817920
0.5 3 0.6 0.6 1 0.55 101 0.8000 0.5775279
0.5 3 0.6 0.6 2 0.45 99 0.7968 0.5603817
0.5 3 0.6 0.6 2 0.45 100 0.7968 0.5612484
0.5 3 0.6 0.6 2 0.45 101 0.7936 0.5558168
0.5 3 0.6 0.6 2 0.55 99 0.8096 0.5919859
0.5 3 0.6 0.6 2 0.55 100 0.8144 0.6024666
0.5 3 0.6 0.6 2 0.55 101 0.8128 0.5991701
0.5 3 0.6 0.7 1 0.45 99 0.7968 0.5695434
0.5 3 0.6 0.7 1 0.45 100 0.7936 0.5622803
0.5 3 0.6 0.7 1 0.45 101 0.8016 0.5788004
0.5 3 0.6 0.7 1 0.55 99 0.7984 0.5742120
0.5 3 0.6 0.7 1 0.55 100 0.7968 0.5697453
0.5 3 0.6 0.7 1 0.55 101 0.7968 0.5705984
0.5 3 0.6 0.7 2 0.45 99 0.8032 0.5769559
0.5 3 0.6 0.7 2 0.45 100 0.8064 0.5854369
0.5 3 0.6 0.7 2 0.45 101 0.7984 0.5684251
0.5 3 0.6 0.7 2 0.55 99 0.8144 0.6060192
0.5 3 0.6 0.7 2 0.55 100 0.8128 0.6012620
0.5 3 0.6 0.7 2 0.55 101 0.8096 0.5951375
Accuracy was used to select the optimal model using the largest value.
The final values used for the model were nrounds = 101, max_depth = 2, eta = 0.4, gamma = 0.6, colsample_bytree = 0.7, min_child_weight = 2 and subsample = 0.55.
xgb.tune.fit$bestTune # Optimal hyperparameter combination
nrounds max_depth eta gamma colsample_bytree min_child_weight subsample
360 101 2 0.4 0.6 0.7 2 0.55
Result!
Accuracy is highest at (eta = 0.4, max_depth = 2, gamma = 0.6, colsample_bytree = 0.7, min_child_weight = 2, subsample = 0.55, nrounds = 101), so the model with this hyperparameter combination is selected as the optimal trained model.
# Variable importance
importance <- xgboost::xgb.importance(model = xgb.tune.fit$finalModel)
importance
Feature Gain Cover Frequency
<char> <num> <num> <num>
1: Fare 0.32508276 0.39859069 0.40370370
2: Age 0.24472964 0.31555757 0.31851852
3: Sexmale 0.22943973 0.09741137 0.08888889
4: FamSize 0.10861218 0.09341485 0.10740741
5: Pclass3 0.07158805 0.05498747 0.04444444
6: Pclass2 0.02054765 0.04003806 0.03703704
# Variable importance plot
xgboost::xgb.plot.importance(importance_matrix = importance)
Result!
The variable Fare is the most important for classifying the Target Survived.
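For reference, in the output of xgb.importance(), Gain is a feature's fractional contribution to the total gain of the splits that use it, Cover is the relative number of observations affected by those splits, and Frequency is the share of all splits that use the feature; Gain is the column usually read as importance.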
13.7 Model Evaluation
Caution!
Model evaluation requires the predicted classes/probabilities for the Test Dataset, which are generated with the function predict().
# Generate the predicted classes
test.xgb.class <- predict(xgb.tune.fit,
                          newdata = titanic.ted.Imp[,-1]) # Test Dataset including only the predictors

test.xgb.class
[1] yes no no yes no no yes no no yes no no yes yes no yes no no yes no no no no no no no no no yes no yes no no no no no no no no yes no no no yes no no no no
[49] yes no no no no no no yes no no no yes no no yes no yes no no no no no no no no yes no no no no no no yes no yes no no no no no no no no no no yes yes yes
[97] no yes no no no yes yes no yes yes no yes no yes yes no yes no yes no yes no no no yes no no yes no yes no yes no yes no yes no no yes yes no no no no yes no no no
[145] yes no no no no no no yes no no no no no no no no yes no yes yes no yes yes no no no no no yes yes yes no yes no no no no no no yes no no no yes no yes no no
[193] no no no no no yes no no no no no no no no no no yes no no yes yes no no no yes yes no no no no no yes yes yes no no no no no no no no yes no no yes no no
[241] no yes no yes no yes no yes no no yes no no no no yes yes yes no yes no yes yes no no no
Levels: no yes
13.7.1 ConfusionMatrix
<- caret::confusionMatrix(test.xgb.class, titanic.ted.Imp$Survived,
CM positive = "yes") # confusionMatrix(예측 class, 실제 class, positive = "관심 class")
CM
Confusion Matrix and Statistics
Reference
Prediction no yes
no 153 34
yes 11 68
Accuracy : 0.8308
95% CI : (0.7803, 0.8738)
No Information Rate : 0.6165
P-Value [Acc > NIR] : 2.211e-14
Kappa : 0.6263
Mcnemar's Test P-Value : 0.00104
Sensitivity : 0.6667
Specificity : 0.9329
Pos Pred Value : 0.8608
Neg Pred Value : 0.8182
Prevalence : 0.3835
Detection Rate : 0.2556
Detection Prevalence : 0.2970
Balanced Accuracy : 0.7998
'Positive' Class : yes
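For reference, the summary statistics follow directly from the table above: Accuracy = (153 + 68)/266 ≈ 0.8308, Sensitivity = 68/(68 + 34) ≈ 0.6667, and Specificity = 153/(153 + 11) ≈ 0.9329.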
13.7.2 ROC Curve
# Generate the predicted probabilities
test.xgb.prob <- predict(xgb.tune.fit,
                         newdata = titanic.ted.Imp[,-1], # Test Dataset including only the predictors
                         type = "prob")                  # Generate the predicted probabilities

test.xgb.prob %>%
  as_tibble
# A tibble: 266 × 2
no yes
<dbl> <dbl>
1 0.0600 0.940
2 0.757 0.243
3 0.960 0.0397
4 0.172 0.828
5 0.642 0.358
6 0.860 0.140
7 0.453 0.547
8 0.978 0.0223
9 0.957 0.0431
10 0.0349 0.965
# ℹ 256 more rows
<- test.xgb.prob[,2] # "Survived = yes"에 대한 예측 확률
test.xgb.prob
<- titanic.ted.Imp$Survived # Test Dataset의 실제 class
ac <- as.numeric(test.xgb.prob) # 예측 확률을 수치형으로 변환 pp
13.7.2.1 Package “pROC”
::p_load("pROC")
pacman
<- roc(ac, pp, plot = T, col = "gray") # roc(실제 class, 예측 확률)
xgb.roc <- round(auc(xgb.roc), 3)
auc legend("bottomright", legend = auc, bty = "n")
Caution!
An ROC curve produced with Package "pROC" can be modified with a variety of functions.
# Using the function plot.roc()
plot.roc(xgb.roc,
         col = "gray",               # Line Color
         print.auc = TRUE,           # Whether to print the AUC
         print.auc.col = "red",      # AUC text color
         print.thres = TRUE,         # Whether to print the cutoff value
         print.thres.pch = 19,       # Shape of the point marking the cutoff value
         print.thres.col = "red",    # Color of the point marking the cutoff value
         auc.polygon = TRUE,         # Whether to shade the area under the curve
         auc.polygon.col = "gray90") # Color of the area under the curve
# Using the function ggroc()
ggroc(xgb.roc) +
annotate(geom = "text", x = 0.9, y = 1.0,
label = paste("AUC = ", auc),
size = 5,
color="red") +
theme_bw()
13.7.2.2 Package “Epi”
::p_load("Epi")
pacman# install_version("etm", version = "1.1", repos = "http://cran.us.r-project.org")
ROC(pp, ac, plot = "ROC") # ROC(예측 확률, 실제 class)
13.7.2.3 Package “ROCR”
::p_load("ROCR")
pacman
<- prediction(pp, ac) # prediction(예측 확률, 실제 class)
xgb.pred
<- performance(xgb.pred, "tpr", "fpr") # performance(, "민감도", "1-특이도")
xgb.perf plot(xgb.perf, col = "gray") # ROC Curve
<- performance(xgb.pred, "auc") # AUC
perf.auc <- attributes(perf.auc)$y.values
auc legend("bottomright", legend = auc, bty = "n")
13.7.3 Lift Chart
13.7.3.1 Package “ROCR”
<- performance(xgb.pred, "lift", "rpp") # Lift Chart
xgb.perf plot(xgb.perf, main = "lift curve",
colorize = T, # Coloring according to cutoff
lwd = 2)
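Here "rpp" denotes the rate of positive predictions, P(prediction = yes), and ROCR's "lift" is the ratio of the true positive rate to that rate, so values above 1 indicate the model captures actual "yes" cases faster than random selection would.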