ml_sdms_train.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 15 11:26:03 2020
@author: danielfurman
"""
# Binary classification with ten BioClim features: five derived from
# precipitation and five from temperature, de-correlated below a 0.5
# correlation threshold. We use PyCaret to train and tune (10-fold CV) our
# models on a train set containing 80% of the total data. Random forest,
# xgboost, lightgbm, catboost, and extra trees perform best; the final step
# blends a subset of these top performers into a soft-voting ensemble.
from pycaret.classification import (setup, create_model, finalize_model,
                                    save_model, blend_models, compare_models)
from pandas import read_csv
# Load the training data (alternative path kept for reference).
# data = read_csv('data/envtrain_xv.csv')
data = read_csv('data_2.0/envtrain_xv.csv')
# data = data.drop(['Unnamed: 0'], axis=1)

# Initialize the PyCaret experiment: 'pa' (presence/absence) is the target.
exp_clf = setup(data, target='pa', log_experiment=True,
                experiment_name='xv-21', session_id=110,
                numeric_features=['bclim14'])
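
# Optional sanity check (a minimal sketch, assuming PyCaret's get_config
# helper): inspect the train/test split that setup() created before modeling.
from pycaret.classification import get_config
print('Train features:', get_config('X_train').shape)
print('Test features:', get_config('X_test').shape)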
# Create and 10-fold cross-validate the candidate classifiers.
etrees = create_model('et')
xgboost = create_model('xgboost')
catboost = create_model('catboost')
rf = create_model('rf')
lgbm = create_model('lightgbm')
log = create_model('lr')
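
# Optional hyperparameter search (a sketch, not part of the original run;
# assumes PyCaret's tune_model, which runs a randomized search with 10-fold
# CV by default). Uncomment to tune individual candidates, e.g.:
# from pycaret.classification import tune_model
# etrees = tune_model(etrees, optimize='AUC')
# rf = tune_model(rf, optimize='AUC')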
# Refit each model on the full dataset and save it as a .pkl file.
# (finalize_model returns the refit model, so capture its return value.)
etrees_final = finalize_model(etrees)
save_model(etrees_final, 'classifier_models(pkl)/xant_etrees')
xgboost_final = finalize_model(xgboost)
save_model(xgboost_final, 'classifier_models(pkl)/xant_xgb')
catboost_final = finalize_model(catboost)
save_model(catboost_final, 'classifier_models(pkl)/xant_cboost')
rf_final = finalize_model(rf)
save_model(rf_final, 'classifier_models(pkl)/xant_rf')
lgbm_final = finalize_model(lgbm)
save_model(lgbm_final, 'classifier_models(pkl)/xant_lgbm')
log_final = finalize_model(log)
save_model(log_final, 'classifier_models(pkl)/xant_log')
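
# Re-loading a saved model later (a sketch, assuming PyCaret's load_model /
# predict_model API; 'data_2.0/envtest_xv.csv' is a hypothetical test file):
# from pycaret.classification import load_model, predict_model
# model = load_model('classifier_models(pkl)/xant_etrees')
# preds = predict_model(model, data=read_csv('data_2.0/envtest_xv.csv'))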
# Blend the top performers with soft voting (averaging class probabilities).
blender_specific = blend_models(estimator_list=[etrees, lgbm, rf],
                                method='soft')
# blender_specific = blend_models(estimator_list=[
#     etrees, lgbm, catboost], method='soft')
blender_final = finalize_model(blender_specific)
save_model(blender_final, 'classifier_models(pkl)/xant_blended')
print('PyCaret training ended\n\n')
compare_models()  # print the ordered 10-fold CV leaderboard
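
# compare_models() also returns the best estimator by the default metric
# (a sketch; capturing and saving it was not part of the original run):
# best = compare_models()
# save_model(finalize_model(best), 'classifier_models(pkl)/xant_best')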