diff --git a/__pycache__/__init__.cpython-36.pyc b/__pycache__/__init__.cpython-36.pyc
index 51086b4..4843a81 100644
Binary files a/__pycache__/__init__.cpython-36.pyc and b/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q01_myXGBoost/__pycache__/__init__.cpython-36.pyc b/q01_myXGBoost/__pycache__/__init__.cpython-36.pyc
index 05966ae..5889223 100644
Binary files a/q01_myXGBoost/__pycache__/__init__.cpython-36.pyc and b/q01_myXGBoost/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q01_myXGBoost/__pycache__/build.cpython-36.pyc b/q01_myXGBoost/__pycache__/build.cpython-36.pyc
index 73181f1..0433583 100644
Binary files a/q01_myXGBoost/__pycache__/build.cpython-36.pyc and b/q01_myXGBoost/__pycache__/build.cpython-36.pyc differ
diff --git a/q01_myXGBoost/build.py b/q01_myXGBoost/build.py
index db3654a..9718d9f 100644
--- a/q01_myXGBoost/build.py
+++ b/q01_myXGBoost/build.py
@@ -1,3 +1,4 @@
+# %load q01_myXGBoost/build.py
 import pandas as pd
 from xgboost import XGBClassifier
 from sklearn.model_selection import train_test_split
@@ -11,13 +12,26 @@
 y = dataset.iloc[:, -1]

 X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=9)

-param_grid1 = {"max_depth": [2, 3, 4, 5, 6, 7, 9, 11],
-               "min_child_weight": [4, 6, 7, 8],
-               "subsample": [0.6, .7, .8, .9, 1],
-               "colsample_bytree": [0.6, .7, .8, .9, 1]
+param_grid1 = {'max_depth': [2, 3, 4, 5, 6, 7, 9, 11],
+               'min_child_weight': [4, 6, 7, 8],
+               'subsample': [0.6, .7, .8, .9, 1],
+               'colsample_bytree': [0.6, .7, .8, .9, 1]
                }

 # Write your solution here :
+def myXGBoost(X_train, X_test, y_train, y_test, model, param_grid, KFold=3, **kwargs):
+    if kwargs:
+        model.set_params(**kwargs)
+    gs_cv = GridSearchCV(model, param_grid=param_grid, cv=KFold, verbose=0)
+    gs_cv.fit(X_train, y_train)
+    best_params = gs_cv.best_params_
+    y_pred = gs_cv.predict(X_test)
+    accuracy = accuracy_score(y_pred, y_test)
+    return accuracy, best_params
+
+# accuracy, best_params = myXGBoost(X_train, X_test, y_train, y_test, XGBClassifier(seed=9), param_grid1, 3)
+# print (accuracy)
+# print (best_params)
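Reviewer note, not part of the patch: a minimal usage sketch for the myXGBoost helper added above. It assumes it is run at the bottom of q01_myXGBoost/build.py (or in the notebook after %load), so that myXGBoost, the train/test split, and the elided default imports (GridSearchCV, accuracy_score) are already in scope; small_grid is a hypothetical trimmed grid used only to keep the search fast.

# Hedged usage sketch for myXGBoost (assumes q01_myXGBoost/build.py context).
from xgboost import XGBClassifier

small_grid = {'max_depth': [3, 5],          # illustrative subset of param_grid1
              'min_child_weight': [4, 6]}

# myXGBoost grid-searches the model over small_grid with KFold-fold CV,
# then reports test-set accuracy and the winning parameter combination.
accuracy, best_params = myXGBoost(X_train, X_test, y_train, y_test,
                                  XGBClassifier(seed=9), small_grid, KFold=3)
print(accuracy)      # a float in [0, 1]
print(best_params)   # e.g. {'max_depth': 3, 'min_child_weight': 4}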
diff --git a/q01_myXGBoost/tests/__pycache__/__init__.cpython-36.pyc b/q01_myXGBoost/tests/__pycache__/__init__.cpython-36.pyc
index 8dfa197..08d181a 100644
Binary files a/q01_myXGBoost/tests/__pycache__/__init__.cpython-36.pyc and b/q01_myXGBoost/tests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q01_myXGBoost/tests/__pycache__/test_q01_myXGBoost.cpython-36.pyc b/q01_myXGBoost/tests/__pycache__/test_q01_myXGBoost.cpython-36.pyc
index c955d76..c5f5e3b 100644
Binary files a/q01_myXGBoost/tests/__pycache__/test_q01_myXGBoost.cpython-36.pyc and b/q01_myXGBoost/tests/__pycache__/test_q01_myXGBoost.cpython-36.pyc differ
diff --git a/q02_param2/__pycache__/__init__.cpython-36.pyc b/q02_param2/__pycache__/__init__.cpython-36.pyc
index 65aae62..37ee3ef 100644
Binary files a/q02_param2/__pycache__/__init__.cpython-36.pyc and b/q02_param2/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q02_param2/__pycache__/build.cpython-36.pyc b/q02_param2/__pycache__/build.cpython-36.pyc
index 265965e..6c31c2d 100644
Binary files a/q02_param2/__pycache__/build.cpython-36.pyc and b/q02_param2/__pycache__/build.cpython-36.pyc differ
diff --git a/q02_param2/build.py b/q02_param2/build.py
index 8391570..1034b3f 100644
--- a/q02_param2/build.py
+++ b/q02_param2/build.py
@@ -1,3 +1,4 @@
+# %load q02_param2/build.py
 # Default imports
 from sklearn.model_selection import train_test_split
 from xgboost import XGBClassifier
@@ -11,10 +12,28 @@
 y = dataset.iloc[:, -1]

 X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=9)

-param_grid2 = {"gamma": [0, 0.05, 0.1, 0.3, 0.7, 0.9, 1],
-               "reg_alpha": [0, 0.001, 0.005, 0.01, 0.05, 0.1],
-               "reg_lambda": [0.05, 0.1, 0.5, 1.0]
+param_grid2 = {'gamma': [0, 0.05, 0.1, 0.3, 0.7, 0.9, 1],
+               'reg_alpha': [0, 0.001, 0.005, 0.01, 0.05, 0.1],
+               'reg_lambda': [0.05, 0.1, 0.5, 1.0]
                }

 # Write your solution here :
+
+def param2(X_train, X_test, y_train, y_test, model, param_grid2):
+    param_grid1 = {'max_depth': [2, 3, 4, 5, 6, 7, 9, 11],
+                   'min_child_weight': [4, 6, 7, 8],
+                   'subsample': [0.6, .7, .8, .9, 1],
+                   'colsample_bytree': [0.6, .7, .8, .9, 1]
+                   }
+
+    accuracy, best_params = myXGBoost(X_train, X_test, y_train, y_test, model, param_grid1, KFold=3)
+
+    model1 = model.set_params(**best_params)
+    accuracy1, best_params1 = myXGBoost(X_train, X_test, y_train, y_test, model1, param_grid2, KFold=3)
+
+    return accuracy1, best_params1
+
+# param2(X_train, X_test, y_train, y_test, XGBClassifier(seed=9), param_grid2)
diff --git a/q02_param2/tests/__pycache__/__init__.cpython-36.pyc b/q02_param2/tests/__pycache__/__init__.cpython-36.pyc
index 19bc1aa..c301aac 100644
Binary files a/q02_param2/tests/__pycache__/__init__.cpython-36.pyc and b/q02_param2/tests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q02_param2/tests/__pycache__/test_q02_param2.cpython-36.pyc b/q02_param2/tests/__pycache__/test_q02_param2.cpython-36.pyc
index 18c07a7..983d8b1 100644
Binary files a/q02_param2/tests/__pycache__/test_q02_param2.cpython-36.pyc and b/q02_param2/tests/__pycache__/test_q02_param2.cpython-36.pyc differ
diff --git a/q03_xgboost/__pycache__/__init__.cpython-36.pyc b/q03_xgboost/__pycache__/__init__.cpython-36.pyc
index 2e9c375..278afe4 100644
Binary files a/q03_xgboost/__pycache__/__init__.cpython-36.pyc and b/q03_xgboost/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q03_xgboost/__pycache__/build.cpython-36.pyc b/q03_xgboost/__pycache__/build.cpython-36.pyc
index 4c997b3..4620c93 100644
Binary files a/q03_xgboost/__pycache__/build.cpython-36.pyc and b/q03_xgboost/__pycache__/build.cpython-36.pyc differ
diff --git a/q03_xgboost/build.py b/q03_xgboost/build.py
index 7905a04..26a842c 100644
--- a/q03_xgboost/build.py
+++ b/q03_xgboost/build.py
@@ -1,3 +1,4 @@
+# %load q03_xgboost/build.py
 # Default imports
 from sklearn.model_selection import train_test_split
 from xgboost import XGBClassifier
@@ -14,4 +15,14 @@
 # Write your solution here :
+def xgboost(X_train, X_test, y_train, y_test, **kwargs):
+    model = XGBClassifier(random_state=9)
+    model.set_params(**kwargs)
+    model.fit(X_train, y_train)
+    y_pred = model.predict(X_test)
+    accuracy = accuracy_score(y_test, y_pred)
+    return accuracy
diff --git a/q03_xgboost/tests/__pycache__/__init__.cpython-36.pyc b/q03_xgboost/tests/__pycache__/__init__.cpython-36.pyc
index e887bf7..51ca004 100644
Binary files a/q03_xgboost/tests/__pycache__/__init__.cpython-36.pyc and b/q03_xgboost/tests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/q03_xgboost/tests/__pycache__/test_q03_xgboost.cpython-36.pyc b/q03_xgboost/tests/__pycache__/test_q03_xgboost.cpython-36.pyc
index 77271df..e6fd8de 100644
Binary files a/q03_xgboost/tests/__pycache__/test_q03_xgboost.cpython-36.pyc and b/q03_xgboost/tests/__pycache__/test_q03_xgboost.cpython-36.pyc differ
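Reviewer note, not part of the patch: taken together, q01-q03 implement a staged tuning flow: tune the tree-structure grid first, tune the regularisation grid on top of the stage-one winners, then fit a final classifier with the selected values. The sketch below shows how the three helpers could be chained; the import paths and the reuse of q02's module-level split and param_grid2 are assumptions about how the packages are wired, not something the diff guarantees.

# Hedged end-to-end sketch of the staged-tuning flow (assumed import paths).
from q02_param2.build import (param2, param_grid2,
                              X_train, X_test, y_train, y_test)
from q03_xgboost.build import xgboost
from xgboost import XGBClassifier

# Stages 1-2: param2 grid-searches the tree-structure grid via myXGBoost,
# pins the winning values on the model, then grid-searches the
# regularisation grid (param_grid2) on top of them.
accuracy2, best_params2 = param2(X_train, X_test, y_train, y_test,
                                 XGBClassifier(seed=9), param_grid2)

# Stage 3: xgboost() fits a fresh XGBClassifier with whatever keyword
# arguments it receives and returns plain test-set accuracy.
final_accuracy = xgboost(X_train, X_test, y_train, y_test, **best_params2)
print(best_params2, final_accuracy)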