fuzzy-rules-generator/temp_density_regression.ipynb

In [163]:
import pandas as pd

train = pd.read_csv("data/density_train.csv", sep=";", decimal=",")
test = pd.read_csv("data/density_test.csv", sep=";", decimal=",")

train["Density"] = pow(train["Density"], 4)
test["Density"] = pow(test["Density"], 4)

display(train.head())
display(test.head())
    T  Al2O3  TiO2   Density
0  20    0.0   0.0  1.274429
1  25    0.0   0.0  1.261477
2  35    0.0   0.0  1.234322
3  40    0.0   0.0  1.220283
4  45    0.0   0.0  1.205995
    T  Al2O3  TiO2   Density
0  30   0.00   0.0  1.248056
1  55   0.00   0.0  1.176984
2  25   0.05   0.0  1.382694
3  30   0.05   0.0  1.366141
4  35   0.05   0.0  1.349487
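The Density feature was raised to the 4th power above. If the raw density scale is needed again later (for example, to read the tree-rule thresholds further down on the original scale), the transform can be undone with a fourth root. A minimal sketch, assuming the transformed frames above:

# Sketch (not part of the original notebook): recover the raw density scale
density_raw_train = train["Density"] ** 0.25
density_raw_test = test["Density"] ** 0.25
print(density_raw_train.head())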
In [164]:
# The regression target is the temperature T; the remaining columns are the features
y_train = train["T"]
X_train = train.drop(["T"], axis=1)

display(X_train.head())
display(y_train.head())

y_test = test["T"]
X_test = test.drop(["T"], axis=1)

display(X_test.head())
display(y_test.head())
   Al2O3  TiO2   Density
0    0.0   0.0  1.274429
1    0.0   0.0  1.261477
2    0.0   0.0  1.234322
3    0.0   0.0  1.220283
4    0.0   0.0  1.205995
0    20
1    25
2    35
3    40
4    45
Name: T, dtype: int64
   Al2O3  TiO2   Density
0   0.00   0.0  1.248056
1   0.00   0.0  1.176984
2   0.05   0.0  1.382694
3   0.05   0.0  1.366141
4   0.05   0.0  1.349487
0    30
1    55
2    25
3    30
4    35
Name: T, dtype: int64
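A quick sanity check that features and target stay aligned after dropping T; a sketch using the objects defined above:

# Sketch: row counts of features and target should match for both splits
assert X_train.shape[0] == y_train.shape[0]
assert X_test.shape[0] == y_test.shape[0]
print(X_train.shape, X_test.shape)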
In [165]:
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn import linear_model, tree, neighbors, ensemble

random_state = 9

models = {
    "linear": {"model": linear_model.LinearRegression(n_jobs=-1)},
    "linear_poly": {
        "model": make_pipeline(
            PolynomialFeatures(degree=2),
            linear_model.LinearRegression(fit_intercept=False, n_jobs=-1),
        )
    },
    "linear_interact": {
        "model": make_pipeline(
            PolynomialFeatures(interaction_only=True),
            linear_model.LinearRegression(fit_intercept=False, n_jobs=-1),
        )
    },
    "ridge": {"model": linear_model.RidgeCV()},
    "decision_tree": {
        "model": tree.DecisionTreeRegressor(random_state=random_state, max_depth=6, criterion="absolute_error")
    },
    "knn": {"model": neighbors.KNeighborsRegressor(n_neighbors=7, n_jobs=-1)},
    "random_forest": {
        "model": ensemble.RandomForestRegressor(
            max_depth=7, random_state=random_state, n_jobs=-1
        )
    },
}
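In the two polynomial pipelines, fit_intercept=False is paired with PolynomialFeatures because the expansion already emits a constant bias column (include_bias=True by default), so a separate intercept would be redundant. A small sketch of what the degree-2 expansion produces for the three predictors used here:

# Sketch: inspect the columns generated by the degree-2 expansion
pf = PolynomialFeatures(degree=2)
pf.fit(X_train.values)
# prints the bias column '1' followed by the linear, interaction, and squared terms
print(pf.get_feature_names_out(["Al2O3", "TiO2", "Density"]))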
In [166]:
import math
from sklearn import metrics

# Fit each model on the training split and record train/test error metrics
for model_name in models.keys():
    print(f"Model: {model_name}")
    fitted_model = models[model_name]["model"].fit(
        X_train.values, y_train.values.ravel()
    )
    y_train_pred = fitted_model.predict(X_train.values)
    y_test_pred = fitted_model.predict(X_test.values)
    models[model_name]["fitted"] = fitted_model
    models[model_name]["MSE_train"] = metrics.mean_squared_error(y_train, y_train_pred)
    models[model_name]["MSE_test"] = metrics.mean_squared_error(y_test, y_test_pred)
    models[model_name]["MAE_train"] = metrics.mean_absolute_error(y_train, y_train_pred)
    models[model_name]["MAE_test"] = metrics.mean_absolute_error(y_test, y_test_pred)
    models[model_name]["R2_train"] = metrics.r2_score(y_train, y_train_pred)
    models[model_name]["R2_test"] = metrics.r2_score(y_test, y_test_pred)
Model: linear
Model: linear_poly
Model: linear_interact
Model: ridge
Model: decision_tree
Model: knn
Model: random_forest
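MSE and MAE are on different scales; the root of MSE (RMSE) is often easier to compare directly with MAE. A sketch that derives it from the values already stored in the models dict (math is imported in the cell above):

# Sketch: derive a test-set RMSE for each model from the stored MSE
for model_name, info in models.items():
    info["RMSE_test"] = math.sqrt(info["MSE_test"])
    print(f"{model_name}: RMSE_test = {info['RMSE_test']:.3f}")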
In [167]:
reg_metrics = pd.DataFrame.from_dict(models, "index")[
    ["MSE_train", "MSE_test", "MAE_train", "MAE_test", "R2_train", "R2_test"]
]
reg_metrics.sort_values(by="MAE_test").style.background_gradient(
    cmap="viridis", low=1, high=0.3, subset=["MSE_train", "MSE_test"]
).background_gradient(cmap="plasma", low=0.3, high=1, subset=["MAE_test", "R2_test"])
Out[167]:
                  MSE_train    MSE_test  MAE_train   MAE_test  R2_train   R2_test
linear_poly        0.465283    0.209921   0.513527   0.374980  0.998248  0.999016
linear_interact   16.021929   16.881061   3.268616   3.266739  0.939657  0.920866
linear            30.840398   36.882107   4.679503   4.594400  0.883846  0.827106
decision_tree     10.526316   47.426471   1.842105   5.735294  0.960355  0.777676
random_forest     20.214645   54.501240   3.570892   6.598133  0.923866  0.744512
knn              161.291622  140.006002  10.206767   9.537815  0.392527  0.343686
ridge            204.018844  162.078696  12.353188  10.798642  0.231604  0.240215
In [168]:
# Extract the fitted decision tree and print its rules in human-readable form
model = models["decision_tree"]["fitted"]
rules = tree.export_text(model, feature_names=X_train.columns.values.tolist())
print(rules)
|--- Density <= 1.18
|   |--- Density <= 1.14
|   |   |--- value: [70.00]
|   |--- Density >  1.14
|   |   |--- Density <= 1.15
|   |   |   |--- value: [65.00]
|   |   |--- Density >  1.15
|   |   |   |--- value: [60.00]
|--- Density >  1.18
|   |--- Density <= 1.31
|   |   |--- TiO2 <= 0.03
|   |   |   |--- Al2O3 <= 0.03
|   |   |   |   |--- Density <= 1.23
|   |   |   |   |   |--- Density <= 1.20
|   |   |   |   |   |   |--- value: [50.00]
|   |   |   |   |   |--- Density >  1.20
|   |   |   |   |   |   |--- value: [42.50]
|   |   |   |   |--- Density >  1.23
|   |   |   |   |   |--- Density <= 1.25
|   |   |   |   |   |   |--- value: [35.00]
|   |   |   |   |   |--- Density >  1.25
|   |   |   |   |   |   |--- value: [22.50]
|   |   |   |--- Al2O3 >  0.03
|   |   |   |   |--- Density <= 1.26
|   |   |   |   |   |--- Density <= 1.24
|   |   |   |   |   |   |--- value: [70.00]
|   |   |   |   |   |--- Density >  1.24
|   |   |   |   |   |   |--- value: [65.00]
|   |   |   |   |--- Density >  1.26
|   |   |   |   |   |--- Density <= 1.29
|   |   |   |   |   |   |--- value: [55.00]
|   |   |   |   |   |--- Density >  1.29
|   |   |   |   |   |   |--- value: [50.00]
|   |   |--- TiO2 >  0.03
|   |   |   |--- Density <= 1.25
|   |   |   |   |--- value: [70.00]
|   |   |   |--- Density >  1.25
|   |   |   |   |--- Density <= 1.27
|   |   |   |   |   |--- value: [65.00]
|   |   |   |   |--- Density >  1.27
|   |   |   |   |   |--- value: [60.00]
|   |--- Density >  1.31
|   |   |--- Density <= 1.57
|   |   |   |--- Density <= 1.37
|   |   |   |   |--- Density <= 1.33
|   |   |   |   |   |--- value: [45.00]
|   |   |   |   |--- Density >  1.33
|   |   |   |   |   |--- Density <= 1.36
|   |   |   |   |   |   |--- value: [40.00]
|   |   |   |   |   |--- Density >  1.36
|   |   |   |   |   |   |--- value: [35.00]
|   |   |   |--- Density >  1.37
|   |   |   |   |--- Density <= 1.39
|   |   |   |   |   |--- value: [30.00]
|   |   |   |   |--- Density >  1.39
|   |   |   |   |   |--- Al2O3 <= 0.03
|   |   |   |   |   |   |--- value: [22.50]
|   |   |   |   |   |--- Al2O3 >  0.03
|   |   |   |   |   |   |--- value: [20.00]
|   |   |--- Density >  1.57
|   |   |   |--- Density <= 1.93
|   |   |   |   |--- Density <= 1.74
|   |   |   |   |   |--- value: [70.00]
|   |   |   |   |--- Density >  1.74
|   |   |   |   |   |--- Al2O3 <= 0.15
|   |   |   |   |   |   |--- value: [65.00]
|   |   |   |   |   |--- Al2O3 >  0.15
|   |   |   |   |   |   |--- value: [50.00]
|   |   |   |--- Density >  1.93
|   |   |   |   |--- Al2O3 <= 0.15
|   |   |   |   |   |--- Density <= 2.09
|   |   |   |   |   |   |--- value: [50.00]
|   |   |   |   |   |--- Density >  2.09
|   |   |   |   |   |   |--- value: [30.00]
|   |   |   |   |--- Al2O3 >  0.15
|   |   |   |   |   |--- Density <= 1.95
|   |   |   |   |   |   |--- value: [30.00]
|   |   |   |   |   |--- Density >  1.95
|   |   |   |   |   |   |--- value: [22.50]
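
Beyond the printed rules, the fitted tree exposes a few summary attributes that indicate how compact the rule set is and which features drive the splits; a sketch using the decision_tree model above:

# Sketch: summarize the size of the rule set and the split-feature importances
print("depth:", model.get_depth())
print("leaves (terminal rules):", model.get_n_leaves())
for name, importance in zip(X_train.columns, model.feature_importances_):
    print(f"{name}: importance {importance:.3f}")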

In [169]:
import pickle

# Persist the fitted decision tree; a context manager ensures the file is closed
with open("data/temp_density_tree.model.sav", "wb") as f:
    pickle.dump(model, f)
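To reuse the pickled tree later, it can be loaded back and applied directly; a minimal sketch, assuming the same file path and feature layout as above:

# Sketch: load the pickled model and predict on the held-out features
with open("data/temp_density_tree.model.sav", "rb") as f:
    loaded_model = pickle.load(f)
print(loaded_model.predict(X_test.values)[:5])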