Variance and Bias Trade-off

Bagging Example Using Decision Trees

# Bagging ensemble of decision trees on the California housing data.
from sklearn.datasets import fetch_california_housing
from sklearn.ensemble import BaggingRegressor
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor

# Fetch the regression dataset and separate features from target.
data = fetch_california_housing()
X = data.data
y = data.target

# Hold out 20% of the samples for evaluation; fix the seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42
)

# Base learner: a single unconstrained regression tree (high variance on its own).
tree = DecisionTreeRegressor()

# Averaging 100 bootstrapped copies of the base tree reduces that variance.
bagging = BaggingRegressor(estimator=tree, n_estimators=100, random_state=42)
bagging.fit(X_train, y_train)

# R^2 of the ensemble on the held-out split.
print("Bagging Score:", bagging.score(X_test, y_test))
Bagging Score: 0.8046988456668309

Gradient Boosting Example

# Gradient boosting on the same train/test split, for comparison with bagging.
from sklearn.ensemble import GradientBoostingRegressor

# 100 shallow trees (depth 3) fitted sequentially, each correcting the
# residual error of the ensemble so far; seed fixed for reproducibility.
gboost = GradientBoostingRegressor(
    n_estimators=100,
    learning_rate=0.1,
    max_depth=3,
    random_state=42,
)
gboost.fit(X_train, y_train)

# R^2 of the boosted ensemble on the held-out split.
print("Gradient Boosting Score:", gboost.score(X_test, y_test))
Gradient Boosting Score: 0.7756446042829697