import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from xgboost import XGBRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score

# Feed the training data into the algorithm and predict the held-out test set
xgb_model = XGBRegressor(n_estimators=100, learning_rate=0.2, objective='reg:squarederror')
xgb_model.fit(x_train, y_train)
xgb_pred = xgb_model.predict(x_test)
# Transform the predictions back to their original scale: the scaler was fitted on
# [target, features], so stack the prediction column with x_test before inverting
xgb_pred = xgb_pred.reshape(-1, 1)
xgb_pred_test_set = np.concatenate([xgb_pred, x_test], axis=1)
xgb_pred_test_set = scaler.inverse_transform(xgb_pred_test_set)
# Recover the export forecasts by adding each predicted difference back to the
# previous month's actual exports, then append them to the prediction data frame
result_list = []
for index in range(len(xgb_pred_test_set)):
    result_list.append(xgb_pred_test_set[index][0] + act_exports[index])
xgb_pred_series = pd.Series(result_list, name='xgb_pred')
predict_df = predict_df.merge(xgb_pred_series, left_index=True, right_index=True)
# With the forecasts reconstructed, evaluate their accuracy with various metrics
# (actual values first, predictions second, as scikit-learn expects)
xgb_rmse = np.sqrt(mean_squared_error(monthly_exports['Quantity'][-12:], predict_df['xgb_pred']))
xgb_mae = mean_absolute_error(monthly_exports['Quantity'][-12:], predict_df['xgb_pred'])
xgb_r2 = r2_score(monthly_exports['Quantity'][-12:], predict_df['xgb_pred'])
print('XGBoost RMSE: ', xgb_rmse)
print('XGBoost MAE: ', xgb_mae)
print('XGBoost R2 Score: ', xgb_r2)
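
# Optional sanity check (not part of the original script): compare the model against
# a naive "no-change" forecast that predicts each month's exports as the previous
# month's actual value, i.e. a zero difference. This reuses act_exports, which is
# assumed to hold the previous month's actual exports for each test month.
naive_pred = act_exports[:len(xgb_pred_test_set)]
naive_rmse = np.sqrt(mean_squared_error(monthly_exports['Quantity'][-12:], naive_pred))
print('Naive no-change RMSE: ', naive_rmse)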
# Visualise the predictions against the original values
plt.figure(figsize=(15, 7))
plt.plot(monthly_exports['Date'], monthly_exports['Quantity'])
plt.plot(predict_df['Date'], predict_df['xgb_pred'])
plt.title("Export Forecast using XGBoost")
plt.xlabel("Date")
plt.ylabel("Exports")
plt.legend(["Original exports", "Predicted exports"])
plt.show()
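
# ---------------------------------------------------------------------------
# For reference: the objects used above (monthly_exports, scaler, x_train,
# y_train, x_test, act_exports, predict_df) are created in an earlier
# preprocessing step that is not shown here. The block below is only a rough
# sketch of what that step might look like, assuming the 'Quantity' series is
# differenced, expanded into 12 lag features, scaled with a MinMaxScaler fitted
# on the combined [target, features] array, and split with the last 12 months
# held out. The actual notebook may use different lags or column names.
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler

df = monthly_exports.copy()                      # expects 'Date' and 'Quantity' columns
df['diff'] = df['Quantity'].diff()               # stationarise by first-order differencing
for lag in range(1, 13):                         # 12 monthly lag features of the difference
    df[f'lag_{lag}'] = df['diff'].shift(lag)
df = df.dropna().reset_index(drop=True)

feature_cols = [f'lag_{lag}' for lag in range(1, 13)]
supervised = df[['diff'] + feature_cols].values

scaler = MinMaxScaler(feature_range=(-1, 1))
scaler.fit(supervised)                           # fit on [target, features] jointly
supervised_scaled = scaler.transform(supervised)

train, test = supervised_scaled[:-12], supervised_scaled[-12:]
x_train, y_train = train[:, 1:], train[:, 0]
x_test, y_test = test[:, 1:], test[:, 0]

# Previous month's actual exports for each forecasted month (used to undo the diff)
act_exports = df['Quantity'].values[-13:-1]
predict_df = pd.DataFrame({'Date': df['Date'].values[-12:]})
# ---------------------------------------------------------------------------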