# Building a Linear Regression Model
import { tensor } from "deepbox/ndarray";
import { trainTestSplit } from "deepbox/preprocess";
// Generate data: y = 2x + 3 + noise
// 100 evenly spaced x values in [0, 9.9], with uniform noise in [-1, 1)
// added to the true line so the fit is not exact.
const xs = Array.from({ length: 100 }, (_, i) => i / 10);
const X_data: number[][] = xs.map((x) => [x]);
const y_data: number[] = xs.map((x) => 2 * x + 3 + (Math.random() - 0.5) * 2);
// Wrap the raw arrays as deepbox tensors: X is (100, 1), y is (100,).
const X = tensor(X_data);
const y = tensor(y_data);
console.log(`Dataset: ${X.shape[0]} samples, ${X.shape[1]} features`);
// Hold out 20% of the samples for evaluation; randomState fixes the
// shuffle seed so the split is reproducible across runs.
const [X_train, X_test, y_train, y_test] = trainTestSplit(X, y, {
testSize: 0.2,
randomState: 42,
});
// With 100 samples and testSize 0.2 this should report 80 train / 20 test.
console.log(`Training set: ${X_train.shape[0]} samples`);
console.log(`Test set: ${X_test.shape[0]} samples`);
import { LinearRegression } from "deepbox/ml";
// Fit an ordinary linear regression on the training split.
const model = new LinearRegression();
model.fit(X_train, y_train);
console.log("Model trained!");
// coef is accessed with ?. — presumably it is undefined until fit() has
// run; after fitting we expect it near the true slope 2 (TODO confirm
// against deepbox docs). Intercept should land near 3.
console.log(`Coefficients: ${model.coef?.toString()}`);
console.log(`Intercept: ${model.intercept}`);
import { mae, mse, r2Score } from "deepbox/metrics";
// Score the fitted model on the held-out test split.
const y_pred = model.predict(X_test);
// Collect all three regression metrics in one place before printing.
const scores = {
  r2: r2Score(y_test, y_pred),
  mse: mse(y_test, y_pred),
  mae: mae(y_test, y_pred),
};
console.log("Model Performance:");
console.log(`R² Score: ${scores.r2.toFixed(4)}`);
console.log(`MSE: ${scores.mse.toFixed(4)}`);
console.log(`MAE: ${scores.mae.toFixed(4)}`);
## Understanding the Metrics
- R² Score: Proportion of variance explained (1.0 is a perfect fit, 0.0 matches a predict-the-mean baseline, and it can be negative for models worse than that baseline)
- MSE (Mean Squared Error): Average squared difference between predictions and actual values
- MAE (Mean Absolute Error): Average absolute difference between predictions and actual values
## Next Steps
- **Logistic Regression** — learn classification with logistic regression
- **Regularization** — prevent overfitting with Ridge and Lasso regression