Enhance documentation with usage examples for crate::compute::models

Palash Tyagi 2025-08-03 16:48:37 +01:00
parent e6964795e3
commit ed01c4b8f2
8 changed files with 116 additions and 0 deletions

View File

@@ -1,3 +1,15 @@
//! Common activation functions used in neural networks.
//!
//! Functions operate element-wise on [`Matrix`] values.
//!
//! ```
//! use rustframe::compute::models::activations::sigmoid;
//! use rustframe::matrix::Matrix;
//!
//! let x = Matrix::from_vec(vec![0.0], 1, 1);
//! let y = sigmoid(&x);
//! assert!((y.get(0,0) - 0.5).abs() < 1e-6);
//! ```
use crate::matrix::{Matrix, SeriesOps};
pub fn sigmoid(x: &Matrix<f64>) -> Matrix<f64> {
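
A complementary sketch for the other activation this module exports (the later diff imports `relu` and `drelu` alongside `sigmoid`); it assumes `relu` shares the element-wise `&Matrix<f64> -> Matrix<f64>` signature shown for `sigmoid` above.

```rust
use rustframe::compute::models::activations::relu;
use rustframe::matrix::Matrix;

// Assumed: relu applies element-wise, like sigmoid above.
let x = Matrix::from_vec(vec![-1.0, 2.0], 2, 1);
let y = relu(&x);
assert!((y.get(0, 0) - 0.0).abs() < 1e-12); // negative inputs clamp to zero
assert!((y.get(1, 0) - 2.0).abs() < 1e-12); // positive inputs pass through
```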

View File

@@ -1,3 +1,30 @@
//! A minimal dense neural network implementation for educational purposes.
//!
//! Layers operate on [`Matrix`] values and support ReLU and Sigmoid
//! activations. This is not meant to be a performant deep-learning framework
//! but rather a small example of how the surrounding matrix utilities can be
//! composed.
//!
//! ```
//! use rustframe::compute::models::dense_nn::{ActivationKind, DenseNN, DenseNNConfig, InitializerKind, LossKind};
//! use rustframe::matrix::Matrix;
//!
//! // Tiny network with one input and one output neuron.
//! let config = DenseNNConfig {
//! input_size: 1,
//! hidden_layers: vec![],
//! output_size: 1,
//! activations: vec![ActivationKind::Relu],
//! initializer: InitializerKind::Uniform(0.5),
//! loss: LossKind::MSE,
//! learning_rate: 0.1,
//! epochs: 1,
//! };
//! let mut nn = DenseNN::new(config);
//! let x = Matrix::from_vec(vec![1.0, 2.0], 2, 1);
//! let y = Matrix::from_vec(vec![2.0, 3.0], 2, 1);
//! nn.train(&x, &y);
//! ```
use crate::compute::models::activations::{drelu, relu, sigmoid};
use crate::matrix::{Matrix, SeriesOps};
use crate::random::prelude::*;
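
A slightly larger configuration, as a sketch only: it assumes `activations` takes one entry per weight layer (hidden layers plus the output layer), and that an `ActivationKind::Sigmoid` variant exists alongside `Relu`, as the module description suggests.

```rust
use rustframe::compute::models::dense_nn::{
    ActivationKind, DenseNN, DenseNNConfig, InitializerKind, LossKind,
};
use rustframe::matrix::Matrix;

// Assumed: one activation per weight layer (hidden + output) and a Sigmoid variant.
let config = DenseNNConfig {
    input_size: 1,
    hidden_layers: vec![4], // one hidden layer of 4 neurons
    output_size: 1,
    activations: vec![ActivationKind::Relu, ActivationKind::Sigmoid],
    initializer: InitializerKind::Uniform(0.5),
    loss: LossKind::MSE,
    learning_rate: 0.05,
    epochs: 10,
};
let mut nn = DenseNN::new(config);
let x = Matrix::from_vec(vec![0.0, 1.0], 2, 1);
let y = Matrix::from_vec(vec![0.0, 1.0], 2, 1);
nn.train(&x, &y);
```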

View File

@@ -1,3 +1,16 @@
//! Gaussian Naive Bayes classifier for dense matrices.
//!
//! ```
//! use rustframe::compute::models::gaussian_nb::GaussianNB;
//! use rustframe::matrix::Matrix;
//!
//! let x = Matrix::from_vec(vec![1.0, 2.0, 1.0, 2.0], 2, 2); // two samples
//! let y = Matrix::from_vec(vec![0.0, 1.0], 2, 1);
//! let mut model = GaussianNB::new(1e-9, false);
//! model.fit(&x, &y);
//! let preds = model.predict(&x);
//! assert_eq!(preds.rows(), 2);
//! ```
use crate::matrix::Matrix;
use std::collections::HashMap;
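
A follow-on sketch that reuses the fitted classifier on a row it has not seen; it assumes `predict` only requires the same number of feature columns as the training data.

```rust
use rustframe::compute::models::gaussian_nb::GaussianNB;
use rustframe::matrix::Matrix;

let x = Matrix::from_vec(vec![1.0, 2.0, 1.0, 2.0], 2, 2);
let y = Matrix::from_vec(vec![0.0, 1.0], 2, 1);
let mut model = GaussianNB::new(1e-9, false);
model.fit(&x, &y);

// Assumed: predict accepts any matrix with the training feature count.
let unseen = Matrix::from_vec(vec![1.5, 1.5], 1, 2);
let preds = model.predict(&unseen);
assert_eq!(preds.rows(), 1);
```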

View File

@@ -1,3 +1,14 @@
//! Simple k-means clustering working on [`Matrix`] data.
//!
//! ```
//! use rustframe::compute::models::k_means::KMeans;
//! use rustframe::matrix::Matrix;
//!
//! let data = Matrix::from_vec(vec![1.0, 1.0, 5.0, 5.0], 2, 2);
//! let (model, labels) = KMeans::fit(&data, 2, 10, 1e-4);
//! assert_eq!(model.centroids.rows(), 2);
//! assert_eq!(labels.len(), 2);
//! ```
use crate::compute::stats::mean_vertical;
use crate::matrix::Matrix;
use crate::random::prelude::*;
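
A small extension of the example above, checking that the fitted centroids carry one coordinate per input feature and that every sample receives a label; it uses only the accessors already shown in this commit.

```rust
use rustframe::compute::models::k_means::KMeans;
use rustframe::matrix::Matrix;

let data = Matrix::from_vec(vec![1.0, 1.0, 5.0, 5.0], 2, 2);
let (model, labels) = KMeans::fit(&data, 2, 10, 1e-4);
// One centroid coordinate per feature, one label per sample.
assert_eq!(model.centroids.cols(), data.cols());
assert_eq!(labels.len(), data.rows());
```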

View File

@@ -1,3 +1,16 @@
//! Ordinary least squares linear regression.
//!
//! ```
//! use rustframe::compute::models::linreg::LinReg;
//! use rustframe::matrix::Matrix;
//!
//! let x = Matrix::from_vec(vec![1.0, 2.0, 3.0, 4.0], 4, 1);
//! let y = Matrix::from_vec(vec![2.0, 3.0, 4.0, 5.0], 4, 1);
//! let mut model = LinReg::new(1);
//! model.fit(&x, &y, 0.01, 100);
//! let preds = model.predict(&x);
//! assert_eq!(preds.rows(), 4);
//! ```
use crate::matrix::{Matrix, SeriesOps};
pub struct LinReg {
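
As a further sketch, the fitted model can be applied to inputs outside the training set; because the fit is iterative (learning rate and epoch arguments), only the output shape is asserted here.

```rust
use rustframe::compute::models::linreg::LinReg;
use rustframe::matrix::Matrix;

let x = Matrix::from_vec(vec![1.0, 2.0, 3.0, 4.0], 4, 1);
let y = Matrix::from_vec(vec![2.0, 4.0, 6.0, 8.0], 4, 1); // y = 2x
let mut model = LinReg::new(1);
model.fit(&x, &y, 0.01, 500);

// Predict for an input outside the training range; only shape is asserted.
let unseen = Matrix::from_vec(vec![5.0], 1, 1);
let preds = model.predict(&unseen);
assert_eq!(preds.rows(), 1);
```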

View File

@@ -1,3 +1,16 @@
//! Binary logistic regression classifier.
//!
//! ```
//! use rustframe::compute::models::logreg::LogReg;
//! use rustframe::matrix::Matrix;
//!
//! let x = Matrix::from_vec(vec![1.0, 2.0, 3.0, 4.0], 4, 1);
//! let y = Matrix::from_vec(vec![0.0, 0.0, 1.0, 1.0], 4, 1);
//! let mut model = LogReg::new(1);
//! model.fit(&x, &y, 0.1, 100);
//! let preds = model.predict(&x);
//! assert_eq!(preds[(0,0)], 0.0);
//! ```
use crate::compute::models::activations::sigmoid;
use crate::matrix::{Matrix, SeriesOps};
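
A companion sketch applying the fitted classifier to an unseen input; since the learned decision boundary is approximate, only the output shape is asserted.

```rust
use rustframe::compute::models::logreg::LogReg;
use rustframe::matrix::Matrix;

let x = Matrix::from_vec(vec![1.0, 2.0, 3.0, 4.0], 4, 1);
let y = Matrix::from_vec(vec![0.0, 0.0, 1.0, 1.0], 4, 1);
let mut model = LogReg::new(1);
model.fit(&x, &y, 0.1, 100);

// Classify an unseen input; only the output shape is asserted.
let unseen = Matrix::from_vec(vec![0.5], 1, 1);
let preds = model.predict(&unseen);
assert_eq!(preds.rows(), 1);
```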

View File

@@ -1,3 +1,19 @@
//! Lightweight machine-learning models built on matrices.
//!
//! Models are intentionally minimal and operate on the [`Matrix`](crate::matrix::Matrix) type for
//! inputs and parameters.
//!
//! ```
//! use rustframe::compute::models::linreg::LinReg;
//! use rustframe::matrix::Matrix;
//!
//! let x = Matrix::from_vec(vec![1.0, 2.0, 3.0, 4.0], 4, 1);
//! let y = Matrix::from_vec(vec![2.0, 3.0, 4.0, 5.0], 4, 1);
//! let mut model = LinReg::new(1);
//! model.fit(&x, &y, 0.01, 1000);
//! let preds = model.predict(&x);
//! assert_eq!(preds.rows(), 4);
//! ```
pub mod activations;
pub mod dense_nn;
pub mod gaussian_nb;
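
A hedged sketch composing two of the models exported from this module, reducing the data with PCA before clustering with KMeans; the calls mirror the per-module examples elsewhere in this commit.

```rust
use rustframe::compute::models::{k_means::KMeans, pca::PCA};
use rustframe::matrix::Matrix;

// Four samples in two dimensions, forming two loose clusters.
let data = Matrix::from_rows_vec(vec![1.0, 1.0, 1.1, 0.9, 5.0, 5.0, 5.1, 4.9], 4, 2);
let pca = PCA::fit(&data, 1, 0);
let reduced = pca.transform(&data);
let (model, labels) = KMeans::fit(&reduced, 2, 10, 1e-4);
assert_eq!(model.centroids.rows(), 2);
assert_eq!(labels.len(), 4);
```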

View File

@@ -1,3 +1,14 @@
//! Principal Component Analysis using covariance matrices.
//!
//! ```
//! use rustframe::compute::models::pca::PCA;
//! use rustframe::matrix::Matrix;
//!
//! let data = Matrix::from_rows_vec(vec![1.0, 1.0, 2.0, 2.0], 2, 2);
//! let pca = PCA::fit(&data, 1, 0);
//! let projected = pca.transform(&data);
//! assert_eq!(projected.cols(), 1);
//! ```
use crate::compute::stats::correlation::covariance_matrix;
use crate::compute::stats::descriptive::mean_vertical;
use crate::matrix::{Axis, Matrix, SeriesOps};