Mirror of https://github.com/Magnus167/rustframe.git, synced 2025-11-19 10:26:10 +00:00
Compare commits
2 Commits
ci-update...d5db65467e
| Author | SHA1 | Date |
|---|---|---|
| | d5db65467e | |
| | ff97e6b0b6 | |
34 .github/.archive/pr-checks.yml vendored Normal file
@@ -0,0 +1,34 @@
# name: pr-checks

# on:
#   pull_request:
#     branches: [pr_checks_disabled_for_now]
#     types:
#       - opened
#       # - synchronize
#       - reopened
#       - edited
#       - ready_for_review

# concurrency:
#   group: pr-checks-${{ github.event.number }}

# permissions:
#   contents: read
#   pull-requests: read
#   checks: write

# jobs:
#   pr-checks:
#     name: pr-checks
#     runs-on: ubuntu-latest
#     steps:
#       - uses: actions/checkout@v4

#       - name: Run PR checks
#         shell: bash
#         env:
#           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
#           PR_NUMBER: ${{ github.event.number }}
#         run: |
#           python .github/scripts/pr_checks.py $PR_NUMBER
64 .github/scripts/ci_checks.py vendored
@@ -1,64 +0,0 @@
import os
import sys
from typing import Any, Dict, Optional

import tomllib
import packaging.version
import requests

sys.path.append(os.getcwd())

ACCESS_TOKEN: Optional[str] = os.getenv("GH_TOKEN", None)

GITHUB_REQUEST_CONFIG = {
    "Accept": "application/vnd.github.v3+json",
    "Authorization": f"token {ACCESS_TOKEN}",
    "X-GitHub-Api-Version": "2022-11-28",
}
REPO_OWNER_USERNAME: str = "Magnus167"
REPO_NAME: str = "rustframe"
REPOSITORY_WEB_LINK: str = f"github.com/{REPO_OWNER_USERNAME}/{REPO_NAME}"

CARGO_TOML_PATH: str = "Cargo.toml"


def load_cargo_toml() -> Dict[str, Any]:
    if not os.path.exists(CARGO_TOML_PATH):
        raise FileNotFoundError(f"{CARGO_TOML_PATH} does not exist.")

    with open(CARGO_TOML_PATH, "rb") as file:
        return tomllib.load(file)


def get_latest_crates_io_version() -> str:
    url = "https://crates.io/api/v1/crates/rustframe"
    try:
        response = requests.get(url, headers=GITHUB_REQUEST_CONFIG)
        response.raise_for_status()
        data = response.json()
        return data["crate"]["max_version"]
    except requests.RequestException as e:
        raise RuntimeError(f"Failed to fetch latest version from crates.io: {e}")


def get_current_version() -> str:
    cargo_toml = load_cargo_toml()
    version = cargo_toml.get("package", {}).get("version", None)
    if not version:
        raise ValueError("Version not found in Cargo.toml")
    return version


def check_version() -> None:
    latest_version = get_latest_crates_io_version()
    latest_version_tuple = packaging.version.parse(latest_version)
    current_version = get_current_version()
    current_version_tuple = packaging.version.parse(current_version)

    if latest_version_tuple >= current_version_tuple:
        print(f"Current version {current_version_tuple} is less than or equal to latest version {latest_version_tuple} on crates.io.")
        sys.exit(1)

    print(f"Current version: {current_version_tuple}")


if __name__ == "__main__":
    check_version()
236 .github/scripts/pr_checks.py vendored Normal file
@@ -0,0 +1,236 @@
import os
import sys
import urllib.request
import urllib.error
import json
from typing import Any, Dict, List, Optional, Tuple
import warnings
import urllib.parse

from time import sleep

sys.path.append(os.getcwd())

ACCESS_TOKEN: Optional[str] = os.getenv("GH_TOKEN", None)

REQUEST_CONFIG = {
    "Accept": "application/vnd.github.v3+json",
    "Authorization": f"token {ACCESS_TOKEN}",
    "X-GitHub-Api-Version": "2022-11-28",
}
REPO_OWNER_USERNAME: str = "Magnus167"
REPO_NAME: str = "rustframe"
REPOSITORY_WEB_LINK: str = f"github.com/{REPO_OWNER_USERNAME}/{REPO_NAME}"


def perform_api_call(
    target_url: str,
    call_headers: Optional[dict] = REQUEST_CONFIG,
    query_parameters: Dict[str, Any] = {},
    http_method: str = "GET",
    maximum_attempts: int = 5,
) -> Any:
    assert http_method in ["GET", "DELETE", "POST", "PATCH", "PUT"]

    attempt_count = 0
    while attempt_count < maximum_attempts:
        try:
            if query_parameters:
                encoded_parameters = urllib.parse.urlencode(query_parameters)
                target_url = f"{target_url}?{encoded_parameters}"

            http_request_object = urllib.request.Request(target_url, method=http_method)

            if call_headers:
                for key, value in call_headers.items():
                    http_request_object.add_header(key, value)

            with urllib.request.urlopen(http_request_object) as server_response:
                if server_response.status == 404:
                    raise Exception(f"404: {target_url} not found.")

                return json.loads(server_response.read().decode())

        except urllib.error.HTTPError as error_details:
            unrecoverable_codes = [403, 404, 422]
            if error_details.code in unrecoverable_codes:
                raise Exception(f"Request failed: {error_details}")

            print(f"Request failed: {error_details}")
            attempt_count += 1
            sleep(1)

        except Exception as error_details:
            print(f"Request failed: {error_details}")
            attempt_count += 1
            sleep(1)

    raise Exception("Request failed")


valid_title_prefixes: List[str] = [
    "Feature:",
    "Bugfix:",
    "Documentation:",
    "CI/CD:",
    "Misc:",
    "Suggestion:",
]


def validate_title_format(
    item_title: str,
) -> bool:
    # Title validation is currently disabled: the early return below skips
    # the prefix check entirely.
    estr = "Skipping PR title validation"
    for _ in range(5):
        warnings.warn(estr)
        print(estr)
        return True

    is_format_correct: bool = False
    for prefix_pattern in valid_title_prefixes:
        cleaned_input: str = item_title.strip()
        if cleaned_input.startswith(prefix_pattern):
            is_format_correct = True
            break

    if not is_format_correct:
        issue_message: str = (
            f"PR title '{item_title}' does not match any "
            f"of the accepted patterns: {valid_title_prefixes}"
        )
        raise ValueError(issue_message)

    return is_format_correct


def _locate_segment_indices(
    content_string: str,
    search_pattern: str,
    expect_numeric_segment: bool = False,
) -> Tuple[int, int]:
    numeric_characters: List[str] = list(map(str, range(10))) + ["."]
    assert bool(content_string)
    assert bool(search_pattern)
    assert search_pattern in content_string
    start_index: int = content_string.find(search_pattern)
    end_index: int = content_string.find("-", start_index)
    if end_index == -1 and not expect_numeric_segment:
        return (start_index, len(content_string))

    if expect_numeric_segment:
        start_index = start_index + len(search_pattern)
        for char_index, current_character in enumerate(content_string[start_index:]):
            if current_character not in numeric_characters:
                break
            end_index = start_index + char_index

    return (start_index, end_index)


def _verify_no_merge_flag(
    content_string: str,
) -> bool:
    assert bool(content_string)
    return "DO-NOT-MERGE" not in content_string


def _verify_merge_dependency(
    content_string: str,
) -> bool:
    assert bool(content_string)
    dependency_marker: str = "MERGE-AFTER-#"

    if dependency_marker not in content_string:
        return True

    start_index, end_index = _locate_segment_indices(
        content_string=content_string,
        search_pattern=dependency_marker,
        expect_numeric_segment=True,
    )
    dependent_item_id: str = content_string[start_index:end_index].strip()
    try:
        dependent_item_id = int(dependent_item_id)
    except ValueError:
        issue_message: str = f"PR number '{dependent_item_id}' is not an integer."
        raise ValueError(issue_message)

    dependent_item_data: Dict[str, Any] = fetch_item_details(
        item_identifier=dependent_item_id
    )
    is_dependent_item_closed: bool = dependent_item_data["state"] == "closed"
    return is_dependent_item_closed


def evaluate_merge_conditions(
    item_details: Dict[str, Any],
) -> bool:
    item_body_content: str = item_details["body"]

    if item_body_content is None:
        return True

    item_body_content = item_body_content.strip().replace(" ", "-").upper()
    item_body_content = f" {item_body_content} "

    condition_outcomes: List[bool] = [
        _verify_no_merge_flag(content_string=item_body_content),
        _verify_merge_dependency(content_string=item_body_content),
    ]

    return all(condition_outcomes)


def validate_item_for_merge(
    item_data: Dict[str, Any],
) -> bool:
    assert set(["number", "title", "state", "body"]).issubset(item_data.keys())
    accumulated_issues: str = ""
    if not validate_title_format(item_title=item_data["title"]):
        accumulated_issues += (
            f"PR #{item_data['number']} is not mergable due to invalid title.\n"
        )

    if not evaluate_merge_conditions(item_details=item_data):
        accumulated_issues += (
            f"PR #{item_data['number']} is not mergable due to merge restrictions"
            " specified in the PR body."
        )

    if accumulated_issues:
        raise ValueError(accumulated_issues.strip())

    return True


def fetch_item_details(
    item_identifier: int,
):
    api_request_url: str = f"https://api.github.com/repos/{REPO_OWNER_USERNAME}/{REPO_NAME}/pulls/{item_identifier}"

    raw_api_response_data: Dict[str, Any] = perform_api_call(target_url=api_request_url)

    extracted_item_info: Dict[str, Any] = {
        "number": raw_api_response_data["number"],
        "title": raw_api_response_data["title"],
        "state": raw_api_response_data["state"],
        "body": raw_api_response_data["body"],
    }

    return extracted_item_info


def process_item_request(requested_item_id: int):
    extracted_item_info: Dict[str, Any] = fetch_item_details(
        item_identifier=requested_item_id
    )
    if not validate_item_for_merge(item_data=extracted_item_info):
        raise ValueError("PR is not mergable.")

    print("PR is mergable.")

    return True


if __name__ == "__main__":
    requested_item_id: int = int(sys.argv[1])
    process_item_request(requested_item_id=requested_item_id)
41 .github/workflows/ci-checks.yml vendored
@@ -1,41 +0,0 @@
name: ci-checks

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

on:
  push:
    branches: [main]
  pull_request:
    types: [review_requested, ready_for_review, synchronize, opened, reopened]
    branches:
      - main
      - test
      - develop

  workflow_dispatch:

permissions:
  contents: read
  id-token: write
  pages: write

jobs:
  ci-checks:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Install Python
        uses: actions/setup-python@v5
      - name: Install uv
        uses: astral-sh/setup-uv@v6
      - name: Install dependencies
        run: |
          uv venv
          uv pip install requests packaging
      - name: Run CI checks
        run: |
          uv run .github/scripts/ci_checks.py
9 .github/workflows/run-benchmarks.yml vendored
@@ -2,12 +2,9 @@ name: run-benchmarks
on:
  workflow_dispatch:
  pull_request:
    branches:
      - main
  push:
    branches:
      - test
      - main

jobs:
  pick-runner:
@@ -37,9 +34,9 @@ jobs:
          toolchain: stable

      - name: Install Python
        uses: actions/setup-python@v5
        uses: actions/setup-python@v4
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        uses: astral-sh/setup-uv@v5
      - name: Setup venv
        run: |
          uv venv
2 .github/workflows/run-unit-tests.yml vendored
@@ -5,8 +5,6 @@ on:
    types: [review_requested, ready_for_review, synchronize, opened, reopened]
    branches:
      - main
      - test
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "rustframe"
authors = ["Palash Tyagi (https://github.com/Magnus167)"]
version = "0.0.1-a.20250805"
version = "0.0.1-a.20250804"
edition = "2021"
license = "GPL-3.0-or-later"
readme = "README.md"
@@ -10,6 +10,7 @@
//! assert_eq!(dates.count().unwrap(), 3);
//! ```
pub mod dateutils;
pub mod spigots;

pub use dateutils::{BDateFreq, BDatesGenerator, BDatesList};
pub use dateutils::{DateFreq, DatesGenerator, DatesList};
243 src/utils/spigots.rs Normal file
@@ -0,0 +1,243 @@
/// Iterator producing successive approximations of π using the Nilakantha series.
pub struct PiSpigot {
    k: u64,
    current: f64,
}

impl Iterator for PiSpigot {
    type Item = f64;

    fn next(&mut self) -> Option<Self::Item> {
        if self.k == 0 {
            self.k = 1;
            self.current = 3.0;
            return Some(self.current);
        }
        let k = self.k as f64;
        let term = 4.0 / ((2.0 * k) * (2.0 * k + 1.0) * (2.0 * k + 2.0));
        if self.k % 2 == 1 {
            self.current += term;
        } else {
            self.current -= term;
        }
        self.k += 1;
        Some(self.current)
    }
}

/// Generator yielding approximations of π indefinitely.
pub fn pi_spigot() -> PiSpigot {
    PiSpigot { k: 0, current: 0.0 }
}

/// Return the first `n` approximations of π as a vector.
pub fn pi_values(n: usize) -> Vec<f64> {
    pi_spigot().take(n).collect()
}

/// Generator yielding approximations of τ = 2π indefinitely.
pub fn tau_spigot() -> impl Iterator<Item = f64> {
    pi_spigot().map(|v| v * 2.0)
}

/// Return the first `n` approximations of τ as a vector.
pub fn tau_values(n: usize) -> Vec<f64> {
    tau_spigot().take(n).collect()
}

/// Iterator producing successive approximations of the Euler-Mascheroni constant γ.
pub struct GammaSpigot {
    n: u64,
    harmonic: f64,
}

impl Iterator for GammaSpigot {
    type Item = f64;

    fn next(&mut self) -> Option<Self::Item> {
        self.n += 1;
        self.harmonic += 1.0 / self.n as f64;
        let value = self.harmonic - (self.n as f64).ln();
        Some(value)
    }
}

/// Generator yielding approximations of γ indefinitely.
pub fn gamma_spigot() -> GammaSpigot {
    GammaSpigot {
        n: 0,
        harmonic: 0.0,
    }
}

/// Return the first `n` approximations of γ as a vector.
pub fn gamma_values(n: usize) -> Vec<f64> {
    gamma_spigot().take(n).collect()
}

/// Iterator producing successive approximations of e using the series Σ 1/n!.
pub struct ESpigot {
    n: u64,
    sum: f64,
    factorial: f64,
}

impl Iterator for ESpigot {
    type Item = f64;

    fn next(&mut self) -> Option<Self::Item> {
        if self.n == 0 {
            self.n = 1;
            self.sum = 1.0;
            self.factorial = 1.0;
            return Some(self.sum);
        }
        self.factorial *= self.n as f64;
        self.sum += 1.0 / self.factorial;
        self.n += 1;
        Some(self.sum)
    }
}

/// Generator yielding approximations of e indefinitely.
pub fn e_spigot() -> ESpigot {
    ESpigot {
        n: 0,
        sum: 0.0,
        factorial: 1.0,
    }
}

/// Return the first `n` approximations of e as a vector.
pub fn e_values(n: usize) -> Vec<f64> {
    e_spigot().take(n).collect()
}

/// Iterator producing successive approximations of √2 using Newton's method.
pub struct Sqrt2Spigot {
    x: f64,
    first: bool,
}

impl Iterator for Sqrt2Spigot {
    type Item = f64;

    fn next(&mut self) -> Option<Self::Item> {
        if self.first {
            self.first = false;
            Some(self.x)
        } else {
            self.x = 0.5 * (self.x + 2.0 / self.x);
            Some(self.x)
        }
    }
}

/// Generator yielding approximations of √2 indefinitely.
pub fn sqrt2_spigot() -> Sqrt2Spigot {
    Sqrt2Spigot {
        x: 1.0,
        first: true,
    }
}

/// Return the first `n` approximations of √2 as a vector.
pub fn sqrt2_values(n: usize) -> Vec<f64> {
    sqrt2_spigot().take(n).collect()
}

fn look_and_say(s: &str) -> String {
    let mut chars = s.chars().peekable();
    let mut result = String::new();
    while let Some(c) = chars.next() {
        let mut count = 1;
        while let Some(&next) = chars.peek() {
            if next == c {
                chars.next();
                count += 1;
            } else {
                break;
            }
        }
        result.push_str(&format!("{}{}", count, c));
    }
    result
}

/// Iterator producing successive ratios of lengths of the look-and-say sequence.
pub struct ConwaySpigot {
    current: String,
}

impl Iterator for ConwaySpigot {
    type Item = f64;

    fn next(&mut self) -> Option<Self::Item> {
        let next = look_and_say(&self.current);
        let ratio = next.len() as f64 / self.current.len() as f64;
        self.current = next;
        Some(ratio)
    }
}

/// Generator yielding approximations of Conway's constant λ indefinitely.
pub fn conway_spigot() -> ConwaySpigot {
    ConwaySpigot {
        current: "1".to_string(),
    }
}

/// Return the first `n` approximations of Conway's constant as a vector.
pub fn conway_values(n: usize) -> Vec<f64> {
    conway_spigot().take(n).collect()
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::f64::consts::{E, PI, TAU};

    #[test]
    fn test_pi_spigot() {
        let vals = pi_values(1000);
        let approx = vals.last().cloned().unwrap();
        assert!((approx - PI).abs() < 1e-8);
    }

    #[test]
    fn test_tau_spigot() {
        let vals = tau_values(1000);
        let approx = vals.last().cloned().unwrap();
        assert!((approx - TAU).abs() < 1e-8);
    }

    #[test]
    fn test_gamma_spigot() {
        let vals = gamma_values(100000);
        let approx = vals.last().cloned().unwrap();
        let gamma_true = 0.5772156649015329_f64;
        assert!((approx - gamma_true).abs() < 1e-5);
    }

    #[test]
    fn test_e_spigot() {
        let vals = e_values(10);
        let approx = vals.last().cloned().unwrap();
        assert!((approx - E).abs() < 1e-6);
    }

    #[test]
    fn test_sqrt2_spigot() {
        let vals = sqrt2_values(6);
        let approx = vals.last().cloned().unwrap();
        assert!((approx - 2_f64.sqrt()).abs() < 1e-12);
    }

    #[test]
    fn test_conway_spigot() {
        let vals = conway_values(25);
        let approx = vals.last().cloned().unwrap();
        let conway = 1.3035772690342964_f64;
        assert!((approx - conway).abs() < 1e-2);
    }
}
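For orientation, here is a minimal usage sketch (not part of the diff) of the spigot iterators added in src/utils/spigots.rs. The import path rustframe::utils::spigots is an assumption inferred from the file location; the compare view does not show how the module is actually re-exported.

use rustframe::utils::spigots::{gamma_spigot, pi_values, sqrt2_spigot};

fn main() {
    // Collect the first ten Nilakantha partial sums; the last element is the
    // closest approximation of pi in the batch.
    let pis = pi_values(10);
    println!("pi    ~ {:.10}", pis.last().unwrap());

    // The spigots are ordinary lazy iterators, so standard adapters apply:
    // nth(999) pulls the 1000th partial estimate of the Euler-Mascheroni constant.
    let gamma = gamma_spigot().nth(999).unwrap();
    println!("gamma ~ {:.5}", gamma);

    // Newton's iteration for sqrt(2) converges in a handful of steps.
    let root2 = sqrt2_spigot().nth(5).unwrap();
    println!("sqrt2 ~ {:.12}", root2);
}

The *_values(n) helpers exercised in the tests above are thin take(n).collect() wrappers over these same iterators.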