Compare commits

...

5 Commits

5 changed files with 404 additions and 68 deletions

View File

@ -0,0 +1,360 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# ! uv pip install E:\\Work\\ruzt\\msyrs --upgrade"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Import Python packages\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import macrosynergy\n",
"import pandas as pd\n",
"import numpy as np\n",
"import polars as pl\n",
"import os\n",
"import time\n",
"\n",
"from macrosynergy.panel import view_timelines\n",
"from macrosynergy.management.types import QuantamentalDataFrame\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Import Python bindings - `msyrs`\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"import msyrs"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>bdates</th>\n",
" <th>0</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>2000-01-03</td>\n",
" <td>2000-01-03</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2000-01-10</td>\n",
" <td>2000-01-10</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>2000-01-17</td>\n",
" <td>2000-01-17</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>2000-01-24</td>\n",
" <td>2000-01-24</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>2000-01-31</td>\n",
" <td>2000-01-31</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1056</th>\n",
" <td>2020-03-30</td>\n",
" <td>2020-03-30</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1057</th>\n",
" <td>2020-04-06</td>\n",
" <td>2020-04-06</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1058</th>\n",
" <td>2020-04-13</td>\n",
" <td>2020-04-13</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1059</th>\n",
" <td>2020-04-20</td>\n",
" <td>2020-04-20</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1060</th>\n",
" <td>2020-04-27</td>\n",
" <td>2020-04-27</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>1061 rows × 2 columns</p>\n",
"</div>"
],
"text/plain": [
" bdates 0\n",
"0 2000-01-03 2000-01-03\n",
"1 2000-01-10 2000-01-10\n",
"2 2000-01-17 2000-01-17\n",
"3 2000-01-24 2000-01-24\n",
"4 2000-01-31 2000-01-31\n",
"... ... ...\n",
"1056 2020-03-30 2020-03-30\n",
"1057 2020-04-06 2020-04-06\n",
"1058 2020-04-13 2020-04-13\n",
"1059 2020-04-20 2020-04-20\n",
"1060 2020-04-27 2020-04-27\n",
"\n",
"[1061 rows x 2 columns]"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='W').to_pandas()\n",
"y = pd.Series(pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='W-MON'))\n",
"\n",
"pd.concat([x, y], axis=1)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Results for M\t & \tBMS\t are exactly the same\n",
"Results for Q\t & \tBQS\t are exactly the same\n",
"Results for W\t & \tW-MON\t are exactly the same\n",
"Results for WF\t & \tW-FRI\t are exactly the same\n"
]
}
],
"source": [
"for rs_freq, pd_freq in [('M', 'BMS'), ('Q', 'BQS'), ('W', 'W-MON'), ('WF', 'W-FRI')]:\n",
"\n",
"\n",
" x = msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq=rs_freq).to_pandas()\n",
" y = pd.Series(pd.bdate_range(start='2000-01-01', end='2020-05-01', freq=pd_freq))\n",
"\n",
" e = x == y\n",
" res = e.all()\n",
" non_matching_df = pd.concat([x[~e], y[~e]], axis=1)\n",
" assert res, f\"Results for {rs_freq}\\t and \\t{pd_freq}\\t are not the same\\n{non_matching_df}\"\n",
" print(f\"Results for {rs_freq}\\t & \\t{pd_freq}\\t are exactly the same\")\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"23.5 μs ± 1.02 μs per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n",
"67.4 μs ± 979 ns per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n",
"1.97 ms ± 57.3 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"4.65 ms ± 170 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"28.3 ms ± 898 μs per loop (mean ± std. dev. of 7 runs, 10 loops each)\n",
"93.8 ms ± 2.02 ms per loop (mean ± std. dev. of 7 runs, 10 loops each)\n"
]
}
],
"source": [
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='D')\n",
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='1971-01-01', end_date='2040-05-01', freq='D')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='2000-01-01', end_date='2020-05-01', freq='D')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='1971-01-01', end_date='2040-05-01', freq='D')\n",
"%timeit pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='B')\n",
"%timeit pd.bdate_range(start='1971-01-01', end='2040-05-01', freq='B')"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"7.95 μs ± 146 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"17.9 μs ± 108 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"1.73 ms ± 20.8 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"4 ms ± 69.3 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"5.69 ms ± 139 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"19.1 ms ± 268 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n"
]
}
],
"source": [
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='WF')\n",
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='1971-01-01', end_date='2040-05-01', freq='WF')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='2000-01-01', end_date='2020-05-01', freq='WF')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='1971-01-01', end_date='2040-05-01', freq='WF')\n",
"%timeit pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='W-FRI')\n",
"%timeit pd.bdate_range(start='1971-01-01', end='2040-05-01', freq='W-FRI')"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"6.9 μs ± 126 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"13.1 μs ± 93.3 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"1.73 ms ± 29.3 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"4.2 ms ± 81.5 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"931 μs ± 14.2 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"3.05 ms ± 47.5 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n"
]
}
],
"source": [
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='ME')\n",
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='1971-01-01', end_date='2040-05-01', freq='ME')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='2000-01-01', end_date='2020-05-01', freq='ME')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='1971-01-01', end_date='2040-05-01', freq='ME')\n",
"%timeit pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='BME')\n",
"%timeit pd.bdate_range(start='1971-01-01', end='2040-05-01', freq='BME')"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3.65 μs ± 69.1 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"4.78 μs ± 38.7 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"1.73 ms ± 122 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"4.16 ms ± 286 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"340 μs ± 11.3 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"1.1 ms ± 11.5 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n"
]
}
],
"source": [
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='Q')\n",
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='1971-01-01', end_date='2040-05-01', freq='Q')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='2000-01-01', end_date='2020-05-01', freq='Q')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='1971-01-01', end_date='2040-05-01', freq='Q')\n",
"%timeit pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='BQS')\n",
"%timeit pd.bdate_range(start='1971-01-01', end='2040-05-01', freq='BQS')"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3.21 μs ± 83.4 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"3.66 μs ± 198 ns per loop (mean ± std. dev. of 7 runs, 100,000 loops each)\n",
"2.67 ms ± 459 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"3.71 ms ± 143 μs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"98.7 μs ± 1.47 μs per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n",
"289 μs ± 15.3 μs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n"
]
}
],
"source": [
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='2000-01-01', end_date='2020-05-01', freq='YE')\n",
"%timeit msyrs.utils.get_bdates_series_default_opt(start_date='1971-01-01', end_date='2040-05-01', freq='YE')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='2000-01-01', end_date='2020-05-01', freq='YE')\n",
"%timeit msyrs.utils.get_bdates_series_default_pl(start_date='1971-01-01', end_date='2040-05-01', freq='YE')\n",
"%timeit pd.bdate_range(start='2000-01-01', end='2020-05-01', freq='BYE')\n",
"%timeit pd.bdate_range(start='1971-01-01', end='2040-05-01', freq='BYE')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

View File

@ -5,18 +5,31 @@ use pyo3_polars::{PyDataFrame, PySeries};
 #[allow(deprecated)]
 #[pymodule]
 pub fn utils(_py: Python, m: &PyModule) -> PyResult<()> {
-    m.add_function(wrap_pyfunction!(get_bdates_series_default, m)?)?;
+    m.add_function(wrap_pyfunction!(get_bdates_series_default_pl, m)?)?;
+    m.add_function(wrap_pyfunction!(get_bdates_series_default_opt, m)?)?;
     Ok(())
 }
 
 #[pyfunction]
-pub fn get_bdates_series_default(
+pub fn get_bdates_series_default_pl(
     start_date: String,
     end_date: String,
     freq: Option<String>,
 ) -> PyResult<PySeries> {
     Ok(PySeries(
-        crate::utils::dateutils::get_bdates_series_default(start_date, end_date, freq)
+        crate::utils::dateutils::get_bdates_series_default_pl(start_date, end_date, freq)
+            .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(format!("{}", e)))?,
+    ))
+}
+
+#[pyfunction]
+pub fn get_bdates_series_default_opt(
+    start_date: String,
+    end_date: String,
+    freq: Option<String>,
+) -> PyResult<PySeries> {
+    Ok(PySeries(
+        crate::utils::dateutils::get_bdates_series_default_opt(start_date, end_date, freq)
             .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(format!("{}", e)))?,
     ))
 }

View File

@ -51,6 +51,8 @@ class panel:
     def linear_composite(*args, **kwargs) -> DataFrame: ...
 
 class utils:
-    __all__ = ["get_bdates_series_default"]
+    __all__ = ["get_bdates_series_default", "get_bdates_series_default_opt"]
     @staticmethod
-    def get_bdates_series_default(*args, **kwargs) -> Series: ...
+    def get_bdates_series_default_pl(*args, **kwargs) -> Series: ...
+    @staticmethod
+    def get_bdates_series_default_opt(*args, **kwargs) -> Series: ...

View File

@ -1,3 +1,5 @@
+use crate::utils::bdates;
+use crate::utils::bdates::BDateFreq;
 use chrono::NaiveDate;
 use chrono::{Datelike, Weekday};
 use polars::prelude::*;
@ -57,61 +59,6 @@ pub fn get_bdates_list(
     Ok(business_days)
 }
 
-#[derive(Debug, Clone, Copy)]
-pub enum BDateFreq {
-    Daily,
-    WeeklyMonday,
-    MonthStart,
-    QuarterStart,
-    YearStart,
-    MonthEnd,
-    QuarterEnd,
-    WeeklyFriday,
-    YearEnd,
-}
-
-impl BDateFreq {
-    pub fn from_string(freq: String) -> Result<Self, Box<dyn Error>> {
-        // use `from_str` to convert the string to a BDateFreq enum
-        Self::from_str(&freq)
-    }
-    pub fn from_str(freq: &str) -> Result<Self, Box<dyn Error>> {
-        match freq {
-            "D" => Ok(BDateFreq::Daily),
-            "W" => Ok(BDateFreq::WeeklyMonday),
-            "M" => Ok(BDateFreq::MonthStart),
-            "Q" => Ok(BDateFreq::QuarterStart),
-            "A" => Ok(BDateFreq::YearStart),
-            "ME" => Ok(BDateFreq::MonthEnd),
-            "QE" => Ok(BDateFreq::QuarterEnd),
-            "WF" => Ok(BDateFreq::WeeklyFriday),
-            "YE" => Ok(BDateFreq::YearEnd),
-            _ => Err("Invalid frequency specified".into()),
-        }
-    }
-
-    pub fn agg_type(&self) -> AggregationType {
-        match self {
-            BDateFreq::Daily
-            | BDateFreq::WeeklyMonday
-            | BDateFreq::MonthStart
-            | BDateFreq::QuarterStart
-            | BDateFreq::YearStart => AggregationType::Start,
-            BDateFreq::WeeklyFriday
-            | BDateFreq::MonthEnd
-            | BDateFreq::QuarterEnd
-            | BDateFreq::YearEnd => AggregationType::End,
-        }
-    }
-}
-
-#[derive(Debug, Clone, Copy)]
-pub enum AggregationType {
-    Start, // Indicates picking the first date in a group.
-    End,   // Indicates picking the last date in a group.
-}
-
-// Map a BDateFreq to an AggregationType.
 fn compute_group_key(d: NaiveDate, freq: BDateFreq) -> String {
     match freq {
         // For Daily, each date is its own group.
@ -134,19 +81,32 @@ fn compute_group_key(d: NaiveDate, freq: BDateFreq) -> String {
         BDateFreq::YearStart | BDateFreq::YearEnd => format!("{}", d.year()),
     }
 }
 
-pub fn get_bdates_series_default(
+pub fn get_bdates_series_default_opt(
     start_date: String,
     end_date: String,
     freq: Option<String>,
 ) -> Result<Series, Box<dyn Error>> {
     let freq = freq.unwrap_or_else(|| "D".to_string());
     let freq = BDateFreq::from_str(&freq)?;
-    get_bdates_series(start_date, end_date, freq)
+    let series = Series::new(
+        "bdates".into(),
+        bdates::get_bdates_list_with_freq(&start_date, &end_date, freq)?,
+    );
+    Ok(series)
+}
+
+pub fn get_bdates_series_default_pl(
+    start_date: String,
+    end_date: String,
+    freq: Option<String>,
+) -> Result<Series, Box<dyn Error>> {
+    let freq = freq.unwrap_or_else(|| "D".to_string());
+    let freq = BDateFreq::from_str(&freq)?;
+    get_bdates_series_pl(start_date, end_date, freq)
 }
 
 /// Get the business dates between two dates as a Series.
-pub fn get_bdates_series(
+pub fn get_bdates_series_pl(
     start_date: String,
     end_date: String,
     freq: BDateFreq,
@ -163,8 +123,8 @@ pub fn get_bdates_series(
     ])?;
     let gb = df.lazy().group_by(["group"]);
     let aggx = match freq.agg_type() {
-        AggregationType::Start => gb.agg([col("bdates").first()]),
-        AggregationType::End => gb.agg([col("bdates").last()]),
+        bdates::AggregationType::Start => gb.agg([col("bdates").first()]),
+        bdates::AggregationType::End => gb.agg([col("bdates").last()]),
     };
     let result = aggx.collect()?;
     let result = result
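
The optimised `get_bdates_series_default_opt` path above delegates to `bdates::get_bdates_list_with_freq`, whose implementation is not shown in this compare. As a rough illustration of why a direct pass over the calendar can beat the polars `group_by` route benchmarked in the notebook, here is a minimal, hypothetical sketch in plain Rust + chrono: walk the date range once, skip weekends, and keep only the first or last business day of each group. The names `bdates_with_freq`, `group_key` and `Agg` are illustrative only and do not correspond to the msyrs internals.

```rust
// Hypothetical sketch, not the msyrs implementation: reduce business days to
// period starts/ends in a single pass, with no intermediate DataFrame.
use chrono::{Datelike, NaiveDate, Weekday};

#[derive(Clone, Copy)]
enum Agg {
    Start, // keep the first business day of each group
    End,   // keep the last business day of each group
}

// Map a date to a coarse group key for the given frequency code
// (codes mirror the ones accepted by BDateFreq::from_str above).
fn group_key(d: NaiveDate, freq: &str) -> (i32, u32) {
    match freq {
        "M" | "ME" => (d.year(), d.month()),
        "Q" | "QE" => (d.year(), (d.month() - 1) / 3),
        "A" | "YE" => (d.year(), 0),
        "W" | "WF" => (d.iso_week().year(), d.iso_week().week()),
        _ => (d.year(), d.ordinal()), // "D": every date is its own group
    }
}

fn bdates_with_freq(start: NaiveDate, end: NaiveDate, freq: &str, agg: Agg) -> Vec<NaiveDate> {
    let mut out: Vec<NaiveDate> = Vec::new();
    let mut current: Option<(i32, u32)> = None;
    let mut d = start;
    while d <= end {
        // Weekends are skipped; holiday calendars are out of scope for this sketch.
        if !matches!(d.weekday(), Weekday::Sat | Weekday::Sun) {
            let key = group_key(d, freq);
            match (current, agg) {
                (Some(k), Agg::Start) if k == key => {} // not the first of its group
                (Some(k), Agg::End) if k == key => *out.last_mut().unwrap() = d,
                _ => out.push(d), // first business day of a new group
            }
            current = Some(key);
        }
        d = d.succ_opt().expect("date overflow");
    }
    out
}

fn main() {
    let start = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
    let end = NaiveDate::from_ymd_opt(2000, 3, 31).unwrap();
    // First business day per month in Q1 2000: 2000-01-03, 2000-02-01, 2000-03-01.
    println!("{:?}", bdates_with_freq(start, end, "M", Agg::Start));
    // Last business day per month: 2000-01-31, 2000-02-29, 2000-03-31.
    println!("{:?}", bdates_with_freq(start, end, "ME", Agg::End));
}
```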

View File

@ -1,3 +1,4 @@
-pub mod qdf;
+pub mod bdates;
+pub mod dateutils;
 pub mod misc;
-pub mod dateutils;
+pub mod qdf;