Compare commits


15 Commits

Author SHA1 Message Date
Palash Tyagi 1275e7c2c9 refactor: rename apply_blacklist_lazy to apply_blacklist and update documentation 2025-04-21 01:35:40 +01:00
Palash Tyagi 0e4d58a9d8 fix: update .gitattributes to correctly mark all notebook files as linguist-vendored 2025-04-20 05:17:19 +01:00
Palash Tyagi d16764944b chore: add .gitattributes to mark notebooks as linguist-vendored 2025-04-20 02:53:12 +01:00
Palash Tyagi 80603aa951 Implement code changes to enhance functionality and improve performance 2025-04-17 00:02:04 +01:00
Palash Tyagi fb2efa99ac wip: apply blacklist 2025-04-17 00:01:58 +01:00
Palash Tyagi 178de83d1a testing... 2025-04-16 22:36:03 +01:00
Palash Tyagi ee531deb7a update create_blacklist_from_qdf function signature to accept metrics as a vector 2025-04-16 19:27:46 +01:00
Palash Tyagi 0443906cc9 feat: refactor create_blacklist_from_qdf to accept multiple metrics and improve null handling 2025-04-16 19:27:35 +01:00
Palash Tyagi f0a9242d10 rerean notebook 2025-04-16 01:37:52 +01:00
Palash Tyagi df22667d63 updated notebook 2025-04-16 01:37:24 +01:00
Palash Tyagi 7d4c198067 feat: update create_blacklist_from_qdf function signature to include group_by_cid and blacklist_name parameters 2025-04-16 01:36:56 +01:00
Palash Tyagi 25192e425d completed wdf.columns implementation 2025-04-16 01:36:48 +01:00
Palash Tyagi e602b8b2b4 fix: change code block syntax to ignore in documentation examples 2025-04-16 01:35:59 +01:00
Palash Tyagi 5d2ff3b88d fix: improve error messages in check_quantamental_dataframe function 2025-04-16 01:35:48 +01:00
Palash Tyagi 3d2afa01a8 feat: add build and install scripts for maturin integration 2025-04-15 21:28:09 +01:00
8 changed files with 416 additions and 2318 deletions

.gitattributes (vendored, new file, +1)

@@ -0,0 +1 @@
notebooks/** linguist-vendored
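The `linguist-vendored` attribute keeps everything under `notebooks/` out of the repository's language statistics, so large generated notebook files no longer dominate the language breakdown.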

File diff suppressed because one or more lines are too long

scripts/unix/build.sh (new file, +33)

@@ -0,0 +1,33 @@
#!/bin/bash
# Exit immediately if a command exits with a non-zero status
set -e
# Run "maturin --help". If it fails, print an error message and exit.
if ! maturin --help > /dev/null 2>&1; then
echo "Failed to run maturin --help" >&2
exit 1
fi
# Delete any existing build directory and create a new one.
rm -rf ./build
mkdir -p ./build
# Copy ./src/msyrs.pyi to ./msyrs.pyi.
cp ./src/msyrs.pyi ./msyrs.pyi
# Build using maturin.
maturin build --release --sdist --out ./build/
# Get the first wheel file found in the build directory.
whl_file=$(ls ./build/*.whl 2>/dev/null | head -n 1)
if [ -z "$whl_file" ]; then
echo "No wheel file found in ./build" >&2
exit 1
fi
# Rename the wheel file from .whl to .zip.
base_name="${whl_file%.whl}"
mv "$whl_file" "${base_name}.zip"
# Delete the temporary .pyi file.
rm ./msyrs.pyi
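Note that a `.whl` is already a zip archive, so the final rename only changes the extension. The relative paths assume the script is run from the repository root, e.g. `bash scripts/unix/build.sh`, after which the artifacts land in `./build/`.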

scripts/unix/install.sh (new file, +20)

@@ -0,0 +1,20 @@
#!/bin/bash
set -e
# Ensure maturin is installed. For example, you can install it via:
# pip install maturin
# Run "maturin --help". If it fails, print an error message and exit.
if ! maturin --help > /dev/null 2>&1; then
echo "Failed to run maturin --help" >&2
exit 1
fi
# Copy ./src/msyrs.pyi to the current directory as msyrs.pyi
cp ./src/msyrs.pyi ./msyrs.pyi
# Run maturin develop in release mode.
maturin develop --release
# Delete the temporary msyrs.pyi file.
rm ./msyrs.pyi
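`maturin develop --release` compiles the extension and installs it into the currently active Python environment, so activate the target virtualenv before running this script.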

(modified file: pyo3 bindings)

@@ -1,4 +1,3 @@
use pyo3::types::IntoPyDict;
use pyo3::{prelude::*, types::PyDict};
use pyo3_polars::{PyDataFrame, PySeries};
@@ -37,9 +36,19 @@ pub fn get_bdates_series_default_opt(
}
#[allow(deprecated)]
#[pyfunction(signature = (df, metric = None))]
pub fn create_blacklist_from_qdf(df: PyDataFrame, metric: Option<String>) -> PyResult<PyObject> {
let result = crate::utils::qdf::blacklist::create_blacklist_from_qdf(&df.into(), metric)
#[pyfunction(signature = (df, group_by_cid=None, blacklist_name=None, metrics=None))]
pub fn create_blacklist_from_qdf(
df: PyDataFrame,
group_by_cid: Option<bool>,
blacklist_name: Option<String>,
metrics: Option<Vec<String>>,
) -> PyResult<PyObject> {
let result = crate::utils::qdf::blacklist::create_blacklist_from_qdf(
&df.into(),
group_by_cid,
blacklist_name,
metrics,
)
.map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(format!("{}", e)))?;
Python::with_gil(|py| {
let dict = PyDict::new(py);

src/download/jpmaqsdownload.rs (modified)

@@ -58,7 +58,7 @@ fn all_jpmaq_expressions(expressions: Vec<String>) -> bool {
///
/// Example Usage:
///
/// ```rust
/// ```ignore
/// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
/// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
///
@@ -102,7 +102,7 @@ impl Default for JPMaQSDownloadGetIndicatorArgs {
/// Struct for downloading data from the JPMaQS data from JPMorgan DataQuery API.
///
/// ## Example Usage
/// ```rust
/// ```ignore
/// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
/// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
/// use polars::prelude::*;
@@ -277,7 +277,7 @@ impl JPMaQSDownload {
///
/// Usage:
///
/// ```rust
/// ```ignore
/// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
/// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
/// let mut jpamqs_download = JPMaQSDownload::default();

src/utils/qdf/blacklist.rs (modified)

@@ -1,24 +1,81 @@
use crate::utils::bdates::{self, get_bdates_list_with_freq, BDateFreq};
use crate::utils::dateutils::{get_bdates_series_default_opt, get_min_max_real_dates};
use crate::utils::bdates::{get_bdates_list_with_freq, BDateFreq};
use crate::utils::dateutils::get_min_max_real_dates;
use crate::utils::misc::get_cid;
use crate::utils::qdf::core::check_quantamental_dataframe;
use chrono::NaiveDate;
use polars::prelude::*;
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};
use std::error::Error;
use crate::utils::qdf::get_unique_metrics;
/// Wrapper around a `BTreeMap` mapping blacklist keys to inclusive `(start_date, end_date)` ranges.
#[derive(Debug, Clone)]
pub struct Blacklist {
pub blacklist: BTreeMap<String, (String, String)>,
}
// Conversion helper: consume the ordered map and return a plain `HashMap`.
impl Blacklist {
pub fn into_hashmap(self) -> HashMap<String, (String, String)> {
self.blacklist.into_iter().collect()
}
}
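// Usage sketch (illustrative): keys follow the "{ticker}_{name}_{index}"
// pattern produced by create_blacklist_from_qdf below, e.g. "A_BLACKLIST_0".
//
// let mut map: BTreeMap<String, (String, String)> = BTreeMap::new();
// map.insert(
//     "A_BLACKLIST_0".to_string(),
//     ("2023-01-02".to_string(), "2023-01-03".to_string()),
// );
// let blk = Blacklist { blacklist: map };
// let unordered = blk.into_hashmap(); // same entries, ordering dropped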
/// Apply a blacklist to a Quantamental DataFrame.
///
/// * `blacklist` is a map from any ticker-like key to a tuple of
/// `(start_date, end_date)` in **inclusive** `"YYYY-MM-DD"` format.
/// * `metrics` - if `None`, every metric from `get_unique_metrics(df)` is used.
/// * `group_by_cid` - `Some(false)` is not implemented yet.
pub fn apply_blacklist(
df: &mut DataFrame,
blacklist: &BTreeMap<String, (String, String)>,
metrics: Option<Vec<String>>,
group_by_cid: Option<bool>,
) -> Result<DataFrame, Box<dyn std::error::Error>> {
check_quantamental_dataframe(df)?;
// dataframe is like:
// | cid | xcat | real_date | metric1 | metric2 |
// |-----|------|-----------|---------|---------|
// | A | B | 2023-01-01| 1.0 | 2.0 |
// | A | B | 2023-01-02| 1.0 | 2.0 |
// | A | C | 2023-01-01| 1.0 | 2.0 |
// | A | C | 2023-01-02| 1.0 | 2.0 |
// | D | E | 2023-01-01| 1.0 | 2.0 |
// | D | E | 2023-01-02| 1.0 | 2.0 |
// (the `real_date` column holds `NaiveDate` values)
// blacklist is like:
// {'A_B_1': ('2023-01-02', '2023-01-03'),
// 'A_B_2': ('2023-01-04', '2023-01-05'),
// 'A_C_1': ('2023-01-02', '2023-01-03'), }
// get_cid('A_B_1') = 'A'
// get_cid('A_B_2') = 'A'
// get_cid('D_E_1') = 'D'
// TODO: the masking itself is not implemented yet (see the "wip: apply
// blacklist" commit); the input DataFrame is currently returned unchanged.
Ok(df.clone())
}
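// Call-shape sketch (assumes `qdf` is any valid Quantamental DataFrame):
//
// let mut bl: BTreeMap<String, (String, String)> = BTreeMap::new();
// bl.insert(
//     "A_BLACKLIST_0".into(),
//     ("2023-01-02".into(), "2023-01-03".into()),
// );
// let filtered = apply_blacklist(&mut qdf, &bl, None, None)?;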
/// Create a blacklist from a Quantamental DataFrame.
/// The blacklist is a mapping of tickers to date ranges where the specified metrics are null or NaN.
/// # Arguments:
/// * `df` - The Quantamental DataFrame.
/// * `group_by_cid` - If true, group the blacklist by `cid`. Defaults to true.
/// * `blacklist_name` - The name of the blacklist. Defaults to "BLACKLIST".
/// * `metrics` - The metrics to check for null or NaN values. If None, all metrics are used.
pub fn create_blacklist_from_qdf(
df: &DataFrame,
metric: Option<String>,
// ) -> Result<HashMap<String, (String, String)>, Box<dyn Error>> {
) -> Result<HashMap<String, Vec<String>>, Box<dyn Error>> {
// Verify that the DataFrame follows the Quantamental structure.
group_by_cid: Option<bool>,
blacklist_name: Option<String>,
metrics: Option<Vec<String>>,
) -> Result<BTreeMap<String, (String, String)>, Box<dyn Error>> {
check_quantamental_dataframe(df)?;
let mut blacklist: HashMap<String, Vec<String>> = HashMap::new();
// let mut blacklist: HashMap<String, (String, String)> = HashMap::new();
let mut blk: HashMap<String, Vec<NaiveDate>> = HashMap::new();
// Use the provided metric or default to "value".
let metric = metric.unwrap_or_else(|| "value".into());
let metrics = metrics.unwrap_or_else(|| get_unique_metrics(df).unwrap());
let blacklist_name = blacklist_name.unwrap_or_else(|| "BLACKLIST".into());
let group_by_cid = group_by_cid.unwrap_or(true);
let (min_date, max_date) = get_min_max_real_dates(df, "real_date".into())?;
let min_date_str = min_date.format("%Y-%m-%d").to_string();
@@ -30,19 +87,26 @@ pub fn create_blacklist_from_qdf(
BDateFreq::Daily,
)?;
// filter df
let null_mask = df.column(metric.as_str())?.is_null();
let nan_mask = df.column(metric.as_str())?.is_nan()?;
let null_mask = null_mask | nan_mask;
let df = df.filter(&null_mask)?;
// If none of the metrics contain a null or NaN value, return an empty blacklist.
// (Both checks are needed: the filter below also treats NaN as missing.)
if !metrics.iter().any(|metric| {
df.column(metric)
.map(|col| {
col.is_null().any() || col.is_nan().map(|mask| mask.any()).unwrap_or(false)
})
.unwrap_or(false)
}) {
return Ok(BTreeMap::new());
}
// let null_mask = get_nan_mask(df, metrics)?;
// let df = df.filter(&null_mask)?.clone();
let df = df
.clone()
.lazy()
.filter(
col(metric.as_str())
.is_null()
.or(col(metric.as_str()).is_nan()),
)
.with_columns([
// OR the per-metric null/NaN checks horizontally so a single boolean
// "null_mask" column is produced instead of one aliased column per metric.
any_horizontal([cols(metrics.clone()).is_null(), cols(metrics).is_nan()])?.alias("null_mask"),
])
.filter(col("null_mask"))
// an empty filtered frame simply yields an empty blacklist below
.sort(
["cid", "xcat"],
SortMultipleOptions::default().with_maintain_order(true),
@@ -70,27 +134,139 @@ pub fn create_blacklist_from_qdf(
let rdt = get_vec_of_vec_of_dates_from_df(df)?;
let mut blk: HashMap<String, Vec<String>> = HashMap::new();
for (tkr, dates) in ticker_vec.iter().zip(rdt.iter()) {
blacklist.insert(tkr.to_string(), dates.clone());
if group_by_cid {
// Merge the dates of every ticker that shares this cid.
let cid = get_cid(tkr.clone())?;
blk.entry(cid).or_default().extend(dates.iter().cloned());
} else {
blk.insert(tkr.to_string(), dates.clone());
}
}
for (_key, vals) in blk.iter_mut() {
// order is important - dedup depends on the vec being sorted
vals.sort();
vals.dedup();
}
Ok(blacklist)
}
fn convert_dates_list_to_date_ranges(
blacklist: HashMap<String, Vec<String>>,
all_bdates: Vec<NaiveDate>,
) -> HashMap<String, (String, String)> {
let blk = HashMap::new();
let bdates = all_bdates
let all_bdates_strs = all_bdates
.iter()
.map(|date| date.format("%Y-%m-%d").to_string())
.collect::<Vec<String>>();
//
let mut blacklist: HashMap<String, (String, String)> = HashMap::new();
for (tkr, dates) in blk.iter() {
let date_ranges = convert_dates_list_to_date_ranges(dates.clone(), all_bdates_strs.clone());
for (rng_idx, (start_date, end_date)) in date_ranges.iter() {
let range_key = format!("{}_{}_{}", tkr, blacklist_name.clone(), rng_idx);
blacklist.insert(range_key, (start_date.clone(), end_date.clone()));
}
}
// Ok(blacklist)
blk
let mut btree_map: BTreeMap<String, (String, String)> = BTreeMap::new();
for (key, (start_date, end_date)) in blacklist.iter() {
btree_map.insert(key.clone(), (start_date.clone(), end_date.clone()));
}
Ok(btree_map)
}
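// Round-trip sketch (assumes `qdf` is a Quantamental DataFrame with nulls or
// NaNs in some metric): the defaults give group_by_cid = true and the name
// "BLACKLIST", so keys look like "<cid>_BLACKLIST_<range_idx>".
//
// let ranges = create_blacklist_from_qdf(&qdf, None, None, None)?;
// for (key, (start, end)) in &ranges {
//     // e.g. "A_BLACKLIST_0: 2023-01-02 -> 2023-01-03"
//     println!("{}: {} -> {}", key, start, end);
// }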
/// Get a mask of NaN values for the specified metrics in the DataFrame.
#[allow(dead_code)]
fn get_nan_mask(
df: &DataFrame,
metrics: Vec<String>,
) -> Result<ChunkedArray<BooleanType>, Box<dyn Error>> {
let null_masks: Vec<ChunkedArray<BooleanType>> = metrics
.iter()
.map(|metric| {
let null_mask = df.column(metric.as_str())?.is_null();
let nan_mask = df.column(metric.as_str())?.is_nan()?;
Ok(null_mask | nan_mask)
})
.collect::<Result<_, Box<dyn Error>>>()?;
let null_mask = null_masks
.into_iter()
.reduce(|acc, mask| acc | mask)
.unwrap_or_else(|| BooleanChunked::full_null("null_mask".into(), df.height()));
Ok(null_mask)
}
fn convert_dates_list_to_date_ranges(
blacklist: Vec<String>,
all_bdates_strs: Vec<String>,
) -> HashMap<String, (String, String)> {
// Step 1: Map every date in all_bdates_strs to its index
let mut all_map: HashMap<String, usize> = HashMap::new();
for (i, d) in all_bdates_strs.iter().enumerate() {
all_map.insert(d.clone(), i);
}
// Step 2: Convert each blacklisted date into its index, if it exists
let mut blacklisted_indices: Vec<usize> = Vec::new();
for dt in blacklist {
if let Some(&idx) = all_map.get(&dt) {
blacklisted_indices.push(idx);
}
}
// Step 3: Sort the blacklisted indices
blacklisted_indices.sort_unstable();
// Step 4: Traverse and group consecutive indices into ranges
let mut result: HashMap<i64, (String, String)> = HashMap::new();
let mut string_result: HashMap<String, (String, String)> = HashMap::new();
if blacklisted_indices.is_empty() {
return string_result;
}
let mut range_idx: i64 = 0;
let mut start_idx = blacklisted_indices[0];
let mut end_idx = start_idx;
for &cur_idx in blacklisted_indices.iter().skip(1) {
if cur_idx == end_idx + 1 {
// We are still in a contiguous run
end_idx = cur_idx;
} else {
// We hit a break in contiguity, so store the last range
result.insert(
range_idx,
(
all_bdates_strs[start_idx].clone(),
all_bdates_strs[end_idx].clone(),
),
);
range_idx += 1;
// Start a new range
start_idx = cur_idx;
end_idx = cur_idx;
}
}
// Don't forget to store the final range after the loop
result.insert(
range_idx,
(
all_bdates_strs[start_idx].clone(),
all_bdates_strs[end_idx].clone(),
),
);
let max_digits = result.keys().max().unwrap_or(&-1).to_string().len();
for (key, (start_date, end_date)) in result.iter() {
let new_key = format!("{:0width$}", key, width = max_digits);
string_result.insert(new_key, (start_date.clone(), end_date.clone()));
}
string_result
}
fn get_vec_of_vec_of_dates_from_df(df: DataFrame) -> Result<Vec<Vec<String>>, Box<dyn Error>> {
@@ -115,6 +291,7 @@ fn get_vec_of_vec_of_dates_from_df(df: DataFrame) -> Result<Vec<Vec<String>>, Bo
Ok(rdt)
}
#[allow(dead_code)]
fn get_vec_of_vec_of_naivedates_from_df(
df: DataFrame,
) -> Result<Vec<Vec<NaiveDate>>, Box<dyn Error>> {
@@ -159,3 +336,38 @@ fn get_vec_of_vec_of_naivedates_from_df(
// .collect::<Vec<Vec<String>>>();
// Ok(rdt)
// }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_convert_dates_list_to_date_ranges() {
let all_dates = vec![
"2023-01-01".to_string(),
"2023-01-02".to_string(),
"2023-01-03".to_string(),
"2023-01-04".to_string(),
"2023-01-05".to_string(),
"2023-01-06".to_string(),
];
let blacklist = vec![
"2023-01-02".to_string(),
"2023-01-03".to_string(),
"2023-01-05".to_string(),
];
let result = convert_dates_list_to_date_ranges(blacklist, all_dates);
// Expect two ranges:
// range 0 => ("2023-01-02", "2023-01-03")
// range 1 => ("2023-01-05", "2023-01-05")
assert_eq!(
result["0"],
("2023-01-02".to_string(), "2023-01-03".to_string())
);
assert_eq!(
result["1"],
("2023-01-05".to_string(), "2023-01-05".to_string())
);
}
}
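The grouping behaviour asserted above (two ranges, zero-padded keys) can be verified locally with `cargo test convert_dates_list_to_date_ranges`.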

src/utils/qdf/core.rs (modified)

@@ -17,14 +17,15 @@ use std::error::Error;
pub fn check_quantamental_dataframe(df: &DataFrame) -> Result<(), Box<dyn Error>> {
let expected_cols = ["real_date", "cid", "xcat"];
let expected_dtype = [DataType::Date, DataType::String, DataType::String];
let err = "Quantamental DataFrame must have at least 4 columns: 'real_date', 'cid', 'xcat' and one or more metrics.";
for (col, dtype) in expected_cols.iter().zip(expected_dtype.iter()) {
let col = df.column(col);
if col.is_err() {
return Err(format!("Column {:?} not found", col).into());
return Err(format!("{} Column {:?} not found", err, col).into());
}
let col = col?;
if col.dtype() != dtype {
return Err(format!("Column {:?} has wrong dtype", col).into());
return Err(format!("{} Column {:?} has wrong dtype", err, col).into());
}
}
Ok(())