Compare commits

No commits in common. "f0a9242d10360599d6ff2a1cae0889a7390e934a" and "ed9d5d01a2a0e8dae200fde6b92bcc3d09726be8" have entirely different histories.

7 changed files with 2302 additions and 343 deletions

File diff suppressed because one or more lines are too long

@@ -1,33 +0,0 @@
-#!/bin/bash
-# Exit immediately if a command exits with a non-zero status
-set -e
-# Run "maturin --help". If it fails, print an error message and exit.
-if ! maturin --help > /dev/null 2>&1; then
-    echo "Failed to run maturin --help" >&2
-    exit 1
-fi
-# Delete any existing build directory and create a new one.
-rm -rf ./build
-mkdir -p ./build
-# Copy ./src/msyrs.pyi to ./msyrs.pyi.
-cp ./src/msyrs.pyi ./msyrs.pyi
-# Build using maturin.
-maturin build --release --sdist --out ./build/
-# Get the first wheel file found in the build directory.
-whl_file=$(ls ./build/*.whl 2>/dev/null | head -n 1)
-if [ -z "$whl_file" ]; then
-    echo "No wheel file found in ./build" >&2
-    exit 1
-fi
-# Rename the wheel file from .whl to .zip.
-base_name="${whl_file%.whl}"
-mv "$whl_file" "${base_name}.zip"
-# Delete the temporary .pyi file.
-rm ./msyrs.pyi

@@ -1,20 +0,0 @@
-#!/bin/bash
-set -e
-# Ensure maturin is installed. For example, you can install it via:
-# pip install maturin
-# Run "maturin --help". If it fails, print an error message and exit.
-if ! maturin --help > /dev/null 2>&1; then
-    echo "Failed to run maturin --help" >&2
-    exit 1
-fi
-# Copy ./src/msyrs.pyi to the current directory as msyrs.pyi
-cp ./src/msyrs.pyi ./msyrs.pyi
-# Run maturin develop in release mode.
-maturin develop --release
-# Delete the temporary msyrs.pyi file.
-rm ./msyrs.pyi

@@ -1,7 +1,7 @@
-use pyo3::types::IntoPyDict;
 use pyo3::{prelude::*, types::PyDict};
 use pyo3_polars::{PyDataFrame, PySeries};
 /// Python wrapper for [`crate::utils::qdf`] module.
 #[allow(deprecated)]
 #[pymodule]
@@ -37,20 +37,10 @@ pub fn get_bdates_series_default_opt(
 }
 #[allow(deprecated)]
-#[pyfunction(signature = (df, group_by_cid=None, blacklist_name=None, metric=None))]
-pub fn create_blacklist_from_qdf(
-    df: PyDataFrame,
-    group_by_cid: Option<bool>,
-    blacklist_name: Option<String>,
-    metric: Option<String>,
-) -> PyResult<PyObject> {
-    let result = crate::utils::qdf::blacklist::create_blacklist_from_qdf(
-        &df.into(),
-        group_by_cid,
-        blacklist_name,
-        metric,
-    )
-    .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(format!("{}", e)))?;
+#[pyfunction(signature = (df, metric = None))]
+pub fn create_blacklist_from_qdf(df: PyDataFrame, metric: Option<String>) -> PyResult<PyObject> {
+    let result = crate::utils::qdf::blacklist::create_blacklist_from_qdf(&df.into(), metric)
+        .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(format!("{}", e)))?;
     Python::with_gil(|py| {
         let dict = PyDict::new(py);
         // for (key, (start_date, end_date)) in result {
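The dict-building loop stays commented out on both sides of this diff. A minimal sketch, assuming pyo3's pre-0.21 gil-ref API (consistent with the `PyDict::new(py)` call above), of how the new `HashMap<String, Vec<String>>` result could be marshalled into the returned dict; the helper name `blacklist_to_pyobject` is hypothetical, not part of msyrs:

use pyo3::prelude::*;
use pyo3::types::PyDict;
use std::collections::HashMap;

// Hypothetical helper (not msyrs code): convert the Rust-side result
// (ticker -> blacklisted date strings) into a Python dict under the GIL.
fn blacklist_to_pyobject(result: HashMap<String, Vec<String>>) -> PyResult<PyObject> {
    Python::with_gil(|py| {
        let dict = PyDict::new(py);
        for (ticker, dates) in result {
            // Each Vec<String> converts to a Python list of str.
            dict.set_item(ticker, dates)?;
        }
        Ok(dict.to_object(py))
    })
}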

@@ -58,7 +58,7 @@ fn all_jpmaq_expressions(expressions: Vec<String>) -> bool {
 ///
 /// Example Usage:
 ///
-/// ```ignore
+/// ```rust
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
 ///
@@ -102,7 +102,7 @@ impl Default for JPMaQSDownloadGetIndicatorArgs {
 /// Struct for downloading data from the JPMaQS data from JPMorgan DataQuery API.
 ///
 /// ## Example Usage
-/// ```ignore
+/// ```rust
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
 /// use polars::prelude::*;
@@ -277,7 +277,7 @@ impl JPMaQSDownload {
 ///
 /// Usage:
 ///
-/// ```ignore
+/// ```rust
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownload;
 /// use msyrs::download::jpmaqsdownload::JPMaQSDownloadGetIndicatorArgs;
 /// let mut jpamqs_download = JPMaQSDownload::default();

@@ -1,23 +1,24 @@
-use crate::utils::bdates::{get_bdates_list_with_freq, BDateFreq};
-use crate::utils::dateutils::get_min_max_real_dates;
-use crate::utils::misc::get_cid;
+use crate::utils::bdates::{self, get_bdates_list_with_freq, BDateFreq};
+use crate::utils::dateutils::{get_bdates_series_default_opt, get_min_max_real_dates};
 use crate::utils::qdf::core::check_quantamental_dataframe;
 use chrono::NaiveDate;
 use polars::prelude::*;
-use std::collections::{BTreeMap, HashMap};
+use std::collections::HashMap;
 use std::error::Error;
 pub fn create_blacklist_from_qdf(
     df: &DataFrame,
-    group_by_cid: Option<bool>,
-    blacklist_name: Option<String>,
     metric: Option<String>,
-) -> Result<BTreeMap<String, (String, String)>, Box<dyn Error>> {
+// ) -> Result<HashMap<String, (String, String)>, Box<dyn Error>> {
+) -> Result<HashMap<String, Vec<String>>, Box<dyn Error>> {
+    // Verify that the DataFrame follows the Quantamental structure.
     check_quantamental_dataframe(df)?;
+    let mut blacklist: HashMap<String, Vec<String>> = HashMap::new();
+    // let mut blacklist: HashMap<String, (String, String)> = HashMap::new();
+    let mut blk: HashMap<String, Vec<NaiveDate>> = HashMap::new();
+    // Use the provided metric or default to "value".
     let metric = metric.unwrap_or_else(|| "value".into());
-    let blacklist_name = blacklist_name.unwrap_or_else(|| "BLACKLIST".into());
-    let group_by_cid = group_by_cid.unwrap_or(true);
     let (min_date, max_date) = get_min_max_real_dates(df, "real_date".into())?;
     let min_date_str = min_date.format("%Y-%m-%d").to_string();
@@ -69,118 +70,27 @@ pub fn create_blacklist_from_qdf(
     let rdt = get_vec_of_vec_of_dates_from_df(df)?;
-    let mut blk: HashMap<String, Vec<String>> = HashMap::new();
+    // assert!(0 == 1, "{:?}", rdt);
     for (tkr, dates) in ticker_vec.iter().zip(rdt.iter()) {
-        if group_by_cid {
-            let _cid = get_cid(tkr.clone())?;
-            if blk.contains_key(&_cid) {
-                blk.get_mut(&_cid).unwrap().extend(dates.iter().cloned());
-            } else {
-                blk.insert(_cid, dates.clone());
-            }
-        } else {
-            blk.insert(tkr.to_string(), dates.clone());
-        }
-    }
-    for (_key, vals) in blk.iter_mut() {
-        // order is important - dedup depends on the vec being sorted
-        vals.sort();
-        vals.dedup();
+        blacklist.insert(tkr.to_string(), dates.clone());
     }
-    let all_bdates_strs = all_bdates
+    Ok(blacklist)
+}
+fn convert_dates_list_to_date_ranges(
+    blacklist: HashMap<String, Vec<String>>,
+    all_bdates: Vec<NaiveDate>,
+) -> HashMap<String, (String, String)> {
+    let blk = HashMap::new();
+    let bdates = all_bdates
         .iter()
         .map(|date| date.format("%Y-%m-%d").to_string())
         .collect::<Vec<String>>();
-    let mut blacklist: HashMap<String, (String, String)> = HashMap::new();
-    for (tkr, dates) in blk.iter() {
-        let date_ranges = convert_dates_list_to_date_ranges(dates.clone(), all_bdates_strs.clone());
-        for (rng_idx, (start_date, end_date)) in date_ranges.iter() {
-            let range_key = format!("{}_{}_{}", tkr, blacklist_name.clone(), rng_idx);
-            blacklist.insert(range_key, (start_date.clone(), end_date.clone()));
-        }
-    }
-    // Ok(blacklist)
-    let mut btree_map: BTreeMap<String, (String, String)> = BTreeMap::new();
-    for (key, (start_date, end_date)) in blacklist.iter() {
-        btree_map.insert(key.clone(), (start_date.clone(), end_date.clone()));
-    }
-    Ok(btree_map)
-}
-fn convert_dates_list_to_date_ranges(
-    blacklist: Vec<String>,
-    all_bdates_strs: Vec<String>,
-) -> HashMap<String, (String, String)> {
-    // Step 1: Map every date in all_bdates_strs to its index
-    let mut all_map: HashMap<String, usize> = HashMap::new();
-    for (i, d) in all_bdates_strs.iter().enumerate() {
-        all_map.insert(d.clone(), i);
-    }
-    // Step 2: Convert each blacklisted date into its index, if it exists
-    let mut blacklisted_indices: Vec<usize> = Vec::new();
-    for dt in blacklist {
-        if let Some(&idx) = all_map.get(&dt) {
-            blacklisted_indices.push(idx);
-        }
-    }
-    // Step 3: Sort the blacklisted indices
-    blacklisted_indices.sort_unstable();
-    // Step 4: Traverse and group consecutive indices into ranges
-    let mut result: HashMap<i64, (String, String)> = HashMap::new();
-    let mut string_result: HashMap<String, (String, String)> = HashMap::new();
-    if blacklisted_indices.is_empty() {
-        return string_result;
-    }
-    let mut range_idx: i64 = 0;
-    let mut start_idx = blacklisted_indices[0];
-    let mut end_idx = start_idx;
-    for &cur_idx in blacklisted_indices.iter().skip(1) {
-        if cur_idx == end_idx + 1 {
-            // We are still in a contiguous run
-            end_idx = cur_idx;
-        } else {
-            // We hit a break in contiguity, so store the last range
-            result.insert(
-                range_idx,
-                (
-                    all_bdates_strs[start_idx].clone(),
-                    all_bdates_strs[end_idx].clone(),
-                ),
-            );
-            range_idx += 1;
-            // Start a new range
-            start_idx = cur_idx;
-            end_idx = cur_idx;
-        }
-    }
-    // Don't forget to store the final range after the loop
-    result.insert(
-        range_idx,
-        (
-            all_bdates_strs[start_idx].clone(),
-            all_bdates_strs[end_idx].clone(),
-        ),
-    );
-    let max_digits = result.keys().max().unwrap_or(&-1).to_string().len();
-    for (key, (start_date, end_date)) in result.iter() {
-        let new_key = format!("{:0width$}", key, width = max_digits);
-        string_result.insert(new_key, (start_date.clone(), end_date.clone()));
-    }
-    string_result
+    //
+    blk
 }
 fn get_vec_of_vec_of_dates_from_df(df: DataFrame) -> Result<Vec<Vec<String>>, Box<dyn Error>> {
@@ -205,7 +115,6 @@ fn get_vec_of_vec_of_dates_from_df(df: DataFrame) -> Result<Vec<Vec<String>>, Box<dyn Error>> {
     Ok(rdt)
 }
-#[allow(dead_code)]
 fn get_vec_of_vec_of_naivedates_from_df(
     df: DataFrame,
 ) -> Result<Vec<Vec<NaiveDate>>, Box<dyn Error>> {
@@ -250,32 +159,3 @@ fn get_vec_of_vec_of_naivedates_from_df(
 // .collect::<Vec<Vec<String>>>();
 // Ok(rdt)
 // }
-#[cfg(test)]
-mod tests {
-    use super::*;
-    #[test]
-    fn test_convert_dates_list_to_date_ranges() {
-        let all_dates = vec![
-            "2023-01-01".to_string(),
-            "2023-01-02".to_string(),
-            "2023-01-03".to_string(),
-            "2023-01-04".to_string(),
-            "2023-01-05".to_string(),
-            "2023-01-06".to_string(),
-        ];
-        let blacklist = vec![
-            "2023-01-02".to_string(),
-            "2023-01-03".to_string(),
-            "2023-01-05".to_string(),
-        ];
-        let result = convert_dates_list_to_date_ranges(blacklist, all_dates);
-        // Expect two ranges:
-        //   range 0 => ("2023-01-02", "2023-01-03")
-        //   range 1 => ("2023-01-05", "2023-01-05")
-        assert_eq!(result["0"], ("2023-01-02".to_string(), "2023-01-03".to_string()));
-        assert_eq!(result["1"], ("2023-01-05".to_string(), "2023-01-05".to_string()));
-    }
-}
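The deleted test above pins down the behaviour this commit removes: consecutive business days collapse into (start, end) ranges keyed by a running index. A minimal sketch of that same contract for callers who still need it; `dates_to_ranges` is a hypothetical stand-in, not the crate's API, and unlike the deleted code it does not zero-pad the range keys:

use std::collections::HashMap;

// Hypothetical stand-in for the deleted convert_dates_list_to_date_ranges:
// collapse dates that are adjacent in the business-day calendar into
// (start, end) ranges, keyed by the range's running index.
fn dates_to_ranges(
    blacklist: &[String],
    all_bdates: &[String],
) -> HashMap<String, (String, String)> {
    // Map every business date to its position in the calendar.
    let index: HashMap<&str, usize> = all_bdates
        .iter()
        .enumerate()
        .map(|(i, d)| (d.as_str(), i))
        .collect();
    // Keep only blacklisted dates present in the calendar, as sorted positions.
    let mut idxs: Vec<usize> = blacklist
        .iter()
        .filter_map(|d| index.get(d.as_str()).copied())
        .collect();
    idxs.sort_unstable();
    let mut out = HashMap::new();
    let mut run: Option<(usize, usize)> = None; // (start, end) of the current run
    let mut range_no = 0usize;
    for &i in &idxs {
        run = match run {
            // Still contiguous: extend the current run.
            Some((start, end)) if i == end + 1 => Some((start, i)),
            // Contiguity broke: flush the finished run and start a new one.
            Some((start, end)) => {
                out.insert(
                    range_no.to_string(),
                    (all_bdates[start].clone(), all_bdates[end].clone()),
                );
                range_no += 1;
                Some((i, i))
            }
            // First blacklisted date seen.
            None => Some((i, i)),
        };
    }
    // Flush the final run, if any blacklisted dates were found at all.
    if let Some((start, end)) = run {
        out.insert(
            range_no.to_string(),
            (all_bdates[start].clone(), all_bdates[end].clone()),
        );
    }
    out
}

With the deleted test's inputs this yields "0" -> ("2023-01-02", "2023-01-03") and "1" -> ("2023-01-05", "2023-01-05"), matching the removed assertions.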

@@ -17,15 +17,14 @@ use std::error::Error;
 pub fn check_quantamental_dataframe(df: &DataFrame) -> Result<(), Box<dyn Error>> {
     let expected_cols = ["real_date", "cid", "xcat"];
     let expected_dtype = [DataType::Date, DataType::String, DataType::String];
-    let err = "Quantamental DataFrame must have at least 4 columns: 'real_date', 'cid', 'xcat' and one or more metrics.";
     for (col, dtype) in expected_cols.iter().zip(expected_dtype.iter()) {
         let col = df.column(col);
         if col.is_err() {
-            return Err(format!("{} Column {:?} not found", err, col).into());
+            return Err(format!("Column {:?} not found", col).into());
         }
         let col = col?;
         if col.dtype() != dtype {
-            return Err(format!("{} Column {:?} has wrong dtype", err, col).into());
+            return Err(format!("Column {:?} has wrong dtype", col).into());
         }
     }
     Ok(())