Skip to content

Commit

Permalink
preliminary edits
Browse files Browse the repository at this point in the history
  • Loading branch information
akwon31 committed May 18, 2020
1 parent 288c7be commit 6df09eb
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 6 deletions.
1 change: 0 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,5 @@ docs/build/
.local/
cover/
*.pickle
.ipynb_checkpoints
*/.ipynb_checkpoints/*
mlfinlab.egg-info/*
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@ def __init__(self, window, rho):
Initializes Correlation Driven Nonparametric Learning with the given window and rho value.
:param window: (int) Number of windows to look back for similarity sets.
:param rho: (float) Threshold for similarity.
:param rho: (float) Threshold for similarity. Rho should be set in the range [-1, 1].
Lower rho values will classify more periods as being similar, and higher values
will be more strict on identifying a period as similarly correlated.
"""
self.window = window
self.rho = rho
Expand All @@ -33,7 +35,7 @@ def _initialize(self, asset_prices, weights, resample_by):
Initializes the important variables for the object.
:param asset_prices: (pd.DataFrame) Historical asset prices.
:param weights: (list/np.array/pd.Dataframe) Initial weights set by the user.
:param weights: (list/np.array/pd.DataFrame) Initial weights set by the user.
:param resample_by: (str) Specifies how to resample the prices.
"""
super(CorrelationDrivenNonparametricLearning, self)._initialize(asset_prices, weights,
Expand All @@ -60,8 +62,10 @@ def _update_weight(self, time):
"""
# Create similar set.
similar_set = []

# Default is uniform weights.
new_weights = self._uniform_weight()

# Calculate for similar sets if time is greater or equal to window size.
if time >= self.window:
# Iterate through past windows.
Expand All @@ -70,6 +74,7 @@ def _update_weight(self, time):
if self.corr_coef[time - self.window + 1][past_time] > self.rho:
# Append the time for similar set.
similar_set.append(past_time + self.window)

if similar_set:
# Choose the corresponding relative return periods.
optimize_array = self.relative_return[similar_set]
Expand Down Expand Up @@ -113,11 +118,14 @@ def calculate_rolling_correlation_coefficient(self):
"""
# Flatten the array.
flattened = self.relative_return.flatten()

# Set index of rolled window.
idx = np.arange(self.number_of_assets * self.window)[None, :] + self.number_of_assets * \
np.arange(self.length_of_time - self.window + 1)[:, None]

# Retrieve the results of the rolled window.
rolled_returns = flattened[idx]

# Calculate correlation coefficient.
rolling_corr_coef = np.nan_to_num(np.corrcoef(rolled_returns), nan=0)
return rolling_corr_coef
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def __init__(self, window, rho, k):
windows, rho values, and k experts.
:param window: (int) Number of windows to look back for similarity sets. (1, 2, ..., w).
:param rho: (float) Number of rho values for threshold. (0, 1/rho, ..., (rho-1)/rho).
:param rho: (int) Number of rho values for threshold. (0, 1/rho, ..., (rho-1)/rho).
:param k: (int) Number of top-k experts.
"""
self.window = window
Expand Down Expand Up @@ -85,11 +85,11 @@ def _generate_experts(self):
pointer = 0

# Window from 1 to self.window.
for n_window in range(self.window):
for n_window in range(1, self.window):
# Rho from 0 to (rho - 1)/rho.
for n_rho in range(self.rho):
# Assign experts with parameters (n_window + 1, n_rho/rho).
self.expert_params[pointer] = [n_window + 1, n_rho/self.rho]
self.expert_params[pointer] = [n_window, n_rho/self.rho]
# Next pointer.
pointer += 1
# Assign parameters.
Expand Down

0 comments on commit 6df09eb

Please sign in to comment.