import pandas as pd
import numpy as np
from datetime import datetime, timedelta

def calculate_momentum_signals(prices, start_date):
    """
    Calculates 9 momentum signals for each ticker at the given start date.
    
    Parameters:
    prices (DataFrame): DataFrame with dates in the first column and 13 tickers ahead.
    start_date (str): Date for which signals should be calculated (format: 'MM/DD/YYYY').
    
    Returns:
    DataFrame with momentum signals for each ticker at the given start date.
    """
    lookbacks = {"1M": 21, "3M": 63, "6M": 126}  # Approximate trading days
    tickers = prices.columns[1:]  # Exclude the 'Dates' column

    start_date = datetime.strptime(start_date, '%m/%d/%Y')

    # Ensure prices are sorted
    prices = prices.sort_values('Dates')

    # Check if start_date is within the date range of the data
    if start_date < prices['Dates'].min() or start_date > prices['Dates'].max():
        raise ValueError(f"Start date {start_date.strftime('%m/%d/%Y')} is outside the date range of the price data. "
                         f"Available date range: {prices['Dates'].min().strftime('%m/%d/%Y')} to {prices['Dates'].max().strftime('%m/%d/%Y')}")

    # Find the nearest available date on or after the start_date
    start_date = prices[prices['Dates'] >= start_date]['Dates'].min()

    # Set index to Dates so rows can be located by trading day
    prices = prices.set_index('Dates')

    # Pre-compute daily log returns once; they are reused by the risk-adjusted signal
    log_returns = np.log(prices[tickers] / prices[tickers].shift(1))

    # Initialize list to store signal values
    signals_list = []

    for date in prices.loc[start_date:].index:
        # Skip dates that do not yet have a full 6-month (126 trading day) history
        if prices.index.get_loc(date) < max(lookbacks.values()):
            continue

        pt = prices.loc[date, tickers]
        signals = {"Dates": date}

        for label, lb in lookbacks.items():
            pt_n = prices.iloc[prices.index.get_loc(date) - lb][tickers]

            # Total Return Momentum
            total_return = (pt - pt_n) / pt_n

            # Price Minus Moving Average
            sma = prices[tickers].iloc[prices.index.get_loc(date)-lb : prices.index.get_loc(date)].mean()
            price_sma = (pt / sma) - 1

            # Risk-Adjusted Momentum: cumulative log return scaled by the sum of
            # absolute daily log returns over the lookback window
            log_returns_sum = log_returns.iloc[prices.index.get_loc(date)-lb : prices.index.get_loc(date)].abs().sum()
            log_momentum = np.log(pt / pt_n) / np.where(log_returns_sum == 0, np.nan, log_returns_sum)

            # Store results
            for ticker in tickers:
                signals[f'TotalReturn_{label}_{ticker}'] = total_return[ticker]
                signals[f'PriceMinusSMA_{label}_{ticker}'] = price_sma[ticker]
                signals[f'RiskAdjusted_{label}_{ticker}'] = log_momentum[ticker]

        signals_list.append(signals)

    signals_df = pd.DataFrame(signals_list)
    return signals_df.set_index("Dates")
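
# A minimal, self-contained sketch of how calculate_momentum_signals might be
# called. Everything below (ticker names, the random-walk prices, the chosen
# start date) is made up for illustration and is not part of the original data.
def _demo_calculate_momentum_signals():
    np.random.seed(0)
    dates = pd.bdate_range('2020-01-01', periods=300)
    tickers = [f'ASSET{i}' for i in range(1, 14)]  # 13 hypothetical tickers
    prices = pd.DataFrame(
        100 * np.exp(np.cumsum(np.random.normal(0, 0.01, size=(len(dates), len(tickers))), axis=0)),
        columns=tickers
    )
    prices.insert(0, 'Dates', dates)

    signals = calculate_momentum_signals(prices, '07/01/2020')
    print(signals.filter(like='TotalReturn_6M').head())
    return signals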

def rank_momentum_signals(signals_df):
    """
    Combines the 9 signals into a composite momentum score and ranks the 13 assets.
    """
    weights = {"1M": 0.15, "3M": 0.35, "6M": 0.50}
    tickers = set([col.split('_')[-1] for col in signals_df.columns])

    # Normalize each signal
    signals_norm = signals_df.apply(lambda x: (x - x.mean()) / x.std())

    # Compute weighted sum for each asset
    composite_scores = pd.DataFrame(index=signals_df.index, columns=tickers)
    for ticker in tickers:
        ticker_signals = signals_norm.filter(regex=f'.*_{ticker}$')
        composite_scores[ticker] = sum(weights[label] / 3 * ticker_signals.filter(like=label).mean(axis=1) for label in weights.keys())

    # Rank assets (1 = highest momentum, 13 = lowest momentum)
    rankings = composite_scores.rank(axis=1, ascending=False, method='dense')

    # Combine scores and rankings
    result_df = signals_df.copy()
    for ticker in tickers:
        result_df[f'Composite_Score_{ticker}'] = composite_scores[ticker]
        result_df[f'Rank_{ticker}'] = rankings[ticker]

    return result_df
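
# A worked illustration of the composite weighting above, using made-up mean
# z-scores per lookback for a single asset on a single date (the numbers are
# purely hypothetical).
def _composite_score_example():
    weights = {"1M": 0.15, "3M": 0.35, "6M": 0.50}
    mean_z = {"1M": 0.2, "3M": 0.5, "6M": 1.0}  # average of the 3 normalized signals per lookback
    composite = sum(weights[k] * mean_z[k] for k in weights)
    # 0.15*0.2 + 0.35*0.5 + 0.50*1.0 = 0.705
    return composite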

def rebalance_portfolio(prices, start_date, end_date, rebalance_frequency, initial_aum=100_000_000):
    """
    Implements the momentum-based strategy with periodic rebalancing.
    """
    start_date = pd.to_datetime(start_date)
    end_date = pd.to_datetime(end_date)
    
    rebalance_dates = pd.date_range(start=start_date, end=end_date, freq=f'{rebalance_frequency}M')

    # Date-indexed view of the prices for lookups by trading day
    price_lookup = prices.set_index('Dates').sort_index()

    portfolio_history = []
    current_aum = initial_aum
    quantities = None
    top_assets = None

    for rebalance_date in rebalance_dates:
        # Find the nearest trading day on or after the (calendar) rebalance date
        nearest_date = prices.loc[prices['Dates'] >= rebalance_date, 'Dates'].min()
        if pd.isnull(nearest_date):
            break  # No more trading days after this rebalance date

        # Mark the existing holdings to market before re-allocating
        if quantities is not None:
            current_aum = float((quantities * price_lookup.loc[nearest_date, top_assets]).sum())

        signals_df = calculate_momentum_signals(prices, nearest_date.strftime('%m/%d/%Y'))
        ranked_df = rank_momentum_signals(signals_df)

        # Pick the 6 assets with the best (lowest) rank as of the rebalance date
        top_assets = ranked_df.iloc[0].filter(like='Rank_').nsmallest(6).index.str.split('_').str[-1]
        equal_allocation = current_aum / 6
        prices_on_date = price_lookup.loc[nearest_date, top_assets]
        quantities = equal_allocation / prices_on_date

        portfolio_history.append({
            'Date': nearest_date,
            'AUM': current_aum,
            'Top_Assets': list(top_assets),
            'Quantities': list(quantities)
        })

    return pd.DataFrame(portfolio_history)
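
# A small sketch of how the rebalance schedule is generated: with
# rebalance_frequency=3 the 'M' frequency alias yields calendar month-ends every
# three months, which rebalance_portfolio then snaps forward to the nearest
# trading day in the price file. The dates below are illustrative.
def _demo_rebalance_schedule():
    dates = pd.date_range(start='2023-01-01', end='2023-12-31', freq='3M')
    # DatetimeIndex: 2023-01-31, 2023-04-30, 2023-07-31, 2023-10-31
    return list(dates)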

def export_to_excel(prices, start_date, end_date, rebalance_frequency, output_file="momentum_strategy.xlsx"):
    """
    Runs the backtest and exports the results to an Excel file.
    """
    portfolio_df = rebalance_portfolio(prices, start_date, end_date, rebalance_frequency)
    portfolio_df.to_excel(output_file, index=False)
    print(f"Momentum strategy results saved to {output_file}")



RangeIndex(start=0, stop=7301, step=1)
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-88-e4852422b760> in <module>()
----> 1 export_to_excel(prices, start_date, end_date, rebalance_frequency, output_file="momentum_strategy.xlsx")

<ipython-input-87-7694b8315d36> in export_to_excel(prices, start_date, end_date, rebalance_frequency, output_file)
    143     Runs the backtest and exports the results to an Excel file.
    144     """
--> 145     portfolio_df = rebalance_portfolio(prices, start_date, end_date, rebalance_frequency)
    146     portfolio_df.to_excel(output_file, index=False)
    147     print(f"Momentum strategy results saved to {output_file}")

<ipython-input-87-7694b8315d36> in rebalance_portfolio(prices, start_date, end_date, rebalance_frequency, initial_aum)
    121         top_assets = ranked_df.iloc[-1].filter(like='Rank_').nsmallest(6).index.str.split('_').str[-1]
    122         equal_allocation = current_aum / 6
--> 123         prices_on_date = prices.loc[rebalance_date, top_assets]
    124         quantities = equal_allocation / prices_on_date
    125 

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexing.py in __getitem__(self, key)
    871                     # AttributeError for IntervalTree get_value
    872                     pass
--> 873             return self._getitem_tuple(key)
    874         else:
    875             # we by definition only have the 0th axis

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexing.py in _getitem_tuple(self, tup)
   1042     def _getitem_tuple(self, tup: Tuple):
   1043         try:
-> 1044             return self._getitem_lowerdim(tup)
   1045         except IndexingError:
   1046             pass

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexing.py in _getitem_lowerdim(self, tup)
    784                 # We don't need to check for tuples here because those are
    785                 #  caught by the _is_nested_tuple_indexer check above.
--> 786                 section = self._getitem_axis(key, axis=i)
    787 
    788                 # We should never have a scalar section here, because

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexing.py in _getitem_axis(self, key, axis)
   1108         # fall thru to straight lookup
   1109         self._validate_key(key, axis)
-> 1110         return self._get_label(key, axis=axis)
   1111 
   1112     def _get_slice_axis(self, slice_obj: slice, axis: int):

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexing.py in _get_label(self, label, axis)
   1057     def _get_label(self, label, axis: int):
   1058         # GH#5667 this will fail if the label is not present in the axis.
-> 1059         return self.obj.xs(label, axis=axis)
   1060 
   1061     def _handle_lowerdim_multi_index_axis0(self, tup: Tuple):

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\generic.py in xs(self, key, axis, level, drop_level)
   3491             loc, new_index = self.index.get_loc_level(key, drop_level=drop_level)
   3492         else:
-> 3493             loc = self.index.get_loc(key)
   3494 
   3495             if isinstance(loc, np.ndarray):

~\AppData\Roaming\Python\Python36\site-packages\pandas\core\indexes\range.py in get_loc(self, key, method, tolerance)
    356                 except ValueError as err:
    357                     raise KeyError(key) from err
--> 358             raise KeyError(key)
    359         return super().get_loc(key, method=method, tolerance=tolerance)
    360 

KeyError: Timestamp('2024-01-31 00:00:00', freq='M')
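
The KeyError above comes from the line prices_on_date = prices.loc[rebalance_date, top_assets]: inside rebalance_portfolio the DataFrame still carries its default RangeIndex (the printed RangeIndex(start=0, stop=7301, step=1) confirms this), and the month-end Timestamp produced by pd.date_range (2024-01-31 here) is usually not a trading day, so it cannot be used as a row label. A minimal sketch of a lookup that avoids both problems, indexing a copy of the prices by 'Dates' and snapping to the nearest trading day on or after the rebalance date:

    price_lookup = prices.set_index('Dates')
    nearest_date = prices.loc[prices['Dates'] >= rebalance_date, 'Dates'].min()
    prices_on_date = price_lookup.loc[nearest_date, top_assets]

This is the approach taken in rebalance_portfolio above.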