Untitled
unknown
plain_text
2 months ago
18 kB
3
Indexable
import pandas as pd
import numpy as np
from datetime import datetime
from dateutil.relativedelta import relativedelta


###############################################
# 1. Data Loading
###############################################

def load_price_data(filepath):
    """
    Load historical prices from an Excel file.

    Assumes the first column holds dates and the remaining columns are
    tickers. Returns a DataFrame indexed by datetime.
    """
    df = pd.read_excel(filepath, index_col=0)
    df.index = pd.to_datetime(df.index)
    return df


###############################################
# 2. Helper: Observation Dates
###############################################

def get_observation_dates(start_date, end_date, rebalance_period):
    """
    Return the list of observation dates from start_date to end_date
    (inclusive), stepping by rebalance_period months.

    Note: dates are produced by calendar arithmetic, so they may fall on
    non-trading days; downstream price lookups must tolerate that.
    """
    dates = []
    current = start_date
    while current <= end_date:
        dates.append(current)
        current += relativedelta(months=rebalance_period)
    return dates


###############################################
# 3. Initialize the Portfolio
###############################################

def initialize_portfolio(prices, date, tickers, initial_aum):
    """
    On the start date, invest equal notional amounts in each asset.

    Uses Series.asof so a start date that falls on a holiday/weekend
    resolves to the last available price on or before that date (an
    exact .loc lookup would raise KeyError for non-trading dates).

    Returns a dict mapping ticker -> quantity.

    Raises ValueError if no price exists on or before `date`.
    """
    portfolio = {}
    allocation = initial_aum / len(tickers)
    for ticker in tickers:
        price = prices[ticker].asof(date)
        if pd.isna(price):
            raise ValueError(f"No price data for {ticker} on or before {date}.")
        portfolio[ticker] = allocation / price
    return portfolio


###############################################
# 4. Lookback Metric Computation
###############################################

def compute_lookback_metric(prices, current_date, ticker,
                            lookback_period, metric_type='simple'):
    """
    Compute the lookback metric for one ticker.

    - 'simple': (Price_today / Price_lookback) - 1.
    - 'sma':    (Price_today - SMA) / SMA, where SMA is the simple
                moving average over the lookback window.

    Series.asof fetches the nearest available price on or before each
    date, so holidays/weekends are handled gracefully.

    Raises ValueError for missing prices or an unknown metric_type.
    """
    # asof requires a sorted index.
    prices = prices.sort_index()
    lookback_date = current_date - relativedelta(months=lookback_period)

    current_price = prices[ticker].asof(current_date)
    lookback_price = prices[ticker].asof(lookback_date)
    if pd.isna(current_price) or pd.isna(lookback_price):
        raise ValueError(f"Missing price data for {ticker} on {current_date} or {lookback_date}.")

    if metric_type == 'simple':
        return (current_price / lookback_price) - 1
    if metric_type == 'sma':
        # All prices between lookback_date and current_date (inclusive).
        window = prices[ticker].loc[lookback_date:current_date]
        if window.empty:
            raise ValueError(f"No price data for {ticker} between {lookback_date} and {current_date}.")
        sma = window.mean()
        return (current_price - sma) / sma
    raise ValueError("Invalid metric type. Choose 'simple' or 'sma'.")


###############################################
# 5. Ranking Assets by Momentum
###############################################

def rank_assets(prices, current_date, tickers, lookback_period, metric_type):
    """
    Compute the lookback metric for every asset on an observation date
    and sort descending, so the highest momentum gets rank 1.

    Returns:
        sorted_tickers: tickers sorted best first
        ranks: dict ticker -> rank (1 is best)
        metrics: dict ticker -> computed metric value
    """
    metrics = {ticker: compute_lookback_metric(prices, current_date, ticker,
                                               lookback_period, metric_type)
               for ticker in tickers}
    sorted_tickers = sorted(metrics, key=metrics.get, reverse=True)
    ranks = {ticker: rank + 1 for rank, ticker in enumerate(sorted_tickers)}
    return sorted_tickers, ranks, metrics
###############################################
# 6. Compute Current Portfolio Value
###############################################

def compute_portfolio_value(portfolio, prices, current_date):
    """
    Return the portfolio AUM as of current_date.

    Uses Series.asof so an observation date on a holiday/weekend picks
    up the last available price on or before that date (an exact .loc
    lookup would raise KeyError for non-trading dates).
    """
    value = 0.0
    for ticker, quantity in portfolio.items():
        price = prices[ticker].asof(current_date)
        if pd.isna(price):
            raise ValueError(f"No price data for {ticker} on or before {current_date}.")
        value += quantity * price
    return value


###############################################
# 7. Rebalance the Portfolio
###############################################

def rebalance_portfolio(portfolio, prices, current_date, tickers,
                        sorted_tickers, internal_rebalance_ratios, rebalance_ratio):
    """
    Rebalance a fixed fraction of the portfolio on an observation date.

    Steps:
      (a) Compute current AUM and the rebalance notional
          (rebalance_ratio * AUM).
      (b) Assign each asset a target trade notional =
          rebalance notional * internal ratio, where the ratio is chosen
          by the asset's rank in sorted_tickers (best first).
      (c) For sell orders, cap the sale at the available notional
          (quantity * price).
      (d) Redistribute the amount actually sold across the buy orders
          proportionally to their targets, so the rebalance is
          self-financing.
      (e) Convert trade notionals to quantity changes.

    Returns (new_portfolio, actual_trades, pre-rebalance AUM).
    """
    # (a) Current AUM and total rebalance notional.
    portfolio_value = compute_portfolio_value(portfolio, prices, current_date)
    rebalance_amount = portfolio_value * rebalance_ratio

    # Last available price on or before current_date, fetched once per ticker.
    px = {ticker: prices[ticker].asof(current_date) for ticker in sorted_tickers}

    # (b) Target trades by rank (internal_rebalance_ratios is best-to-worst).
    target_trades = {ticker: rebalance_amount * internal_rebalance_ratios[i]
                     for i, ticker in enumerate(sorted_tickers)}

    # (c) Sells: cap at what the portfolio actually holds.
    total_sold = 0.0
    actual_trades = {}
    for ticker, target_trade in target_trades.items():
        if target_trade < 0:
            available_notional = portfolio[ticker] * px[ticker]
            actual_sell = min(available_notional, abs(target_trade))
            actual_trades[ticker] = -actual_sell
            total_sold += actual_sell
        else:
            actual_trades[ticker] = 0

    # (d) Buys: split the sold amount pro-rata across positive targets.
    total_buy_target = sum(trade for trade in target_trades.values() if trade > 0)
    if total_buy_target > 0:
        for ticker, target_trade in target_trades.items():
            if target_trade > 0:
                actual_trades[ticker] = total_sold * (target_trade / total_buy_target)
    elif total_sold > 0:
        # No buy side to absorb the proceeds: cancel the sells rather
        # than letting the sold notional silently vanish from the AUM.
        actual_trades = {ticker: 0 for ticker in target_trades}
        total_sold = 0.0

    # (e) Apply the trades.
    new_portfolio = portfolio.copy()
    for ticker, trade_notional in actual_trades.items():
        new_portfolio[ticker] += trade_notional / px[ticker]
    return new_portfolio, actual_trades, portfolio_value


###############################################
# 8. Simulate the Strategy
###############################################

def simulate_strategy(prices, eq_tickers, fi_tickers, alts_tickers,
                      initial_aum, start_date, end_date,
                      rebalance_period, rebalance_ratio,
                      lookback_period, metric_type,
                      internal_rebalance_ratios):
    """
    Run the momentum simulation from start_date to end_date.

    At t0 the portfolio is initialized with equal notional weights. At
    each subsequent observation date assets are ranked by the lookback
    metric and a fixed fraction (rebalance_ratio) of the current AUM is
    rotated from the worst-ranked into the best-ranked assets.

    Returns a DataFrame indexed by Date with AUM, per-asset quantities,
    notionals, weights, ranks, metrics and trades, plus period returns.
    """
    tickers = eq_tickers + fi_tickers + alts_tickers
    obs_dates = get_observation_dates(start_date, end_date, rebalance_period)
    results = []

    # 8a. Initial portfolio (at start_date).
    portfolio = initialize_portfolio(prices, start_date, tickers, initial_aum)
    portfolio_value = compute_portfolio_value(portfolio, prices, start_date)
    results.append({
        'Date': start_date,
        'Portfolio Value': portfolio_value,
        **{f'qty_{ticker}': portfolio[ticker] for ticker in tickers},
        **{f'notional_{ticker}': portfolio[ticker] * prices[ticker].asof(start_date)
           for ticker in tickers},
        'Return': 0
    })
    prev_value = portfolio_value

    # 8b. Loop over each observation date after the start date.
    for current_date in obs_dates[1:]:
        sorted_tickers, ranks, metrics = rank_assets(
            prices, current_date, tickers, lookback_period, metric_type)
        portfolio, trades, pre_rebalance_value = rebalance_portfolio(
            portfolio, prices, current_date, tickers, sorted_tickers,
            internal_rebalance_ratios, rebalance_ratio)
        portfolio_value = compute_portfolio_value(portfolio, prices, current_date)
        ret = (portfolio_value - prev_value) / prev_value
        prev_value = portfolio_value

        row = {
            'Date': current_date,
            'Portfolio Value': portfolio_value,
            'Return': ret,
            'Pre-Rebalance Value': pre_rebalance_value,
        }
        for ticker in tickers:
            notional = portfolio[ticker] * prices[ticker].asof(current_date)
            row[f'qty_{ticker}'] = portfolio[ticker]
            row[f'notional_{ticker}'] = notional
            row[f'weight_{ticker}'] = notional / portfolio_value
            row[f'rank_{ticker}'] = ranks.get(ticker, np.nan)
            row[f'metric_{ticker}'] = metrics.get(ticker, np.nan)
            row[f'trade_{ticker}'] = trades.get(ticker, 0)
        results.append(row)

    result_df = pd.DataFrame(results)
    result_df.set_index('Date', inplace=True)

    # Order columns: scalar columns first, then grouped per-asset columns.
    column_groups = ['Portfolio Value', 'Return', 'Pre-Rebalance Value']
    for prefix in ['qty_', 'notional_', 'weight_', 'rank_', 'metric_', 'trade_']:
        column_groups.extend([f'{prefix}{ticker}' for ticker in tickers])
    return result_df[column_groups]
###############################################
# 9. Main – Example Usage
###############################################

if __name__ == '__main__':
    # Asset universe: one sleeve each of equities and fixed income,
    # two alternatives.
    eq_tickers = ['SPY US Equity']
    fi_tickers = ['TLT US Equity']
    alts_tickers = ['GLD US Equity', 'SHV US Equity']

    initial_aum = 100e6                        # 100 million starting AUM
    start_date = pd.to_datetime('2008-01-01')
    end_date = pd.to_datetime('2025-02-01')
    rebalance_period = 2                       # rebalance every 2 months
    rebalance_ratio = 0.2                      # 20% of current AUM is rebalanced each period
    lookback_period = 3                        # 3-month lookback
    metric_type = 'simple'                     # simple return metric; alternatively 'sma'

    # Internal rebalancing mapping for the 4 assets (sorted best-to-worst):
    # best +70%, second +30%, third -30%, worst -70% of the rebalance notional.
    internal_rebalance_ratios = [0.7, 0.3, -0.3, -0.7]

    # Location of the historic price Excel file.
    filepath = r"\\asiapac.nom\data\MUM\IWM\India_IWM_IPAS\Reet\Momentum Strategy\Codes\Historic Prices.xlsx"
    prices = load_price_data(filepath)

    # Run the simulation.
    result_df = simulate_strategy(prices, eq_tickers, fi_tickers, alts_tickers,
                                  initial_aum, start_date, end_date,
                                  rebalance_period, rebalance_ratio,
                                  lookback_period, metric_type,
                                  internal_rebalance_ratios)

    pd.set_option('display.float_format', lambda x: f'{x:,.2f}')
    # print(result_df)

    # -------------------------------
    # Performance Metrics Calculation
    # -------------------------------
    # Load the risk-free rate data (annualized rates, in percent).
    risk_free_filepath = r"\\asiapac.nom\data\MUM\IWM\India_IWM_IPAS\Reet\Momentum Strategy\Codes\Risk Free Rate.xlsx"
    risk_free_df = pd.read_excel(risk_free_filepath, sheet_name="Risk Free Rate", parse_dates=[0])
    risk_free_df.columns = ['Date', '1m', '3m']

    # Periods-per-year for each supported rebalance frequency (months).
    # Defined once instead of repeating the literal map in three places.
    annualization_map = {1: 12, 2: 6, 3: 4}

    def calculate_sharpe_ratio(returns, period):
        """Annualized Sharpe ratio of a series of per-period excess returns."""
        volatility = returns.std()
        avg_return = returns.mean()
        annualization_factor = annualization_map.get(period, 12)  # default: monthly
        return (avg_return / volatility) * (annualization_factor ** 0.5) if volatility > 0 else np.nan

    # simulate_strategy indexes by Date; make sure a 'Date' column exists.
    if 'Date' not in result_df.columns:
        result_df = result_df.reset_index().rename(columns={'index': 'Date'})

    # Merge the risk-free rates onto the result rows.
    merged_df = pd.merge(result_df, risk_free_df, on='Date', how='left')

    # De-annualize the 1-month risk-free rate to the rebalance period.
    period_factor = annualization_map.get(rebalance_period)
    if period_factor:
        merged_df['rf_adjust'] = merged_df['1m'] / period_factor
    else:
        merged_df['rf_adjust'] = 0

    # Excess return = period return minus the period risk-free rate
    # (rates are quoted in percent, hence the /100).
    merged_df['Excess Return'] = merged_df['Return'] - (merged_df['rf_adjust'] / 100)

    # Performance metrics from excess returns.
    excess_returns = merged_df['Excess Return'].dropna()
    sharpe_ratio = calculate_sharpe_ratio(excess_returns, rebalance_period)

    annualization_factor = annualization_map.get(rebalance_period, 12)
    avg_monthly_excess_return = excess_returns.mean()
    monthly_excess_volatility = excess_returns.std()
    annualized_volatility = monthly_excess_volatility * (annualization_factor ** 0.5)
    annualised_risk_premium = avg_monthly_excess_return * annualization_factor

    # Cumulative / annualized performance.
    initial_aum = result_df['Portfolio Value'].iloc[0]
    final_aum = result_df['Portfolio Value'].iloc[-1]
    cumulative_return = (final_aum / initial_aum) - 1

    if isinstance(start_date, str):
        start_date = datetime.strptime(start_date, '%Y-%m-%d')
    if isinstance(end_date, str):
        end_date = datetime.strptime(end_date, '%Y-%m-%d')
    num_months = (end_date.year - start_date.year) * 12 + (end_date.month - start_date.month)
    annualized_return = ((1 + cumulative_return) ** (12 / num_months)) - 1

    def compute_max_drawdown(returns):
        """Largest peak-to-trough decline of the compounded return path."""
        cumulative = (1 + returns).cumprod()
        running_max = cumulative.cummax()
        drawdown = (cumulative - running_max) / running_max
        return drawdown.min()

    max_drawdown = compute_max_drawdown(result_df['Return'])

    # Calendar-year returns.
    annual_returns = result_df.groupby(result_df['Date'].dt.year)['Return'].apply(
        lambda x: (1 + x).prod() - 1)
    min_annual_return = annual_returns.min()
    max_annual_return = annual_returns.max()

    # Assemble all metrics into a single table.
    metrics_df = pd.DataFrame({
        'Metric': [
            'Cumulative Return',
            'Annualized Return',
            'Average Monthly Excess Return',
            'Monthly Excess Volatility',
            'Annualized Volatility',
            'Sharpe Ratio',
            'Annualized Risk Premium',
            'Maximum Drawdown',
            'Minimum Annual Return',
            'Maximum Annual Return'
        ],
        'Value': [
            cumulative_return,
            annualized_return,
            avg_monthly_excess_return,
            monthly_excess_volatility,
            annualized_volatility,
            sharpe_ratio,
            annualised_risk_premium,
            max_drawdown,
            min_annual_return,
            max_annual_return
        ]
    })

    # Format the values as percentages where appropriate.
    percentage_metrics = ['Cumulative Return', 'Annualized Return',
                          'Average Monthly Excess Return', 'Monthly Excess Volatility',
                          'Annualized Volatility', 'Maximum Drawdown',
                          'Minimum Annual Return', 'Maximum Annual Return',
                          'Annualized Risk Premium']
    for metric in percentage_metrics:
        metrics_df.loc[metrics_df['Metric'] == metric, 'Value'] = \
            metrics_df.loc[metrics_df['Metric'] == metric, 'Value'].apply(lambda x: f"{x:.2%}")

    # Format Sharpe Ratio to 2 decimal places.
    metrics_df.loc[metrics_df['Metric'] == 'Sharpe Ratio', 'Value'] = \
        metrics_df.loc[metrics_df['Metric'] == 'Sharpe Ratio', 'Value'].apply(lambda x: f"{x:.2f}")

    print("\nPerformance Metrics for the Portfolio:")
    print(metrics_df.to_string(index=False))

# -----------------------------------------------------------------------------------
# (Author's note, preserved from the original paste)
# this is how I calculate result_df and make a metrics df for it
# now I need to make some plots to display its results.
# I need a max drawdown plot from start to end date
# then I need a max drawdown plot for the first 24 months starting at the start date.
# then I need a cumulative returns plot from start to the end, with SPY and TLT as well.
# and then I need a table of the returns month on month (or whatever the rebalance
# frequency is) during 2008. Thank you.
Editor is loading...
Leave a Comment