import concurrent.futures
import copy
import json
import pytz
import warnings

# This whole block, including the paramiko import, is an attempt to squash
# some warning noise due to a change in a library paramiko uses internally.
from cryptography.utils import CryptographyDeprecationWarning
# Suppress the CryptographyDeprecationWarning warning
warnings.filterwarnings("ignore", category=CryptographyDeprecationWarning)
import paramiko
import pysftp
import traceback
import sys
import re

from bs4 import BeautifulSoup

from model.trading.Rebalance import *
from model.trading.TradeRestrictions import *


## ================================================================================================================

class SimulationConfig:
    ROOTDIR = '/mnt/signal/simulation'

    def __init__(self,
                 subdir,
                 prefix='trading_portfolio',
                 shortName=None,
                 # NOTE(review): these date defaults are evaluated once at module
                 # import time, so endTD is frozen at whenever this module was
                 # first loaded -- confirm that is intended for long-lived jobs.
                 startTD=PyMonth.firstWeekday(201501),
                 endTD=PyMonth.lastWeekday(PyMonth.prevMonth(PyMonth.now())),
                 stratName='strat_asia_v1',

                 spreadsheetKey=None,

                 gamma=1.4,
                 adaptiveGamma=False,
                 adaptiveGammaTargetRisk=0.10,
                 adaptiveGammaTolerance=0.25,
                 tau=4.8,

                 rebalIntervalWD=1,
                 startingNAV=100_000_000,
                 staticNAV=False,
                 solver=cp.ECOS,
                 # alternative: solver=cp.SCS
                 maxIter=200000,
                 cycleIter=5000,
                 memo=None,
                 bcostMaskName=None,

                 executionPrice='vwap',
                 hedgeFX=True,

                 maxLeverage=None,
                 transition=False,
                 transitionInit=0.2,
                 transitionStep=0.2,
                 optimalTransition=False,
                 minNetExposure=None,
                 maxNetExposure=None,
                 minBetaAdjNet=None,
                 maxBetaAdjNet=None,
                 maxRisk=None,
                 offDiagRisk=None,

                 splitLongShort=None,
                 splitLongShortStartDate=PyDate.asDate(20220615),
                 splitLongShortCostlessTransition=False,

                 bcostLookback=8,
                 maxShortUtilization=0.8,
                 HAUM=None,
                 hypotheticalAUM=None,
                 pbList=None,
                 bcostMultiplierName=None,

                 univwtThreshold=None,
                 holdingThreshold=None,
                 # Enables code block that does portfolio correction after optimizer.
                 # Fixes borrows, lot sizes.
                 applyPostOptTrimming=True,
                 rmodelName=None,
                 tmodelName=None,
                 impactCostScale=None,
                 alphaName=None,
                 tradeUniverseName=None,
                 themeBoundName='trading_alpha_theme_exposure_bounds',
                 fcostName=None,
                 bcostName=None,
                 defaultFCost=None,
                 defaultBCost=None,
                 longBoundName=None,
                 shortBoundName=None,

                 initialPortfolio='optimal',
                 liquidityName=None,
                 defaultLiquidityUSDM=None,
                 maxLiquidityBoundLong=None,
                 maxLiquidityBoundShort=None,

                 defaultLongBound=None,
                 defaultShortBound=None,
                 applyShortBan=None,
                 applyExchangeHolidays=None,
                 countryBoundName=None,
                 industryBoundName=None,
                 sizeBoundName=None,

                 maxVolParticipation=None,
                 advName=None,
                 maxAdvProp=None,

                 # BUG FIX: was a mutable default ([]); None is treated
                 # identically to [] by the normalization at the bottom of
                 # this constructor, so behavior is unchanged for callers.
                 noTradeDates=None,

                 targetTolerance=None,
                 toleranceRange=None,

                 minShortAvailM=0,

                 # This will chop a few SVs off the fcov used to calc variance.
                 # This stabilizes the optimization: faster + prevents failures due to a not-PSD covariance.
                 shrinkfcov=True,

                 # This will penalize deviation from zero net with country
                 tightCountry=False,

                 # This is a post-optimizer cut to prevent overborrowing. For new shorts <= availability.
                 # applyPostOptTrimming must be True for this to run.
                 trimOverborrows=False,

                 # Apply the trade restrictions process pre & post-optimiser.  Load GQR dealnames file and for EG parse
                 # their universe with restricts/dealnames.
                 tradeRestrictions=True,
                 max_borrow_cost_DNIS=None,  ## a la GS availability file units, % annualised borrow cost
                 enable_gs_stability_score=False,
                 # NOTE(review): `tr` is accepted but never stored; self.tr is
                 # (re)built inside setPaths() instead -- confirm the parameter
                 # is still needed by callers.
                 tr=None
                 ):
        """Configuration for one simulation run.

        Stores run-level parameters (dates, NAV, solver settings, file-path
        roots) and builds the underlying RebalConfig that drives the
        optimizer.  Paths are derived via setPaths(subdir).
        """

        self.subdir = subdir
        self.prefix = prefix

        PyLog.assertion(shortName is not None, 'Missing shortName')
        self.shortName = shortName

        self.spreadsheetKey = spreadsheetKey

        self.startTD = startTD
        self.endTD = endTD

        self.gamma = gamma
        self.adaptiveGamma = adaptiveGamma
        self.adaptiveGammaTargetRisk = adaptiveGammaTargetRisk
        self.adaptiveGammaTolerance = adaptiveGammaTolerance

        self.tau = tau

        self.stratName = stratName

        self.startingNAV = startingNAV
        self.staticNAV = staticNAV

        self.rebalIntervalWD = rebalIntervalWD

        self.applyExchangeHolidays = applyExchangeHolidays
        self.splitLongShort = splitLongShort
        self.splitLongShortStartDate = splitLongShortStartDate
        self.splitLongShortCostlessTransition = splitLongShortCostlessTransition

        self.bcostLookback = bcostLookback
        self.maxShortUtilization = maxShortUtilization
        self.HAUM = HAUM
        self.hypotheticalAUM = hypotheticalAUM
        self.pbList = pbList

        self.transition = transition
        self.transitionInit = transitionInit
        self.transitionStep = transitionStep
        self.optimalTransition = optimalTransition

        self.minShortAvailM = minShortAvailM
        self.trimOverborrows = trimOverborrows
        self.tradeRestrictions = tradeRestrictions
        self.max_borrow_cost_DNIS = max_borrow_cost_DNIS
        self.enable_gs_stability_score = enable_gs_stability_score

        # Must run after the trade-restriction fields above: setPaths reads
        # self.tradeRestrictions / self.max_borrow_cost_DNIS and sets self.tr.
        self.setPaths(subdir)

        self.rebalConfig = RebalConfig(configName='Simulation',
                                       stratName=stratName,
                                       maxLeverage=maxLeverage,
                                       minNetExposure=minNetExposure,
                                       maxNetExposure=maxNetExposure,
                                       minBetaAdjNet=minBetaAdjNet,
                                       maxBetaAdjNet=maxBetaAdjNet,
                                       gamma=gamma,
                                       adaptiveGamma=adaptiveGamma,
                                       adaptiveGammaTargetRisk=adaptiveGammaTargetRisk,
                                       adaptiveGammaTolerance=adaptiveGammaTolerance,
                                       tau=tau,
                                       maxRisk=maxRisk,
                                       offDiagRisk=offDiagRisk,
                                       splitLongShort=splitLongShort,
                                       splitLongShortStartDate=splitLongShortStartDate,
                                       bcostLookback=bcostLookback,
                                       maxShortUtilization=maxShortUtilization,
                                       HAUM=HAUM,
                                       hypotheticalAUM=hypotheticalAUM,
                                       pbList=pbList,
                                       bcostMultiplierName=bcostMultiplierName,
                                       univwtThreshold=univwtThreshold,
                                       holdingThreshold=holdingThreshold,
                                       rmodelName=rmodelName,
                                       tmodelName=tmodelName,
                                       impactCostScale=impactCostScale,
                                       alphaName=alphaName,
                                       tradeUniverseName=tradeUniverseName,
                                       themeBoundName=themeBoundName,
                                       fcostName=fcostName,
                                       bcostName=bcostName,
                                       defaultFCost=defaultFCost,
                                       defaultBCost=defaultBCost,
                                       longBoundName=longBoundName,
                                       shortBoundName=shortBoundName,

                                       liquidityName=liquidityName,
                                       defaultLiquidityUSDM=defaultLiquidityUSDM,
                                       maxLiquidityBoundLong=maxLiquidityBoundLong,
                                       maxLiquidityBoundShort=maxLiquidityBoundShort,

                                       advName=advName,
                                       maxAdvProp=maxAdvProp,

                                       defaultLongBound=defaultLongBound,
                                       defaultShortBound=defaultShortBound,
                                       applyShortBan=applyShortBan,
                                       applyExchangeHolidays=applyExchangeHolidays,
                                       countryBoundName=countryBoundName,
                                       industryBoundName=industryBoundName,
                                       sizeBoundName=sizeBoundName,
                                       targetTolerance=targetTolerance,
                                       toleranceRange=toleranceRange,

                                       minShortAvailM=minShortAvailM,
                                       shrinkfcov=shrinkfcov,
                                       tightCountry=tightCountry,
                                       enable_gs_stability_score=enable_gs_stability_score,
                                       )

        # RebalConfig may normalize/default holdingThreshold; mirror its value.
        self.holdingThreshold = self.rebalConfig.holdingThreshold
        self.applyPostOptTrimming = applyPostOptTrimming

        self.solver = solver
        self.maxIter = maxIter
        self.cycleIter = cycleIter

        self.memo = memo
        self.bcostMaskName = bcostMaskName

        PyLog.assertion(executionPrice in ['close', 'open', 'vwap'],
                        "Invalid execution price: {}".format(executionPrice))
        self.executionPrice = executionPrice
        self.hedgeFX = hedgeFX

        self.maxVolParticipation = maxVolParticipation
        self.initialPortfolio = initialPortfolio

        # Normalize None/[] to [] and coerce entries to dates.
        self.noTradeDates = [] if noTradeDates is None or len(noTradeDates) == 0 \
            else [PyDate.asDate(x) for x in noTradeDates]


    def setPaths(self, subdir):
        """Set simulation/audit directory paths and other derived variables.

        Broken out into a method so paths can be re-pointed after the object
        has been constructed, not just during __init__.
        """
        self.simDir = os.path.join(self.ROOTDIR, subdir)
        if not FileUtil.exists(self.simDir):
            FileUtil.createDir(self.simDir, mode=0o777)

        self.auditDir = os.path.join(self.simDir, 'audit')
        # BUG FIX: previously re-tested self.simDir (copy/paste), which was
        # just created above, so the audit directory was never created here
        # (preserveSimFile compensated with os.makedirs).  Test/create the
        # audit directory itself.
        if not FileUtil.exists(self.auditDir):
            FileUtil.createDir(self.auditDir)

        self.navFile = os.path.join(self.simDir, 'trading_portfolio_nav.plz4')
        self.tr = TradeRestrictions(self.simDir, MAX_BORROW_RATE=self.max_borrow_cost_DNIS) if self.tradeRestrictions else None

    def preserveSimFile(self, srcfile):
        """Copy srcfile into the audit directory and make the copy read-only."""
        tgtfile = os.path.join(self.auditDir, os.path.basename(srcfile))
        os.makedirs(self.auditDir, exist_ok=True)
        # SECURITY/robustness: run `cp` with an argument list and no shell so
        # paths containing spaces or shell metacharacters cannot break or
        # inject into the command.  -r -p preserved from the original call
        # (recursive copy keeping timestamps/permissions).
        import subprocess
        argv = ['cp', '-r', '-p', srcfile, self.auditDir]
        subprocess.run(argv)
        PyLog.info(f"preserveSimFile:{' '.join(argv)}")
        FileUtil.setPermission(tgtfile, 0o444)

    # -----------------------------------------------------------------------------------------------------------

    def getFileFrame(self, prefix=None, fileType=None):
        """Return a frame of simulation files with parsed prefix/fileType/tradeDate.

        Lists self.simDir, derives three columns from each filename, and
        optionally filters by `prefix` and/or `fileType`.  Result is sorted
        by tradeDate.
        """
        dfFiles = FileUtil.ls(self.simDir)
        dfFiles = dfFiles[dfFiles['directory'] == self.simDir]

        # PERF: each regex was previously searched twice per filename; run
        # each search once and reuse the match object.
        regxPrefix = re.compile(r'(trading|optimal)_portfolio')
        prefixMatches = [regxPrefix.search(x) for x in dfFiles['filename']]
        dfFiles = dfFiles.assign(prefix=['' if m is None else m.group() for m in prefixMatches])

        # Strip the leading portfolio prefix and the optional _YYYYMMDD.plz4
        # tail, leaving the file type (e.g. 'rebalance', 'risk').
        regxType = re.compile(r'^(trading|optimal)_portfolio_|(_[0-9]{8})?\.plz4$')
        dfFiles = dfFiles.assign(fileType=[regxType.sub('', x) for x in dfFiles['filename']])

        regxDate = re.compile(r'[0-9]{8}\.plz4$')
        dateMatches = [regxDate.search(x) for x in dfFiles['filename']]
        dfFiles = dfFiles.assign(
            tradeDate=[None if m is None else PyDate.asDate(m.group().split('.')[0])
                       for m in dateMatches])

        if prefix is not None:
            PyLog.assertion(prefix in ['trading_portfolio', 'optimal_portfolio'], "Invalid prefix: {}".format(prefix))
            dfFiles = dfFiles[dfFiles['prefix'] == prefix]

        if fileType is not None:
            PyLog.assertion(fileType in ['rebalance', 'positions_pretrade', 'positions_posttrade',
                                         'risk', 'daily_nav_pretrade', 'daily_nav_posttrade'],
                            "Invalid fileType: {}".format(fileType))
            dfFiles = dfFiles[dfFiles['fileType'] == fileType]

        dfFiles = dfFiles.sort_values(by='tradeDate')
        return dfFiles

    # -----------------------------------------------------------------------------------------------------------

    def loadNAVFrame(self, backup=False):
        """Load the simulation NAV frame; empty DataFrame when missing.

        backup=True reads the '_nav_backup' variant of the file.
        """
        filename = self.navFile
        if backup:
            filename = re.sub('_nav', '_nav_backup', filename)
        if FileUtil.exists(filename):
            PyLog.info(f'loadNAVFrame loaded ((unknown))')
            xx = FileUtil.load(filename)
            PyLog.info(f"got nav frame of len {len(xx)}")
            return xx
        else:
            return pd.DataFrame()

    def writeNAVFrame(self, dfNAV, backup=False):
        """Persist the NAV frame (backup=True writes the '_nav_backup' variant)."""
        filename = self.navFile
        if backup:
            filename = re.sub('_nav', '_nav_backup', filename)
        FileUtil.save(dfNAV, filename=filename)
        # CONSISTENCY FIX: route through PyLog like every other method in
        # this module instead of a bare print().
        PyLog.info(f'writeNAVFrame wrote ((unknown))')

    # -----------------------------------------------------------------------------------------------------------

    def dailyNAVFile(self, tradeDate, step=None, extension='plz4'):
        """Return the daily-NAV file path for tradeDate at the given step."""
        PyLog.assertion(step in ['pretrade', 'posttrade'], 'Invalid step: {}'.format(step))
        PyLog.assertion(extension in ['plz4', 'json'], 'Invalid extension: {}'.format(extension))
        filename = '{}_daily_nav_{}_{}.{}'.format(self.prefix, step, PyDate.asString(tradeDate), extension)
        return os.path.join(self.simDir, filename)

    def writeDailyNAVFile(self, dctNAV, tradeDate, step=None):
        """Persist the daily-NAV dict for tradeDate/step."""
        fn = self.dailyNAVFile(tradeDate=tradeDate, step=step, extension='plz4')
        FileUtil.save(dctNAV, filename=fn)

    def loadDailyNAVFile(self, tradeDate, step=None):
        """Load the daily-NAV dict for tradeDate/step; empty dict when absent."""
        filename = self.dailyNAVFile(tradeDate=tradeDate, step=step, extension='plz4')
        dctNAV = FileUtil.load(filename) if FileUtil.exists(filename) else dict()
        PyLog.info(f"sim {self.shortName} loading DailyNAV (unknown) on {tradeDate}")
        return dctNAV

    def exportDailyNAVFile(self, tradeDate, step=None):
        """Write the daily-NAV dict out as JSON (values stringified); return the path."""
        dctNAV = self.loadDailyNAVFile(tradeDate=tradeDate, step=step)
        # IDIOM: iterate .items() instead of keys + re-indexing the dict.
        dctNAV = {key: str(value) for key, value in dctNAV.items()}
        filename = self.dailyNAVFile(tradeDate=tradeDate, step=step, extension='json')
        with open(filename, 'w') as fp:
            json.dump(dctNAV, fp)
        return filename

    # -----------------------------------------------------------------------------------------------------------

    def dailyReturnsFile(self):
        """Path of the cumulative daily-returns file for this simulation."""
        return os.path.join(self.simDir, '{}_daily_returns.plz4'.format(self.prefix))

    def writeDailyReturnsFile(self, dfRet):
        """Persist the daily-returns frame to its canonical location."""
        FileUtil.save(dfRet, self.dailyReturnsFile())

    def loadDailyReturnsFile(self):
        """Load the daily-returns frame; empty DataFrame (with a log line) when absent."""
        filename = self.dailyReturnsFile()
        if FileUtil.exists(filename):
            return FileUtil.load(filename)
        PyLog.info('Daily returns file not found: {}'.format(filename))
        return pd.DataFrame()

    def updateDailyReturnsSpreadsheet(self):
        """Push freshly generated daily returns into the 'DailyReturns' Google sheet.

        Reads the sheet's existing tradeDate column (column B) to find the
        newest written date, inserts blank rows for any newer dates, then
        bulk-updates the sheet starting at A2.
        """
        debug = True

        # NOTE(review): there is no early return here -- if PyLog.error only
        # logs (rather than raising), execution continues with a None key and
        # the GoogleSheets constructor below will fail; confirm whether a
        # `return` was intended.
        if self.spreadsheetKey is None:
            PyLog.error('Invalid spreadsheet key')

        dfUpdate = Simulation.generateDailyReturns(self, save=False)
        # Column layout must match the sheet; any column the generator did
        # not produce is blank-filled below.
        columns = ['signalDate', 'tradeDate', 'rebalDate', 'span', 'bodNAV', 'bodGrExp', 'fcostUSD', 'bcostUSD',
                   'preOptNAV', 'preOptGrExp', 'tau', 'preTradePnLHoldUSD', 'preTradeNAV', 'preTradeGrExp',
                   'tradeWeights', 'tradeValUSD', 'linearCostUSD', 'impactCostUSD', 'tcostUSD', 'postTradeNAV',
                   'postTradePnLHoldUSD', 'pnlHoldUSD', 'totalPnLUSD', 'eodNAV', 'eodGrExp', 'gamma',
                   'preTradePnLFxUSD', 'postTradePnLFxUSD', 'pnlFxUSD', 'fcost', 'bcost', 'tcost', 'holdret',
                   'logret', 'periodret', 'cumlogret', 'cumret']

        for column in set(columns).difference(set(dfUpdate.columns)):
            dfUpdate[column] = ''
        dfUpdate = dfUpdate[columns]

        wsheet = GoogleSheets(spreadsheetKey=self.spreadsheetKey, worksheetName='DailyReturns').wsheet
        # Column 2 holds tradeDate strings (m/d/Y); skip the header row.
        lst = wsheet.col_values(2)[1:]
        maxTD = max([datetime.datetime.strptime(x, '%m/%d/%Y').date() for x in lst if x != '']
                    + [GoogleSheets.BASEDT])

        if maxTD == GoogleSheets.BASEDT:
            # Sheet has no dates at all: clear any stale body rows first.
            output = wsheet.spreadsheet.values_clear('DailyReturns!A2:AL{}'.format(max(2, len(lst) + 1)))
        elif maxTD < dfUpdate['tradeDate'].max():
            # Keep only rows from the sheet's newest date onward and make
            # room for them.  NOTE(review): inserts len-1 rows at index 3 --
            # presumably the newest existing row is overwritten in place by
            # the update below; verify against the sheet layout.
            dfUpdate = dfUpdate[dfUpdate['tradeDate'] >= maxTD]
            for n in range(len(dfUpdate) - 1):
                output = wsheet.insert_row([], index=3)


        if len(dfUpdate) > 0:
            dfUpdate = dfUpdate.sort_values(by='tradeDate', ascending=False)
            # Dates are written as day-offsets (spans) from the sheet's base date.
            dfUpdate = dfUpdate.assign(signalDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dfUpdate['signalDate']])
            dfUpdate = dfUpdate.assign(tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dfUpdate['tradeDate']])
            dfUpdate = dfUpdate.assign(rebalDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dfUpdate['rebalDate']])
            dfUpdate = dfUpdate.fillna('')
            output = wsheet.update('A2', dfUpdate.values.tolist())


    # -----------------------------------------------------------------------------------------------------------

    def positionFile(self, tradeDate, prefix=None, step=None, extension='plz4'):
        """Build the positions file path for tradeDate/step under simDir."""
        PyLog.assertRegex(step, ['pretrade', 'posttrade', 'backup-[0-9]+'], 'Invalid step: {}'.format(step))
        PyLog.assertion(extension in ['plz4', 'csv'], 'Invalid extension: {}'.format(extension))
        if prefix is None:
            prefix = self.prefix
        name = '{}_positions_{}_{}.{}'.format(prefix, step, PyDate.asString(tradeDate), extension)
        return os.path.join(self.simDir, name)


    def writePositionFile(self, dfPositions, tradeDate, prefix=None, step=None):
        """Write a positions frame for tradeDate/step.

        Honours the SIMS_POSNS_OUT_PATH environment override (a path template
        containing YYYYMMDD and STEP placeholders).
        """
        fn = self.positionFile(tradeDate=tradeDate, prefix=prefix, step=step)

        if 'SIMS_POSNS_OUT_PATH' in os.environ:
            ## dch set new outpath..
            fn = os.environ['SIMS_POSNS_OUT_PATH'].replace('YYYYMMDD', str(tradeDate).replace('-','')).replace('STEP',step)
            print(f'writePositionFile SIMS_POSNS_OUT_PATH --> SIMS_POSNS_OUT_PATH write to ({fn})')

        debug = False
        PyLog.info(f"writePositionFile... (unknown)")
        # DE-DUPLICATION: the save call previously appeared in both branches
        # of an if/else on `debug`.  Behaviour is preserved: with debug=True
        # failures are logged and swallowed; otherwise they propagate.
        try:
            FileUtil.save(dfPositions, filename=fn)
        except Exception as ee:
            if not debug:
                raise
            PyLog.info("Simulate.writePositionFile ({}) ...ERR({}) in writing!".format(fn, ee))
            PyLog.info(traceback.format_exc())

    def loadPositionFile(self, tradeDate, prefix=None, step=None):
        """Load a positions frame; SIMS_POSNS_IN_PATH env template overrides the path."""
        fn = self.positionFile(tradeDate=tradeDate, prefix=prefix, step=step)
        if 'SIMS_POSNS_IN_PATH' in os.environ:
            ## dch set new inpath..
            fn = os.environ['SIMS_POSNS_IN_PATH'].replace('YYYYMMDD', str(tradeDate).replace('-','')).replace('STEP',step)

        if not FileUtil.exists(fn):
            PyLog.info("Simulation.py:loadPositionFile ..go load file {} ---DOES NOT EXIST..".format(fn))
            return pd.DataFrame()

        PyLog.info(f"Simulation.py:loadPositionFile ({fn})")
        return FileUtil.load(fn)

    def rebalFile(self, tradeDate, prefix=None):
        """Path of the rebalance-result file for tradeDate."""
        if prefix is None:
            prefix = self.prefix
        name = '{}_rebalance_{}.plz4'.format(prefix, PyDate.asString(tradeDate))
        return os.path.join(self.simDir, name)

    def writeRebalFile(self, dctOptResult, tradeDate, prefix=None):
        """Persist an optimizer-result dict for tradeDate."""
        FileUtil.save(dctOptResult, filename=self.rebalFile(tradeDate=tradeDate, prefix=prefix))

    def loadRebalFile(self, tradeDate, prefix=None):
        """Load the optimizer-result dict for tradeDate; empty dict when absent."""
        filename = self.rebalFile(tradeDate=tradeDate, prefix=prefix)
        if not FileUtil.exists(filename):
            return dict()
        return FileUtil.load(filename)

    # -----------------------------------------------------------------------------------------------------------

    def riskReportFile(self, tradeDate, prefix=None):
        """Path of the risk-report file for tradeDate."""
        if prefix is None:
            prefix = self.prefix
        name = '{}_risk_{}.plz4'.format(prefix, PyDate.asString(tradeDate))
        return os.path.join(self.simDir, name)

    def writeRiskReport(self, dctRiskReport, tradeDate, prefix=None):
        """Persist a risk-report dict for tradeDate."""
        FileUtil.save(dctRiskReport, filename=self.riskReportFile(tradeDate=tradeDate, prefix=prefix))

    def loadRiskReport(self, tradeDate, prefix=None):
        """Load the risk-report dict for tradeDate; empty dict when absent."""
        filename = self.riskReportFile(tradeDate=tradeDate, prefix=prefix)
        if not FileUtil.exists(filename):
            return dict()
        return FileUtil.load(filename)

    # -----------------------------------------------------------------------------------------------------------

    def attributionFile(self, backup=False):
        """Path of the attribution bundle (the '_backup' variant when backup=True)."""
        suffix = '_backup' if backup else ''
        return os.path.join(self.simDir, 'trading_portfolio_attribution{}.plz4'.format(suffix))

    def loadAttributionFile(self, backup=False):
        """Return (dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt); four empty frames when absent."""
        filename = self.attributionFile(backup)
        if not FileUtil.exists(filename):
            return pd.DataFrame(), pd.DataFrame(), pd.DataFrame(), pd.DataFrame()
        dct = FileUtil.load(filename)
        return dct['dfAttrib'], dct['dfPnL'], dct['dfCtryAtt'], dct['dfSctrAtt']

    def backupAttributionFile(self):
        """Rotate the live attribution file onto its backup name, replacing any old backup."""
        srcname = self.attributionFile(backup=False)
        tgtname = self.attributionFile(backup=True)
        if not FileUtil.exists(srcname):
            return
        if FileUtil.exists(tgtname):
            FileUtil.remove(tgtname)
        FileUtil.rename(srcname, tgtname)

    def writeAttributionFile(self, dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt, backup=False):
        """Persist the four attribution frames as a single dict."""
        dct = {'dfAttrib': dfAttrib, 'dfPnL': dfPnL, 'dfCtryAtt': dfCtryAtt, 'dfSctrAtt': dfSctrAtt}
        FileUtil.save(dct, filename=self.attributionFile(backup))

    # -----------------------------------------------------------------------------------------------------------

    def attributionFile2(self, backup=False):
        """Path of the secondary attribution bundle (the '_backup' variant when backup=True)."""
        suffix = '_backup' if backup else ''
        return os.path.join(self.simDir, 'trading_portfolio_attribution2{}.plz4'.format(suffix))

    def loadAttributionFile2(self, backup=False):
        """Return (dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt); four empty frames when absent."""
        filename = self.attributionFile2(backup)
        if not FileUtil.exists(filename):
            return pd.DataFrame(), pd.DataFrame(), pd.DataFrame(), pd.DataFrame()
        dct = FileUtil.load(filename)
        return dct['dfAttrib'], dct['dfPnL'], dct['dfCtryAtt'], dct['dfSctrAtt']

    def backupAttributionFile2(self):
        """Rotate the live attribution2 file onto its backup name, replacing any old backup."""
        srcname = self.attributionFile2(backup=False)
        tgtname = self.attributionFile2(backup=True)
        if not FileUtil.exists(srcname):
            return
        if FileUtil.exists(tgtname):
            FileUtil.remove(tgtname)
        FileUtil.rename(srcname, tgtname)

    def writeAttributionFile2(self, dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt, backup=False):
        """Persist the four attribution frames as a single dict."""
        dct = {'dfAttrib': dfAttrib, 'dfPnL': dfPnL, 'dfCtryAtt': dfCtryAtt, 'dfSctrAtt': dfSctrAtt}
        FileUtil.save(dct, filename=self.attributionFile2(backup))

    # -----------------------------------------------------------------------------------------------------------

class Simulation:
    ROOTDIR = '/mnt/signal/simulation'

    @classmethod
    def outDir(cls, subdir):
        """Return (creating if necessary, world-writable) the output dir for subdir."""
        path = os.path.join(cls.ROOTDIR, subdir)
        if not FileUtil.exists(path):
            FileUtil.createDir(path, mode=0o777)
        return path

    ## generate trading portfolios in sequence ------------------------------------------------------------

    @classmethod
    def initializeEmpty(cls, simConfig):
        """Seed the simulation with an empty book on the weekday before startTD."""
        tradeDate = PyDate.prevWeekday(simConfig.startTD)
        signalDate = PyDate.prevWeekday(tradeDate)

        emptyBook = pd.DataFrame(columns=['assetKey', 'eodWeights', 'eodValUSD'])
        simConfig.writePositionFile(emptyBook, tradeDate=tradeDate, step='posttrade')

        dctNAV = dict(signalDate=signalDate,
                      tradeDate=PyDate.nextWeekday(signalDate),
                      rebalDate=tradeDate,
                      eodNAV=simConfig.startingNAV,
                      gamma=simConfig.gamma)
        simConfig.writeDailyNAVFile(dctNAV, tradeDate, step='posttrade')

    @classmethod
    def initializeOptimal(cls, simConfig, verbose=False, checkDCP=False):
        """Seed the simulation with the optimizer's portfolio on the weekday before startTD."""
        tradeDate = PyDate.prevWeekday(simConfig.startTD)
        signalDate = PyDate.prevWeekday(tradeDate)

        optResult = Rebalance.runOptimal(rebalConfig=simConfig.rebalConfig,
                                         signalDate=signalDate,
                                         solver=simConfig.solver,
                                         maxIter=simConfig.maxIter,
                                         cycleIter=simConfig.cycleIter,
                                         verbose=verbose,
                                         checkDCP=checkDCP,
                                         tradeRestrictions=simConfig.tr)

        simConfig.writeRebalFile(optResult, tradeDate=tradeDate)

        # Optimal weights become end-of-day positions, dropping holdings
        # below the configured threshold.
        dfPositions = (optResult['dfWeights'][['assetKey', 'optimalWeights']]
                       .rename(columns={'optimalWeights': 'eodWeights'}))
        dfPositions = dfPositions[abs(dfPositions['eodWeights']) >= simConfig.holdingThreshold]
        dfPositions = dfPositions.assign(eodValUSD=dfPositions['eodWeights'] * simConfig.startingNAV)
        simConfig.writePositionFile(dfPositions, tradeDate=tradeDate, step='posttrade')

        dctNAV = dict(signalDate=signalDate,
                      tradeDate=PyDate.nextWeekday(signalDate),
                      rebalDate=tradeDate,
                      eodNAV=simConfig.startingNAV,
                      gamma=optResult['gamma'])
        simConfig.writeDailyNAVFile(dctNAV, tradeDate, step='posttrade')

    @classmethod
    def runTrading(cls, simConfig, resumeSimulation=True, lastTD=None,
                   verbose=False, checkDCP=False, terminateOn=None, terminate=False):
        """Run the day-by-day simulation from startTD to endTD.

        Resumes from the NAV file when present (optionally truncated to
        lastTD), otherwise initializes a fresh portfolio per
        simConfig.initialPortfolio; then iterates weekdays, running the
        pre-trade and post-trade steps, until endTD or the terminateOn
        cut-off time passes.
        """
        if terminate:
            # Auto-select the cut-off as the next upcoming of: 06:00 today,
            # 14:29 today, or 06:00 tomorrow (local clock).
            today = PyDate.today()
            tomorrow = PyDate.tomorrow()
            threshold1 = datetime.datetime(today.year, today.month, today.day, 6, 0, 0)
            threshold2 = datetime.datetime(today.year, today.month, today.day, 14, 29, 0)
            threshold3 = datetime.datetime(tomorrow.year, tomorrow.month, tomorrow.day, 6, 0, 0)
            if datetime.datetime.now() < threshold1:
                terminateOn = threshold1
            elif datetime.datetime.now() < threshold2:
                terminateOn = threshold2
            else:
                terminateOn = threshold3

        if resumeSimulation and FileUtil.exists(simConfig.navFile):

            dfNAV = simConfig.loadNAVFrame()

            if lastTD is not None:
                ## backup dfNAV before truncating it to lastTD
                simConfig.writeNAVFrame(dfNAV, backup=True)
                lastTD = PyDate.asDate(lastTD)
                PyLog.assertion(lastTD in dfNAV['tradeDate'].tolist(),
                                "Invalid last trade date: {}".format(PyDate.asISO(lastTD)))
                dfNAV = dfNAV[dfNAV['tradeDate'] <= lastTD]

            # Resume on the weekday after the newest recorded trade date.
            dfNAV = dfNAV.sort_values(by='tradeDate', ascending=True)
            # dctNAV = dfNAV.iloc[-1].to_dict()
            # tradeDate = PyDate.nextWeekday(dctNAV['tradeDate'])
            tradeDate = PyDate.nextWeekday(dfNAV['tradeDate'].iloc[-1])

        else:
            ## initialize simulation
            if simConfig.initialPortfolio == 'optimal':
                cls.initializeOptimal(simConfig)
            elif simConfig.initialPortfolio == 'empty':
                cls.initializeEmpty(simConfig)
            tradeDate = PyDate.weekdayGE(simConfig.startTD)

        ## main simulation -----------------------------------------------------------------------------

        while PyDate.le(tradeDate, simConfig.endTD):
            if (terminateOn is not None) and ((datetime.datetime.now() > terminateOn)):
                PyLog.info("Terminated by time condition: {}".format(terminateOn.strftime('%Y-%m-%d %H:%M')))
                break
            else:
                cls.simulatePreTrade(tradeDate, simConfig, verbose=verbose, checkDCP=checkDCP)
                cls.simulatePostTrade(tradeDate, simConfig)
                tradeDate = PyDate.nextWeekday(tradeDate)

    @classmethod
    def getExchangeHolidays(cls, tradeDate):
        """Return quote-country codes whose exchanges are closed on tradeDate.

        BUG FIX: the old check `'HK' in hframe['quoteCountry']` tested the
        pandas Series *index* (its labels), not its values, so the CN/XH
        expansion for Hong Kong holidays effectively never triggered.  Test
        membership in the extracted list of values instead.
        """
        hframe = SignalMgr.getStatic('trading_exchange_holidays_master_frame')
        hframe = hframe[hframe['tradeDate'] == tradeDate]
        hframe = hframe[hframe['trading'] == 'No']
        lstHolidays = hframe['quoteCountry'].tolist()
        if 'HK' in lstHolidays:
            lstHolidays = list(set(lstHolidays + ['CN', 'XH']))
        return lstHolidays

    @classmethod
    def simulatePreTrade(cls, tradeDate, simConfig, verbose=False, checkDCP=False, preserveSim=False):
        """Run the pre-trade step of the daily simulation for ``tradeDate``.

        Loads the previous weekday's post-trade NAV/positions, charges
        financing (fcost) and borrow (bcost) costs, runs the rebalance
        optimization when ``tradeDate`` is a rebalance date, applies
        post-optimization trimming (small trades, lot sizes, overborrows,
        trade restrictions, minimum holdings, hard long/short bounds) and
        writes the 'pretrade' position and daily-NAV files via ``simConfig``.

        :param tradeDate:   simulation trade date being processed
        :param simConfig:   SimulationConfig carrying all strategy settings
        :param verbose:     passed through to the optimizer
        :param checkDCP:    passed through to the optimizer (DCP validation)
        :param preserveSim: additionally archive the files written this step
        :raises Exception:  if the optimizer fails twice, or if prices are
                            missing for assets with non-zero weights
        """
        debug = True

        signalDate = prevTD = PyDate.prevWeekday(tradeDate)
        span = PyDate.span(prevTD, tradeDate)
        dctNAV = simConfig.loadDailyNAVFile(tradeDate=prevTD, step='posttrade')

        if simConfig.adaptiveGamma:
            # carry forward yesterday's adaptive risk-aversion setting
            simConfig.rebalConfig.gamma = dctNAV['gamma']

        stratName = simConfig.stratName
        fcostName = simConfig.rebalConfig.fcostName
        defaultFCost = simConfig.rebalConfig.defaultFCost
        defaultBCost = simConfig.rebalConfig.defaultBCost

        ## A new rebalance is due once rebalIntervalWD weekdays have elapsed
        ## since the last rebalance, unless tradeDate is explicitly excluded.
        if PyDate.ge(tradeDate, PyDate.plusWeekdays(dctNAV['rebalDate'], simConfig.rebalIntervalWD)) and \
                tradeDate not in simConfig.noTradeDates:
            rebalDate = tradeDate
            PyLog.info(f"Simulate.simulatePreTrade set td==rd tradeDate: {tradeDate} rebalDate: {rebalDate}")
        else:
            rebalDate = dctNAV['rebalDate']

        NAV = simConfig.startingNAV if simConfig.staticNAV else dctNAV['eodNAV']
        dctNAV = dict(signalDate=signalDate,
                      tradeDate=tradeDate,
                      rebalDate=rebalDate,
                      span=span,
                      bodNAV=NAV)

        ## BOD weights
        if simConfig.splitLongShort and simConfig.splitLongShortCostlessTransition \
                and (simConfig.splitLongShortStartDate == tradeDate):
            ## costless transition: jump straight to the optimal portfolio
            dctOptResult = Rebalance.runOptimal(rebalConfig=simConfig.rebalConfig,
                                                signalDate=PyDate.prevWeekday(tradeDate),
                                                NAV=dctNAV['bodNAV'],
                                                solver=simConfig.solver,
                                                maxIter=simConfig.maxIter,
                                                cycleIter=simConfig.cycleIter,
                                                verbose=False,
                                                checkDCP=True,
                                                tradeRestrictions=simConfig.tr)
            dfWeights = dctOptResult['dfWeights']
            dfPositions = dfWeights[['assetKey', 'postOptWeights']]
            dfPositions = dfPositions[Real.isNonZero(dfPositions['postOptWeights'])]
            dfPositions = dfPositions.rename(columns={'postOptWeights': 'bodWeights'})
            dfPositions = dfPositions.assign(bodValUSD=dfPositions['bodWeights'] * dctNAV['bodNAV'])
        elif simConfig.staticNAV:
            ## static NAV: carry forward yesterday's dollar positions, re-derive weights
            dfPositions = simConfig.loadPositionFile(tradeDate=prevTD, step='posttrade')
            dfPositions = dfPositions[['assetKey', 'eodValUSD']]. \
                rename(columns={'eodValUSD': 'bodValUSD'})
            dfPositions = dfPositions[Real.isNonZero(dfPositions['bodValUSD'])]
            dfPositions = dfPositions.assign(bodWeights=dfPositions['bodValUSD'] / dctNAV['bodNAV'])
        else:
            dfPositions = simConfig.loadPositionFile(tradeDate=prevTD, step='posttrade')
            dfPositions = dfPositions[['assetKey', 'eodWeights', 'eodValUSD']]. \
                rename(columns={'eodWeights': 'bodWeights', 'eodValUSD': 'bodValUSD'})
            dfPositions = dfPositions[Real.isNonZero(dfPositions['bodWeights'])]

        dctNAV['bodGrExp'] = abs(dfPositions['bodWeights']).sum()

        ## apply fcost (financing cost, charged on long positions only)
        dfm = SignalMgr.getFrame(fcostName, signalDate, stratName).rename(columns={fcostName: 'fcost'}). \
            drop(columns='signalDate')

        dfPositions = dfPositions.merge(dfm, how='left', on='assetKey')
        dfPositions = dfPositions.assign(fcost=dfPositions['fcost'].fillna(defaultFCost))
        dfPositions = dfPositions.assign(
            fcostUSD=Real.isPositive(dfPositions['bodValUSD']).astype(float) * dfPositions['bodValUSD'] *
                     dfPositions['fcost'] * span / 365)
        dctNAV['fcostUSD'] = dfPositions['fcostUSD'].sum()

        ## apply bcost (borrow cost, charged on short positions only)
        ## Note, the time condition here is different from that in Rebalance. We want the splitLongShort
        ## applied in optimization first before applying it in the portfolio accounting.
        if simConfig.splitLongShort and (tradeDate >= PyDate.nextWeekday(simConfig.splitLongShortStartDate)):
            dfm = cls.computeBCost(simConfig, tradeDate, dfPositions, NAV=dctNAV['bodNAV'])
            dfPositions = dfPositions.merge(dfm, how='left', on='assetKey')
            dfPositions = dfPositions.assign(bcost=dfPositions['bcost'].fillna(0.0))
        else:
            bcostName = simConfig.rebalConfig.bcostName
            dfm = SignalMgr.getFrame(bcostName, signalDate, stratName). \
                rename(columns={bcostName: 'bcost'}). \
                drop(columns='signalDate')
            dfPositions = dfPositions.merge(dfm, how='left', on='assetKey')
            dfPositions = dfPositions.assign(bcost=dfPositions['bcost'].fillna(defaultBCost))

        dfPositions = dfPositions.assign(
            bcostUSD=- Real.isNegative(dfPositions['bodValUSD']).astype(float) * dfPositions['bodValUSD'] *
                     dfPositions['bcost'] * span / 365)
        dctNAV['bcostUSD'] = dfPositions['bcostUSD'].sum()

        ## determine pre-optimization weights (NAV net of financing/borrow costs)
        dctNAV['preOptNAV'] = dctNAV['bodNAV'] - dctNAV['fcostUSD'] - dctNAV['bcostUSD']

        dfPositions = dfPositions.assign(preOptWeights=dfPositions['bodValUSD'] / dctNAV['preOptNAV'])
        dctNAV['preOptGrExp'] = abs(dfPositions['preOptWeights']).sum()

        rebalConfig = copy.copy(simConfig.rebalConfig)

        PyLog.info(
            f"simulatePreTrade td:{tradeDate} rd:{rebalDate} tran:{simConfig.transition} optimalTransition:{simConfig.optimalTransition}")
        ## optimization
        if tradeDate == rebalDate:
            preOptWeights = FrameUtil.toSeries(dfPositions, keyCol='assetKey', valCol='preOptWeights')

            if simConfig.transition:
                ## ramp leverage up linearly from transitionInit over the transition period
                maxLeverage = simConfig.transitionInit \
                              + simConfig.transitionStep * PyDate.span(simConfig.startTD, tradeDate)
                rebalConfig.maxLeverage = min(rebalConfig.maxLeverage, maxLeverage)

            if (simConfig.transition or simConfig.optimalTransition) \
                    and tradeDate == PyDate.weekdayGE(simConfig.startTD):
                ## first simulated day of a transition: solve for the optimal portfolio
                dctOptResult = Rebalance.run(rebalConfig=rebalConfig,
                                             signalDate=signalDate,
                                             preOptWeights=preOptWeights,
                                             NAV=dctNAV['preOptNAV'],
                                             solver=simConfig.solver,
                                             maxIter=simConfig.maxIter,
                                             cycleIter=simConfig.cycleIter,
                                             verbose=verbose,
                                             checkDCP=checkDCP,
                                             optimal=True,
                                             tradeRestrictions=simConfig.tr)
            else:
                try:
                    dctOptResult = Rebalance.run(rebalConfig=rebalConfig,
                                                 signalDate=signalDate,
                                                 preOptWeights=preOptWeights,
                                                 NAV=dctNAV['preOptNAV'],
                                                 solver=simConfig.solver,
                                                 maxIter=simConfig.maxIter,
                                                 cycleIter=simConfig.cycleIter,
                                                 verbose=verbose,
                                                 checkDCP=checkDCP,
                                                 tradeRestrictions=simConfig.tr)
                except Exception:
                    ## retry once with optimal=True before giving up.
                    ## NOTE(review): this retry is suspect -- re-running with
                    ## optimal=True on failure changes the problem being solved.
                    try:
                        dctOptResult = Rebalance.run(rebalConfig=rebalConfig,
                                                     signalDate=signalDate,
                                                     preOptWeights=preOptWeights,
                                                     NAV=dctNAV['preOptNAV'],
                                                     solver=simConfig.solver,
                                                     maxIter=simConfig.maxIter,
                                                     cycleIter=simConfig.cycleIter,
                                                     verbose=verbose,
                                                     checkDCP=checkDCP,
                                                     optimal=True,
                                                     tradeRestrictions=simConfig.tr)
                    except Exception:
                        PyLog.info(traceback.format_exc())
                        # previously we re-ran the optimization using t-1 data; that produced incorrect trades.
                        raise Exception("Optimizer failed. See log")

            simConfig.writeRebalFile(dctOptResult, tradeDate=tradeDate)

            dfWeights = dctOptResult['dfWeights']

            if not simConfig.applyPostOptTrimming:
                wfm = dfWeights[['assetKey', 'postOptWeights']]
                wfm = wfm[Real.isNonZero(wfm['postOptWeights'])]
                dfPositions = dfPositions.merge(wfm[['assetKey', 'postOptWeights']], how='outer', on='assetKey')
                dfPositions.fillna(value=0, inplace=True) ## beware, there are non-zero post opt weights not in bod positions..)

            else:
                ## apply post-optimization trimming
                dframe = dfPositions.merge(dfWeights.drop(columns=['preOptWeights']), how='outer', on='assetKey')
                dframe = dframe[Real.isNonZero(dframe['preOptWeights']) | Real.isNonZero(dframe['postOptWeights'])]
                dframe.fillna(value=0, inplace=True) ## beware, there are non-zero post opt weights not in bod positions..

                pfm = SignalMgr.get('price_frame_latest', signalDate)
                dframe = dframe.merge(pfm[['assetKey', 'priceCloseUSD']], how='left', on='assetKey')
                dfm = SignalMgr.getFrame('trading_lot_size', signalDate).rename(columns={'trading_lot_size': 'lotSize'})
                dframe = dframe.merge(dfm[['assetKey', 'lotSize']], how='left', on='assetKey')

                # take preOpt weights from the positions, not the optimization,
                # since the two frames can differ in length at this point.
                dframe.preOptWeights = dframe.preOptWeights.fillna(0.0)
                dframe['preOptValUSD'] = dframe['preOptWeights'] * dctNAV['preOptNAV']

                dframe = dframe.assign(
                    preOptQty=round(dframe['preOptValUSD'] / dframe['priceCloseUSD'], 0).astype(int))
                dframe = dframe.assign(tradeValUSD=dframe['tradeWeights'] * dctNAV['preOptNAV'])

                ## remove small trade sizes: cancel genuine (non-zero) trades to
                ## a non-zero target whose notional is below the USD threshold
                dch_tradingThreshold_USD = 100
                cut = \
                    ((dframe['tradeValUSD']).abs() < dch_tradingThreshold_USD) \
                    & (dframe['preOptWeights'] != dframe['postOptWeights']) \
                    & ((dframe['tradeValUSD']).abs() > 0) \
                    & ((dframe['postOptWeights']).abs() != 0)

                ## NOTE(review): there are non-zero effective liquidations here
                ## that get killed; they are expected to be caught by the
                ## small-positions cut below -- confirm.
                small_trades = dframe[cut][['assetKey', 'tradeValUSD', 'preOptWeights', 'postOptWeights']]

                fn_st = simConfig.simDir + '/small_trades.{}.csv'.format(str(tradeDate).replace('-', ''))
                small_trades.to_csv(fn_st, index=False, float_format='%.10f')
                PyLog.info('dch::small_trades: wrote({}) size({})'.format(fn_st, len(small_trades)))

                dframe.loc[cut, 'postOptWeights'] = dframe.loc[cut, 'preOptWeights']
                dframe.loc[cut, 'tradeWeights'] = 0
                dframe = dframe.assign(tradeValUSD=dframe['tradeWeights'] * dctNAV['preOptNAV'])

                ## trimming for lot size: round the trade quantity to whole lots
                dframe = dframe.assign(
                    tradeQtyRaw=round(dframe['tradeValUSD'] / dframe['priceCloseUSD'], 0).astype(int))
                dframe = dframe.assign(
                    tradeQtyRnd=(dframe['tradeQtyRaw'] / dframe['lotSize']).astype(int) * dframe['lotSize'])

                ## trimming for overborrows (best-effort; failures are logged and skipped)
                if simConfig.trimOverborrows:
                    try:
                        PyLog.info('SimulatePreTrade::Trimming on overborrows -use GS file ({})'.format(tradeDate))

                        borrows = ShortAvailability.getRange(tradeDate, tradeDate, pbList=['GS'])
                        borrows = borrows[borrows['assetKey'].isin(dframe['assetKey'])]
                        dframe = dframe.merge(borrows[['assetKey', 'availableQty']], how='left', on='assetKey')
                        dframe.fillna(value=0, inplace=True)

                        ## new_borrows requested: every negative trade quantity,
                        ## plus any existing long being flipped through zero;
                        ## clipped so that it is always <= 0.
                        dframe['new_borrows'] = 0
                        isSell = dframe['tradeQtyRnd'] < 0
                        dframe.loc[isSell, 'new_borrows'] = dframe.loc[isSell, 'tradeQtyRnd']
                        isLong = dframe['preOptQty'] > 0
                        dframe.loc[isLong, 'new_borrows'] = dframe.loc[isLong, 'new_borrows'] + dframe.loc[isLong, 'preOptQty']
                        dframe.loc[dframe['new_borrows'] > 0, 'new_borrows'] = 0

                        ## over_borrows: the (negative) excess of the borrow
                        ## request over availability
                        dframe['over_borrows'] = 0
                        isOver = dframe['new_borrows'] < -dframe['availableQty']
                        dframe.loc[isOver, 'over_borrows'] = dframe.loc[isOver, 'availableQty'] + dframe.loc[isOver, 'new_borrows']

                        actuallyMakeCut = True
                        if actuallyMakeCut:
                            # add the excess back into the trade quantity
                            # (recall both quantities are negative here)
                            dframe['tradeQtyRnd'] -= dframe['over_borrows']

                        fn_ob = '{}/Overborrows_trimming.{}.csv'.format(simConfig.simDir, str(tradeDate).replace('-', ''))
                        deltas = dframe[
                            ['assetKey', 'over_borrows', 'new_borrows', 'availableQty', 'tradeQtyRnd', 'preOptQty',
                             'shortBoundHard']]
                        deltas = deltas.assign(tradeDate=tradeDate)
                        deltas.to_csv(fn_ob, index=False, float_format='%.10f')
                        PyLog.info(
                            'SimulatePreTrade::trimming::over-borrows:: wrote({}) size({})'.format(fn_ob, len(deltas)))

                        PyLog.info('SimulatePreTrade::trimming on overborrows... DONE')
                    except Exception as ee:
                        PyLog.info(
                            'ERR SimulatePreTrade Caught exception in trimming overborrows ERR({}) ...continue...'.format(
                                ee))

                ## trade-restriction block -- only touches postOptWeights for now
                if (simConfig.tradeRestrictions and simConfig.tr):
                    dframe = simConfig.tr.applyTradeRestrictions(tradeDate, dframe, dctNAV['preOptNAV'])

                ## fold sub-lot residuals into the trade so final positions land on whole lots
                dframe = dframe.assign(
                    residualQty=dframe['preOptQty'] + dframe['tradeQtyRnd'])
                dframe = dframe.assign(
                    tradeQtyCmb=dframe['tradeQtyRnd']
                                - (abs(dframe['residualQty']) < dframe['lotSize']).astype(int)
                                * dframe['residualQty'])
                dframe = dframe.assign(
                    postOptQty=dframe['preOptQty'] + dframe['tradeQtyCmb'])
                dframe = dframe.assign(
                    postOptValUSD=dframe['postOptQty'] * dframe['priceCloseUSD'])
                dframe = dframe.assign(
                    trimmedWeights=dframe['postOptValUSD'] / dctNAV['preOptNAV'])

                ## min holding weight
                dframe.loc[abs(dframe['trimmedWeights']) < simConfig.holdingThreshold, 'trimmedWeights'] = 0.0

                ## min holding USD: zero out target positions below the threshold
                dch_holdingThreshold_USD = 10000

                cut = (abs(dframe['postOptValUSD']) < dch_holdingThreshold_USD) \
                      & (abs(dframe['postOptValUSD']) > 0)  ## tgt position is not already @zero

                small_holdings = dframe[cut][['assetKey', 'postOptValUSD']].sort_values(by='postOptValUSD')
                PyLog.info(
                    'dch:: Remove small target positions < ({})USD :: Remove ({}) target positions.  Leave ({})'.format(
                        dch_holdingThreshold_USD, len(small_holdings), \
                        len(dframe[dframe['postOptValUSD'].abs() > 0]) - len(small_holdings)))

                fn_sh = simConfig.simDir + '/small_holdings.{}.csv'.format(str(tradeDate).replace('-', ''))
                small_holdings.to_csv(fn_sh, index=False, float_format='%.10f')
                PyLog.info('dch::small_holdings: wrote({}) size({})'.format(fn_sh, len(small_holdings)))

                dframe.loc[cut, 'trimmedWeights'] = 0.0

                ## clamp trimmed weights into [-shortBoundHard, +longBoundHard]
                ## (shortBoundHard arrives as a positive magnitude and is flipped)
                dframe = dframe.assign(
                    trimmedWeights=dframe[['trimmedWeights', 'longBoundHard']].min(axis=1))
                dframe = dframe.assign(shortBoundHard=-dframe['shortBoundHard'])
                dframe = dframe.assign(
                    trimmedWeights=dframe[['trimmedWeights', 'shortBoundHard']].max(axis=1))
                dfPositions = dfPositions.merge(
                    dframe[['assetKey', 'trimmedWeights']].rename(columns={'trimmedWeights': 'postOptWeights'}),
                    how='outer', on='assetKey')

                dfPositions.fillna(value=0, inplace=True) ## beware, there are non-zero post opt weights not in bod positions..

            dfPositions = dfPositions.assign(
                optTradeWeights=dfPositions['postOptWeights'].fillna(0.0) - dfPositions['preOptWeights'].fillna(0.0))
            dfPositions = dfPositions.merge(dfWeights[['assetKey', 'longBoundHard', 'shortBoundHard']],
                                            how='left', on='assetKey')

            ## write interim post-opt debug weights log
            fn_ob = '{}/debug_interim_post_opt_weights.{}.csv'.format(simConfig.simDir, str(tradeDate).replace('-', ''))
            ## Bug fix: the old log labelled prevTD as dctNAV['preOptNAV'];
            ## the message now matches the arguments actually passed.
            PyLog.info('debug_interim_post_opt_weights Using NAV({}) asof prevTD({})'.format(dctNAV['preOptNAV'], prevTD))
            dfPositions.to_csv(fn_ob, index=False, float_format='%.10f')

            dctNAV['tau'] = rebalConfig.tau
            dctNAV['gamma'] = dctOptResult['gamma']

        else:
            ## no rebalance today: targets equal current weights, no trades
            dfPositions = dfPositions.assign(postOptWeights=dfPositions['preOptWeights'])
            dfPositions = dfPositions.assign(optTradeWeights=0.0)
            dctNAV['tau'] = rebalConfig.tau
            dctNAV['gamma'] = rebalConfig.gamma

        ## insert useful new columns into dfPositions...
        if simConfig.applyPostOptTrimming:
            ## Bug fix: 'dframe' only exists when the optimizer ran
            ## (tradeDate == rebalDate); referencing it unconditionally raised
            ## a NameError instead of reaching the intended warning below.
            ## Also fixed 'Pylog.warn' -> 'PyLog.warn' (NameError) on that path.
            if tradeDate == rebalDate and 'tradeQtyRnd' in dframe.columns:
                dframe['trade_qty'] = dframe['tradeQtyRnd']
                dfPositions = dfPositions.merge(dframe[['assetKey', 'trade_qty', 'preOptQty']], how='left', on='assetKey').fillna(0.0)
            else:
                ## NOTE(review): when this warn fires, trade_qty/preOptQty are
                ## absent from dfPositions and the target_contracts line below
                ## will still fail -- pre-existing behavior, confirm intent.
                PyLog.warn('simulatePretrade:: WARN No tradeQtyRnd in dframe.. did the opt run?.. tradeDate==rebalDate???.  Not inserting trade_qty or target_contracts into dfPositions')
        else:
            ## fill in the basics in dfPositions since we didn't do any trimming, rounding etc.
            dfPositions.preOptWeights = dfPositions.preOptWeights.fillna(0.0)
            dfPositions = dfPositions[Real.isNonZero(dfPositions['preOptWeights']) | Real.isNonZero(dfPositions['postOptWeights'])]
            dfPositions['preOptValUSD'] = dfPositions['preOptWeights'] * dctNAV['preOptNAV']
            dfPositions['postOptValUSD'] = dfPositions['postOptWeights'] * dctNAV['preOptNAV']
            pfm = SignalMgr.get('price_frame_latest', signalDate)
            dfPositions = dfPositions.merge(pfm[['assetKey', 'priceCloseUSD']], how='left', on='assetKey')
            ## any asset with a non-zero pre/post-opt value but no close price is fatal
            cut_bad_px = ((dfPositions['preOptValUSD'] != 0) | (dfPositions['postOptValUSD'] != 0)) & (dfPositions['priceCloseUSD'].isnull())
            if len(dfPositions[cut_bad_px]) > 0:
                msg = 'WARN:simulatePretrade Bad Prices in stocks for which we have pre or post opt weights len({})'.format(len(dfPositions[cut_bad_px]))
                PyLog.warn(msg)
                PyLog.info('cut_bad_px::\n{}'.format(dfPositions[cut_bad_px]))
                ## Bug fix: `raise(msg)` raised a bare string (TypeError in
                ## Python 3); also dropped a no-arg PyLog.info() call.
                raise Exception(msg)

            dfPositions['preOptQty'] = round(dfPositions['preOptValUSD'] / dfPositions['priceCloseUSD'], 0).astype(int)
            dfPositions['postOptQty'] = round(dfPositions['postOptValUSD'] / dfPositions['priceCloseUSD'], 0).astype(int)  ## aka target_contracts
            dfPositions['trade_qty'] = dfPositions['postOptQty'] - dfPositions['preOptQty']

        dfPositions['target_contracts'] = dfPositions['preOptQty'] + dfPositions['trade_qty']

        if debug: PyLog.info(f"tau: {dctNAV['tau']} gamma:{dctNAV['gamma']} rebalConfig.gamma:{rebalConfig.gamma}")
        simConfig.writePositionFile(dfPositions, tradeDate=tradeDate, step='pretrade')
        simConfig.writeDailyNAVFile(dctNAV, tradeDate=tradeDate, step='pretrade')

        if preserveSim:
            simConfig.preserveSimFile(simConfig.positionFile(tradeDate=tradeDate, step='pretrade'))
            simConfig.preserveSimFile(simConfig.rebalFile(tradeDate=tradeDate))
            simConfig.preserveSimFile(simConfig.dailyNAVFile(tradeDate=tradeDate, step='pretrade'))

    @classmethod
    def computeBCost(cls, simConfig, tradeDate, dfPositions, NAV):
        """Compute a per-asset borrow-cost rate for the current short book.

        Builds a time-decayed availability "waterfall" from the prime brokers'
        short-availability history over the lookback window, then fills each
        short position's weight cheapest-rate-first and returns the resulting
        utilization-weighted average rate per asset.

        Returns a DataFrame with columns ['assetKey', 'bcost'] covering only
        the assets held short at BOD (empty frame if there are no shorts).
        """
        lookback = simConfig.bcostLookback
        # only BOD short positions incur borrow cost
        dfShorts = dfPositions[Real.isNegative(dfPositions['bodWeights'])]


        if len(dfShorts) == 0:
            dfUtilized = pd.DataFrame(columns=['assetKey', 'bcost'])
            return dfUtilized


        # availability window: the `lookback` weekdays ending the day before tradeDate
        endTD = PyDate.prevWeekday(tradeDate)
        startTD = PyDate.minusWeekdays(endTD, lookback - 1)


        # exponential time weights, most recent day first; timeScale normalizes
        # so the latest observation has weight 1.0
        dfWgts = pd.DataFrame({'tradeDate': PyDate.sequenceWeekday(startTD, endTD, decreasing=True),
                               'timeWgts': Stats.expwts(lookback, lookback / 2)})

        dfWgts = dfWgts.assign(timeScale=dfWgts['timeWgts'] / dfWgts['timeWgts'].iloc[0])


        dfRaw = ShortAvailability.getRange(startTD, endTD, pbList=simConfig.pbList)


        dfRaw = dfRaw[dfRaw['assetKey'].isin(dfShorts['assetKey'])]


        # availability is expressed relative to AUM: hypothetical AUM if
        # configured, otherwise the live NAV passed in
        if simConfig.HAUM:
            AUM = simConfig.hypotheticalAUM
        else:
            AUM = NAV

        # haircut availability by maxShortUtilization, except JPM 'connect'
        # lines which are usable in full (scale == 1.0 for those rows)
        dfRaw = dfRaw.assign(
            scale=simConfig.maxShortUtilization
                  + (1 - simConfig.maxShortUtilization)
                  * ((dfRaw['pbCode'] == 'JPM') & (dfRaw['category'] == 'connect')).astype(int))
        dfRaw = dfRaw.assign(vsHAUM=dfRaw['scale'] * dfRaw['notionalUSD'] / AUM)
        dfRaw = dfRaw.merge(dfWgts[['tradeDate', 'timeScale']], how='left', on='tradeDate')

        ## create buffer in case the utilized pct exceeds available pct
        # (a synthetic full-weight tranche at a penal default rate)
        defaultBCost = max(simConfig.rebalConfig.defaultBCost, dfRaw['availableRate'].max() + 0.01)
        dfm = dfShorts[['assetKey']].drop_duplicates().assign(availableRate=defaultBCost)
        dfm = dfm.assign(vsHAUM=1.0, timeScale=1.0, dataType='defaultBCost')
        dfRaw = pd.concat([dfRaw, dfm], ignore_index=True)

        ## availableRate is float and it works poorly with group()
        # bucket rates via fixed-precision string keys for stable grouping
        dfRaw = dfRaw.assign(rateClass=['{:.4f}'.format(x) for x in dfRaw['availableRate']])

        # total (time-weighted) availability per asset per rate bucket
        dfConsol = dfRaw.groupby(['assetKey', 'rateClass']). \
            apply(lambda dfm: (dfm['vsHAUM'] * dfm['timeScale']).sum()). \
            reset_index().rename(columns={0: 'vsHAUM'})

        # build the cumulative waterfall: [cumLo, cumHi) per rate tranche,
        # ordered cheapest rate first within each asset
        dfConsol = dfConsol.assign(availableRate=dfConsol['rateClass'].astype(float))
        dfConsol = dfConsol.sort_values(by=['assetKey', 'availableRate'], ascending=True). \
            groupby('assetKey').apply(lambda dfm: dfm.assign(cumHi=dfm['vsHAUM'].cumsum())).reset_index(drop=True)
        dfConsol = dfConsol.groupby('assetKey'). \
            apply(lambda dfm: dfm.assign(cumLo=[0.0] + dfm['cumHi'][:-1].tolist())).reset_index(drop=True)
        dfConsol = dfConsol.merge(dfShorts[['assetKey', 'bodWeights']], how='left', on='assetKey')

        ## Note, this implicitly assumes that the utilized pct does not exceed the maximum availability
        # keep tranches touched by the short weight, fill each tranche up to
        # the (positive) short weight, then average rates by filled weight
        dfUtilized = dfConsol[dfConsol['cumLo'] <= -dfConsol['bodWeights']]
        dfUtilized = dfUtilized.assign(bodWeights=-dfUtilized['bodWeights'])
        dfUtilized = dfUtilized.assign(utilizedPct=dfUtilized[['cumHi', 'bodWeights']].min(axis=1))
        dfUtilized = dfUtilized.assign(weight=dfUtilized['utilizedPct'] - dfUtilized['cumLo'])
        dfUtilized = dfUtilized.assign(wtdRate=dfUtilized['availableRate'] * dfUtilized['weight'])
        dfUtilized = dfUtilized.groupby('assetKey').agg({'wtdRate': sum, 'weight': sum, 'utilizedPct': max}). \
            reset_index()
        dfUtilized = dfUtilized.assign(bcost=dfUtilized['wtdRate'] / dfUtilized['weight'])
        return dfUtilized[['assetKey', 'bcost']]

    @classmethod
    def simulatePostTrade(cls, tradeDate, simConfig, preserveSim=False):

        debug = True

        signalDate = PyDate.prevWeekday(tradeDate)

        stratName = simConfig.stratName
        tmodelName = simConfig.rebalConfig.tmodelName
        impactCostScale = simConfig.rebalConfig.impactCostScale


        dctNAV = simConfig.loadDailyNAVFile(tradeDate=tradeDate, step='pretrade')

        dfPositions = simConfig.loadPositionFile(tradeDate=tradeDate, step='pretrade')
        if debug: PyLog.info('Loaded positions for ({})'.format(tradeDate))
        if debug:
            PyLog.info(f"Positions for {tradeDate} size {len(dfPositions)} cols:{dfPositions.columns}")
            if len(dfPositions) > 0:
                top = dfPositions[['assetKey', 'bodValUSD']].sort_values('bodValUSD', key=abs).head(5)
                PyLog.info(f"top positions\n{top}")


        ## determine applicable returns
        if simConfig.executionPrice == 'open':
            preTradeRet = 'coret'
            postTradeRet = 'ocret'
        elif simConfig.executionPrice == 'vwap':
            preTradeRet = 'cvret'
            postTradeRet = 'vcret'


        rfm = []
        try:
            rfm = SignalMgr.get('auxiliary_returns_frame', tradeDate, stratName)
            PyLog.info('done!//')
            PyLog.info(f"rfm len : {len(rfm)} cols:{rfm.columns.tolist()}")
        except Exception as ee:
            try:
                # roll back two!
                x = PyDate.prevWeekday(tradeDate)
                prevTD = PyDate.asString(PyDate.prevWeekday(x))
                #                prevTD = PyDate.asString(  PyDate.prevWeekday(tradeDate) )

                rfm = SignalMgr.get('auxiliary_returns_frame', prevTD, stratName)
            except Exception as eee:
               PyLog.info("dummy info")

        dfPositions = dfPositions.merge(rfm[['assetKey', preTradeRet, postTradeRet, 'fxretd']],
                                        how='left', on='assetKey')

        dfPositions = dfPositions.assign(
            preTradePnLHoldUSD=dfPositions['bodValUSD'].fillna(0.0) * dfPositions[preTradeRet].fillna(0.0))
        # dfPositions = dfPositions.assign(
        #     preTradeValUSD = dfPositions['bodValUSD'].fillna(0.0) + dfPositions['preTradePnLHoldUSD'])


        dfPositions = dfPositions.assign(
            preTradeValUSD=dfPositions['bodValUSD'].fillna(0.0) * (1 + dfPositions[preTradeRet].fillna(0.0))
                           / (1 - 0.5 * dfPositions['fxretd'].fillna(0.0)))
        dfPositions = dfPositions.assign(
            preTradePnLFxUSD=dfPositions['preTradeValUSD'] - dfPositions['bodValUSD'].fillna(0.0)
                             - dfPositions['preTradePnLHoldUSD'])

        dctNAV['preTradePnLHoldUSD'] = dfPositions['preTradePnLHoldUSD'].sum()
        dctNAV['preTradePnLFxUSD'] = 0.0 if simConfig.hedgeFX else dfPositions['preTradePnLFxUSD'].sum()


        PyLog.info(f"dctNAV {dctNAV}")

        dctNAV['preTradeNAV'] = dctNAV['preOptNAV'] + dctNAV['preTradePnLHoldUSD']

        # dctNAV['preOptNAV']=0 ##dch this is why the extensim_pm actually has to run the prior day..

        dctNAV['preTradeNAV'] = dctNAV['preOptNAV'] + dctNAV['preTradePnLHoldUSD'] + dctNAV['preTradePnLFxUSD']


        dfPositions = dfPositions.assign(preTradeWeights=dfPositions['preTradeValUSD'] / dctNAV['preTradeNAV'])


        dctNAV['preTradeGrExp'] = abs(dfPositions['preTradeWeights']).sum()


        # dfPositions = dfPositions.assign(
        #     tradeValUSD = dfPositions['optTradeWeights'] * dctNAV['preOptNAV']
        #                   * (1 + dfPositions[preTradeRet].fillna(0.0)))

        if simConfig.maxVolParticipation is not None:
            pfm = SignalMgr.get('price_frame', tradeDate)
            pfm = pfm.assign(tradeValUSDM=pfm[['tradeValueUSDM', 'consolTrValUSDM']].fillna(0.0).max(axis=1))
            pfm = pfm.assign(
                tradeBoundLo=-simConfig.maxVolParticipation * pfm['tradeValUSDM'] * 1000000 / dctNAV['preTradeNAV'],
                tradeBoundHi=+simConfig.maxVolParticipation * pfm['tradeValUSDM'] * 1000000 / dctNAV['preTradeNAV'])
            dfPositions = dfPositions.merge(pfm[['assetKey', 'tradeBoundLo', 'tradeBoundHi']],
                                            how='left', on='assetKey')
            dfPositions = dfPositions.assign(
                execWeights=dfPositions[['optTradeWeights', 'tradeBoundLo']].fillna(0.0).max(axis=1))
            dfPositions = dfPositions.assign(
                execWeights=dfPositions[['execWeights', 'tradeBoundHi']].fillna(0.0).min(axis=1))
            ## need to take care of the forced trades here
            ## apply hard bounds
            dfPositions = dfPositions.assign(
                postExecWeights=dfPositions['preTradeWeights'] + dfPositions['execWeights'])
            if tradeDate not in simConfig.noTradeDates:
                dfPositions = dfPositions.assign(
                    postExecWeights=dfPositions.assign(shortBoundHard=-dfPositions['shortBoundHard']) \
                        [['postExecWeights', 'shortBoundHard']].max(axis=1))
                dfPositions = dfPositions.assign(
                    postExecWeights=dfPositions[['postExecWeights', 'longBoundHard']].min(axis=1))
            dfPositions = dfPositions.assign(
                execWeights=dfPositions['postExecWeights'] - dfPositions['preTradeWeights'])
        else:
            dfPositions = dfPositions.assign(execWeights=dfPositions['optTradeWeights'])
            dfPositions = dfPositions.assign(
                postExecWeights=dfPositions['preTradeWeights'] + dfPositions['execWeights'])


        dfPositions = dfPositions.assign(
            tradeValUSD=dfPositions['execWeights'] * dctNAV['preOptNAV']
                        * (1 + dfPositions[preTradeRet].fillna(0.0)) / (1 - 0.5 * dfPositions['fxretd'].fillna(0.0)))
        dfPositions = dfPositions.assign(tradeWeights=dfPositions['tradeValUSD'] / dctNAV['preTradeNAV'])
        dctNAV['tradeWeights'] = abs(dfPositions['tradeWeights']).sum()
        dctNAV['tradeValUSD'] = abs(dfPositions['tradeValUSD']).sum()


        ## compute tcost
        tmodel = []
        try:
            tmodel = SignalMgr.get(tmodelName, signalDate, stratName)
        except Exception as ee:
            xx = PyDate.prevWeekday(signalDate)
            xx = PyDate.asString(xx)
            tmodel = SignalMgr.get(tmodelName, xx, stratName)

        if debug: PyLog.info(f"got tmodel len:{len(tmodel)} cols:{tmodel.columns}")

        tmodel = tmodel[['assetKey', 'linearBuyCoeff', 'linearSellCoeff', 'impactCoeffTH']]
        dfPositions = dfPositions.merge(tmodel, how='left', on='assetKey')


        dfPositions.loc[dfPositions['assetKey'] == '$USD', 'linearBuyCoeff'] = 0.0
        dfPositions.loc[dfPositions['assetKey'] == '$USD', 'linearSellCoeff'] = 0.0
        dfPositions.loc[dfPositions['assetKey'] == '$USD', 'impactCoeffTH'] = 0.0

        dfPositions = dfPositions.assign(
            linearCostUSD=dctNAV['preTradeNAV'] *
                          ((Filter.bound(dfPositions['tradeWeights'], lower=0.0) *
                            dfPositions['linearBuyCoeff'].fillna(dfPositions['linearBuyCoeff'].max())) -
                           (Filter.bound(dfPositions['tradeWeights'], upper=0.0) *
                            dfPositions['linearSellCoeff'].fillna(dfPositions['linearSellCoeff'].max()))))


        dfPositions = dfPositions.assign(
            impactCostUSD=impactCostScale * np.power(dctNAV['preTradeNAV'], 3 / 2) *
                          (np.power(abs(dfPositions['tradeWeights']), 3 / 2) *
                           dfPositions['impactCoeffTH'].fillna(dfPositions['impactCoeffTH'].max())))
        dfPositions = dfPositions.assign(tcostUSD=dfPositions['linearCostUSD'] + dfPositions['impactCostUSD'])


        dctNAV['linearCostUSD'] = dfPositions['linearCostUSD'].sum()
        dctNAV['impactCostUSD'] = dfPositions['impactCostUSD'].sum()
        dctNAV['tcostUSD'] = dctNAV['linearCostUSD'] + dctNAV['impactCostUSD']
        dctNAV['postTradeNAV'] = dctNAV['preTradeNAV'] - dctNAV['tcostUSD']


        dfPositions = dfPositions.assign(
            postTradeValUSD=dfPositions['preTradeValUSD'].fillna(0.0) + dfPositions['tradeValUSD'].fillna(0.0))
        dfPositions = dfPositions.assign(postTradeWeights=dfPositions['postTradeValUSD'] / dctNAV['postTradeNAV'])


        ## apply post-trade returns
        dfPositions = dfPositions.assign(
            postTradePnLHoldUSD=dfPositions['postTradeValUSD'] * dfPositions[postTradeRet].fillna(0.0))


        # dfPositions = dfPositions.assign(
        #     eodValUSD = dfPositions['postTradeValUSD'] + dfPositions['postTradePnLHoldUSD'])
        dfPositions = dfPositions.assign(
            eodValUSD=dfPositions['postTradeValUSD'].fillna(0.0)
                      * (1 + dfPositions[postTradeRet].fillna(0.0))
                      / (1 - 0.5 * dfPositions['fxretd'].fillna(0.0)))
        dfPositions = dfPositions.assign(
            postTradePnLFxUSD=dfPositions['eodValUSD'].fillna(0.0) - dfPositions['postTradeValUSD'].fillna(0.0)
                              - dfPositions['postTradePnLHoldUSD'].fillna(0.0))


        dfPositions = dfPositions.assign(
            pnlHoldUSD=dfPositions['preTradePnLHoldUSD'].fillna(0.0) + dfPositions['postTradePnLHoldUSD'].fillna(0.0))
        dfPositions = dfPositions.assign(
            pnlFxUSD=dfPositions['preTradePnLFxUSD'].fillna(0.0) + dfPositions['postTradePnLFxUSD'].fillna(0.0))
        # dfPositions = dfPositions.assign(
        #     totalPnLUSD = dfPositions['pnlHoldUSD'].fillna(0.0) - dfPositions['fcostUSD'].fillna(0.0)
        #                   - dfPositions['bcostUSD'].fillna(0.0) - dfPositions['tcostUSD'].fillna(0.0))


        dfPositions = dfPositions.assign(
            totalPnLUSD=dfPositions['pnlHoldUSD'].fillna(0.0) + dfPositions['pnlFxUSD'].fillna(0.0)
                        - dfPositions['fcostUSD'].fillna(0.0) - dfPositions['bcostUSD'].fillna(0.0)
                        - dfPositions['tcostUSD'].fillna(0.0))
        dctNAV['postTradePnLHoldUSD'] = dfPositions['postTradePnLHoldUSD'].sum()
        dctNAV['postTradePnLFxUSD'] = 0.0 if simConfig.hedgeFX else dfPositions['postTradePnLFxUSD'].sum()

        PyLog.info(f"dctNAV {dctNAV}")

        dctNAV['pnlHoldUSD'] = dctNAV['preTradePnLHoldUSD'] + dctNAV['postTradePnLHoldUSD']
        dctNAV['pnlFxUSD'] = dctNAV['preTradePnLFxUSD'] + dctNAV['postTradePnLFxUSD']

        dctNAV['totalPnLUSD'] = dctNAV['pnlHoldUSD'] + dctNAV['pnlFxUSD'] \
                                - dctNAV['fcostUSD'] - dctNAV['bcostUSD'] - dctNAV['tcostUSD']
        # dctNAV['totalPnLUSD'] =0


        dctNAV['eodNAV'] = dctNAV['postTradeNAV'] + dctNAV['postTradePnLHoldUSD'] + dctNAV['postTradePnLFxUSD']
        dfPositions = dfPositions.assign(eodWeights=dfPositions['eodValUSD'] / dctNAV['eodNAV'])
        dctNAV['eodGrExp'] = abs(dfPositions['eodWeights']).sum()
        dfPositions = dfPositions.drop(columns=['linearBuyCoeff', 'linearSellCoeff', 'impactCoeffTH'])


        simConfig.writeDailyNAVFile(dctNAV, tradeDate=tradeDate, step='posttrade')
        simConfig.writePositionFile(dfPositions, tradeDate=tradeDate, step='posttrade')

        ## update NAV frame
        dfNAV = simConfig.loadNAVFrame()
        if len(dfNAV) == 0:
            dfNAV = pd.DataFrame([dctNAV])
        else:
            dfNAV = dfNAV[dfNAV['tradeDate'] < tradeDate]
            dfNAV = pd.concat([dfNAV, pd.DataFrame([dctNAV])], ignore_index=True)
        simConfig.writeNAVFrame(dfNAV)

        if preserveSim:
            simConfig.preserveSimFile(simConfig.positionFile(tradeDate=tradeDate, step='posttrade'))
            simConfig.preserveSimFile(simConfig.dailyNAVFile(tradeDate=tradeDate, step='posttrade'))

    @classmethod
    def generateTradingSimulationReport(cls, simConfig, memo=None, outDir=None, includeOptimal=False, suffix=None,
                                        startTD=None, endTD=None, toAddr=None, saveDailyReturns=True):
        """Build the trading-portfolio simulation HTML report and optionally email it.

        Parameters
        ----------
        simConfig : simulation configuration object (supplies memo, subdir, file access).
        memo : report memo text; defaults to ``simConfig.memo``.
        outDir : output directory; defaults to ``/data/reports/<user>/<subdir>``.
        includeOptimal : include optimal-portfolio stats alongside traded stats.
        suffix : optional suffix appended to ``outDir`` with an underscore.
        startTD, endTD : optional trade-date bounds applied to the daily returns.
        toAddr : list of email recipients; empty/None means no email is sent.
        saveDailyReturns : persist daily returns to csv inside ``outDir``.
        """
        # BUGFIX: the original signature used a mutable default (toAddr=[]), which
        # is created once at definition time and shared across every call. Use
        # None as the sentinel and normalize here instead.
        if toAddr is None:
            toAddr = []

        if memo is None:
            memo = simConfig.memo
        if outDir is None:
            outDir = '/data/reports/{}/{}'.format(PyEnvironment.username(), simConfig.subdir)
        if suffix is not None:
            outDir = outDir + '_' + suffix
        if not FileUtil.exists(outDir):
            FileUtil.createDir(outDir)

        dfReturns = cls.generateDailyReturns(simConfig, csvname=os.path.join(outDir, 'daily_returns.csv'),
                                             save=saveDailyReturns)

        if endTD is not None:
            dfReturns = dfReturns[dfReturns['tradeDate'] <= PyDate.asDate(endTD)]
        if startTD is not None:
            dfReturns = dfReturns[dfReturns['tradeDate'] >= PyDate.asDate(startTD)]
            # the window restarts at startTD, so cumulative returns are rebuilt
            # from the per-day log returns of the truncated frame
            dfReturns = dfReturns.assign(cumlogret=dfReturns['logret'].cumsum())
            dfReturns = dfReturns.assign(cumret=np.exp(dfReturns['cumlogret']) - 1)
        dfSumm, dfExpTrd, dfExpOpt = cls.compileTradingSummaryFrame(simConfig, dfReturns, includeOptimal)
        dfGrossExp = cls.compileGrossExposureFrame(simConfig, dfReturns)

        dctSimReport = cls.compileTradingSimulationReport(dfSumm, dfExpTrd, dfExpOpt, dfGrossExp, simConfig, memo=memo)

        dctReport = cls.compileTradingHtmlReport(dctSimReport)
        filename = os.path.join(outDir, 'trading_portfolio_simulation_report.html')
        try:
            PyHtml.standardReport(dctReport, filename=filename, title='Simulation Report (trading portfolio)',
                                  font=2, halign='center', header=None)
        except Exception as ee:
            PyLog.info(
                'ERR:: dch::generateTradingSimulationReport standardReport filename({}) ERR({})'.format(filename, ee))
            PyLog.info(traceback.format_exc())

        if len(toAddr) > 0:
            cls.sendPerformanceReportEmail(simConfig, dctReport, toAddr)


    @classmethod
    def sendPerformanceReportEmail(cls, simConfig, dctReport, toAddr, ccAddr=None, bccAddr=None):
        """Email the MTD performance summary, appending a color-coded table of
        exchange holidays falling within +/- 5 days of today."""

        timeStr = '{}'.format(datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M'))

        sections = []
        for key in ['Returns', 'MTD', '@@P&LbyCountry']:
            sections.append(PyHtml.insertTextItem(dctReport[key])[0])
        message = '\n'.join(sections)

        ## exchange holidays: non-full trading days inside the +/- 5 day window
        tradeDate = PyDate.today()
        hframe = SignalMgr.getStatic('trading_exchange_holidays_master_frame')
        inWindow = ((hframe['tradeDate'] >= PyDate.minus(tradeDate, 5))
                    & (hframe['tradeDate'] <= PyDate.plus(tradeDate, 5))
                    & (hframe['trading'] != 'Full'))
        hframe = hframe.loc[inWindow, ['tradeDate', 'quoteCountry', 'countryName', 'holidayName', 'trading']]

        if len(hframe) > 0:
            tag = PyHtml.insertTable(hframe, tableTitle='Exchange Holidays')[0]
            rows = re.split('<tr>', tag)
            # rows[0:2] are the table header; color each data row by its date:
            # grey for past, orange for today, green for upcoming
            for idx in range(2, len(rows)):
                rowDate = PyDate.asDate(BeautifulSoup(rows[idx], 'lxml').find('td').get_text(strip=True))
                if rowDate < tradeDate:
                    styledCell = '<td style="background-color:#E5E7E9" '
                elif rowDate == tradeDate:
                    styledCell = '<td style="background-color:#FDEBD0" '
                else:
                    styledCell = '<td style="background-color:#D5F5E3" '
                rows[idx] = re.sub('<td ', styledCell, rows[idx])
            message += ('\n' + '<tr>'.join(rows))

        MailUtil.send(fromAddr="ap-production@greenwichquantitative.com",
                      toAddr=toAddr,
                      ccAddr=ccAddr,
                      bccAddr=bccAddr,
                      subject="[GQRAPMNS - {}] MTD Performance ({})".format(simConfig.shortName, timeStr),
                      body=message)

    @classmethod
    def compileGrossExposureFrame(cls, simConfig, dfReturns=None):
        """Compute average gross exposure by year and country.

        For every trade date in ``dfReturns``, loads the post-trade position file,
        maps assets to countries, and sums absolute end-of-day weights per country
        (plus a country='total' row per date). The per-country gross exposures,
        both versus AUM and versus GMV (share of the date's total), are then
        averaged by calendar year.

        Returns a DataFrame with columns: year, country, vsAUM, vsGMV.
        """
        if dfReturns is None:
            dfReturns = simConfig.loadDailyReturnsFile()

        dframe = list()
        cframe = cls.getModelCountryFrame()

        for tradeDate in dfReturns['tradeDate']:
            dfPositions = simConfig.loadPositionFile(tradeDate=tradeDate, step='posttrade')[['assetKey', 'eodWeights']]
            dfPositions = dfPositions.merge(cframe, how='left', on='assetKey')
            dfPositions = dfPositions.assign(vsAUM=abs(dfPositions['eodWeights']))
            # use string aggregator names: passing the builtin ``sum`` (or
            # ``np.mean``) to agg() is deprecated in modern pandas and slower
            # than the native cython implementations
            dfm = dfPositions.groupby('country').agg({'vsAUM': 'sum'}).reset_index()
            dfm = dfm.assign(tradeDate=tradeDate)
            dframe.append(dfm)

        dframe = pd.concat(dframe, ignore_index=True)
        # per-date totals across countries, appended as country='total' rows
        dfm = dframe.groupby('tradeDate').agg({'vsAUM': 'sum'}).reset_index().assign(country='total')
        dframe = pd.concat([dframe, dfm], ignore_index=True)
        dframe = dframe.merge(dfm[['tradeDate', 'vsAUM']].rename(columns={'vsAUM': 'total'}),
                              how='left', on='tradeDate')
        dframe = dframe.assign(vsGMV=dframe['vsAUM'] / dframe['total'])
        dframe = dframe.assign(year=[x.year for x in dframe['tradeDate']])

        dfGrossExp = dframe.groupby(['year', 'country']).agg({'vsAUM': 'mean', 'vsGMV': 'mean'}).reset_index()
        return dfGrossExp

    @classmethod
    def compileTradingSummaryFrame(cls, simConfig, dfReturns=None, includeOptimal=True):
        """Merge the daily returns frame with per-date risk-report statistics.

        For each (signalDate, tradeDate) pair, loads the trading-portfolio risk
        report (and, if ``includeOptimal``, the optimal-portfolio one), collects
        the scalar stats listed in ``keys`` and the exposure frames.

        Returns
        -------
        (dfSumm, dfExpTrd, dfExpOpt) :
            dfSumm  - dfReturns left-merged with the per-date stats,
            dfExpTrd - factor exposures of the traded portfolio,
            dfExpOpt - factor exposures of the optimal portfolio (empty
                       DataFrame when ``includeOptimal`` is off or nothing loaded).
        """
        if dfReturns is None:
            dfReturns = simConfig.loadDailyReturnsFile()

        lst = list()
        dfExpTrd = list()
        dfExpOpt = list()
        keys = ['rawIR', 'netIR', 'grossExposure', 'netExposure', 'expectedReturn', 'exanteRisk',
                'numPositions', 'numLong', 'numShort']

        for signalDate, tradeDate in zip(dfReturns['signalDate'], dfReturns['tradeDate']):
            try:
                dctReport = simConfig.loadRiskReport(tradeDate=tradeDate, prefix='trading_portfolio')
                dfExpTrd.append(dctReport['exposures'].assign(tradeDate=tradeDate))
                dct = {'tradeDate': tradeDate}
                dct.update({k: v for k, v in dctReport.items() if k in keys})
                dfm = pd.DataFrame([dct]). \
                    rename(columns={'rawIR': 'rawIRTrd', 'netIR': 'netIRTrd',
                                    'grossExposure': 'grExpTrd', 'netExposure': 'netExpTrd',
                                    'expectedReturn': 'expRetTrd', 'exanteRisk': 'exanteRiskTrd',
                                    'numPositions': 'numPosTrd', 'numLong': 'numLongTrd', 'numShort': 'numShortTrd'})
                if includeOptimal:
                    dctReport = simConfig.loadRiskReport(tradeDate=tradeDate, prefix='optimal_portfolio')
                    dfExpOpt.append(dctReport['exposures'].assign(tradeDate=tradeDate))
                    dct = {'tradeDate': tradeDate}
                    dct.update({k: v for k, v in dctReport.items() if k in keys})
                    dfm = dfm.merge(pd.DataFrame([dct]). \
                                    rename(columns={'rawIR': 'rawIROpt', 'netIR': 'netIROpt',
                                                    'grossExposure': 'grExpOpt', 'netExposure': 'netExpOpt',
                                                    'expectedReturn': 'expRetOpt', 'exanteRisk': 'exanteRiskOpt',
                                                    'numPositions': 'numPosOpt', 'numLong': 'numLongOpt',
                                                    'numShort': 'numShortOpt'}), how='left', on='tradeDate')
            except Exception as ee:
                PyLog.info(traceback.format_exc())
                # BUGFIX: skip this date entirely. Previously the loop fell
                # through to lst.append(dfm), which appended a stale frame from
                # the prior iteration (duplicating its tradeDate row) or raised
                # NameError when the very first date failed.
                continue

            lst.append(dfm)

        # guard pd.concat against empty input (it raises on an empty list),
        # consistent with the dfExpOpt handling below
        if len(lst) > 0:
            dfSumm = dfReturns.merge(pd.concat(lst, ignore_index=True), how='left', on='tradeDate')
        else:
            dfSumm = dfReturns.copy()
        dfSumm = dfSumm.sort_values(by='tradeDate')

        if len(dfExpTrd) > 0:
            dfExpTrd = pd.concat(dfExpTrd, ignore_index=True)
            dfExpTrd = dfReturns[['signalDate', 'tradeDate']].merge(dfExpTrd, how='right', on='tradeDate')
        else:
            dfExpTrd = pd.DataFrame()

        if len(dfExpOpt) > 0:
            dfExpOpt = pd.concat(dfExpOpt, ignore_index=True)
            dfExpOpt = dfReturns[['signalDate', 'tradeDate']].merge(dfExpOpt, how='right', on='tradeDate')
        else:
            dfExpOpt = pd.DataFrame()

        return dfSumm, dfExpTrd, dfExpOpt

    @classmethod
    def compileTradingSimulationReport(cls, dfSumm, dfExpTrd, dfExpOpt, dfGrossExp, simConfig, memo=None):

        dctSimReport = dict()
        dctSimReport['memo'] = memo

        dfSumm = dfSumm.sort_values(by='tradeDate', ascending=True)
        dfSumm = dfSumm.assign(year=[x.year for x in dfSumm['tradeDate']])
        dfSumm = dfSumm.assign(highwater=[dfSumm['cumret'].iloc[:(n + 1)].max() for n in range(len(dfSumm))])
        dfSumm = dfSumm.assign(highwater=Filter.bound(dfSumm['highwater'].fillna(0.0), lower=0.0))
        dfSumm = dfSumm.assign(drawdown=np.exp(np.log(dfSumm['cumret'] + 1) - np.log(dfSumm['highwater'] + 1)) - 1)

        dfSumm = dfSumm.assign(meDate=[PyDate.prevWeekday(PyMonth.lastWeekday(PyMonth.prevMonth(x)))
                                       for x in dfSumm['tradeDate']])
        dfSumm = dfSumm.merge(
            dfSumm[['tradeDate', 'cumret']].rename(columns={'tradeDate': 'meDate', 'cumret': 'cumretME'}),
            how='left', on='meDate')
        dfSumm = dfSumm.assign(cumretME=dfSumm['cumretME'].fillna(0.0))

        dfSumm = dfSumm.assign(highwaterME=[dfSumm['cumretME'].iloc[:(n + 1)].max() for n in range(len(dfSumm))])
        dfSumm = dfSumm.assign(highwaterME=Filter.bound(dfSumm['highwaterME'].fillna(0.0), lower=0.0))
        dfSumm = dfSumm.assign(
            drawdownME=np.exp(np.log(dfSumm['cumret'] + 1) - np.log(dfSumm['highwaterME'] + 1)) - 1)
        dfSumm = dfSumm.assign(drawdownME=Filter.bound(dfSumm['drawdownME'], upper=0.0))

        if ('rawIROpt' in dfSumm.columns) and ('netIROpt' in dfSumm.columns):
            dfSumm = dfSumm.assign(rawTC=dfSumm['rawIRTrd'] / dfSumm['rawIROpt'])
            dfSumm = dfSumm.assign(netTC=dfSumm['netIRTrd'] / dfSumm['netIROpt'])

        dfSumm = dfSumm.assign(fcost=-dfSumm['fcost'].cumsum())
        dfSumm = dfSumm.assign(bcost=-dfSumm['bcost'].cumsum())
        dfSumm = dfSumm.assign(tcost=-dfSumm['tcost'].cumsum())
        dfSumm = dfSumm.assign(holdret=dfSumm['cumret'] - dfSumm['fcost'] - dfSumm['bcost'] - dfSumm['tcost'])

        dfSumm = dfSumm.assign(longExpTrd=(dfSumm['netExpTrd'] + dfSumm['grExpTrd']) / 2)
        dfSumm = dfSumm.assign(shortExpTrd=(dfSumm['netExpTrd'] - dfSumm['grExpTrd']) / 2)

        numWD = Frequency.perYear('WEEKDAY')

        dct = dict()
        dct['period'] = 'all'
        dct['cumret'] = dfSumm['cumret'].iloc[-1]
        dct['highwater'] = dfSumm['highwaterME'].iloc[-1]
        dct['maxDD'] = dfSumm['drawdownME'].min()

        PyLog.info(f"dfSumm of len:{len(dfSumm)} cols:{dfSumm.columns}")

        dfRebal = dfSumm[dfSumm['tradeWeights'] > 0]
        PyLog.info(f"dfRebal len:{len(dfRebal)}")

        #        rebalPerYear = 365 / (PyDate.span(dfRebal['tradeDate'].min(), dfRebal['tradeDate'].max()) / len(dfRebal))
        rebalPerYear = 365 / (
                PyDate.span(dfRebal['tradeDate'].min(), dfRebal['tradeDate'].max()) / len(dfRebal)) if len(
            dfRebal) > 1 else 1

        dct['avgTradeWgts'] = dfRebal['tradeWeights'].mean()
        dct['avgHldPerMth'] = 12 / ((dfRebal['tradeWeights'] / dfRebal['grExpTrd']).mean() * rebalPerYear)


        dct['annRet'] = numWD * dfSumm['periodret'].mean()
        dct['annStd'] = np.sqrt(numWD) * dfSumm['periodret'].std()
        dct['SharpeRatio'] = dct['annRet'] / dct['annStd']


        dfRets = list()
        dfRets.append(dct)


        ## by year
        for year in dfSumm['year'].unique():
            dfm = dfSumm[dfSumm['year'] == year].copy()
            dfm = dfm.sort_values(by='tradeDate', ascending=True)
            dfm = dfm.assign(cumret=np.exp(dfm['logret'].cumsum()) - 1)
            dfm = dfm.drop(columns=['cumretME'])

            dfm = dfm.merge(
                dfm[['tradeDate', 'cumret']].rename(columns={'tradeDate': 'meDate', 'cumret': 'cumretME'}),
                how='left', on='meDate')
            dfm = dfm.assign(cumretME=dfm['cumretME'].fillna(0.0))

            dfm = dfm.assign(highwater=[dfm['cumretME'].iloc[:n].max() for n in range(len(dfm))])
            dfm = dfm.assign(highwater=Filter.bound(dfm['highwater'].fillna(0.0), lower=0.0))
            # dfm = dfm.assign(drawdown  = Filter.bound(dfm['cumret'] - dfm['highwater'], upper=0.0))
            dfm = dfm.assign(drawdown=np.exp(np.log(dfm['cumret'] + 1) - np.log(dfm['highwater'] + 1)) - 1)

            dct = dict()
            dct['period'] = year
            dct['cumret'] = dfm['cumret'].iloc[-1]
            dct['highwater'] = dfm['highwater'].iloc[-1]
            dct['maxDD'] = dfm['drawdown'].min()

            dfRebal = dfm[dfm['tradeWeights'] > 0]
            if len(dfRebal) > 1:
                rebalPerYear = (PyDate.span(dfRebal['tradeDate'].min(), dfRebal['tradeDate'].max()) / len(dfRebal))
                rebalPerYear = 365 / rebalPerYear
            else:
                rebalPerYear = np.NaN
            dct['avgTradeWgts'] = dfRebal['tradeWeights'].mean()
            dct['avgHldPerMth'] = 12 / ((dfRebal['tradeWeights'] / dfRebal['grExpTrd']).mean() * rebalPerYear)

            dct['annRet'] = numWD * dfm['periodret'].mean()
            dct['annStd'] = np.sqrt(numWD) * dfm['periodret'].std()
            dct['SharpeRatio'] = dct['annRet'] / dct['annStd']
            dfRets.append(dct)

        dfRets = pd.DataFrame(dfRets)

        dctSimReport['dfRets'] = dfRets

        ## MTD Returns
        dfmL = dfSumm.assign(month=[PyMonth.asNumeric(x) for x in dfSumm['tradeDate']]). \
            groupby('month').agg({'logret': sum}).reset_index().rename(columns={'logret': 'cumlogret'})
        dfmL = dfmL.assign(returns=np.exp(dfmL['cumlogret']) - 1).drop(columns='cumlogret')
        dfmL = dfmL.tail(23)
        dfmL = dfmL.assign(returns=['{:.2f}%'.format(100 * x) for x in dfmL['returns']])
        dfmL['&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;'] = ''
        dfmL = dfmL.sort_values(by='month', ascending=False)

        currMonth = PyMonth.asNumeric(dfSumm['tradeDate'].max())
        dfmR = dfSumm[dfSumm['tradeDate'] >= PyMonth.firstWeekday(currMonth)][['tradeDate', 'logret', 'periodret']]. \
            rename(columns={'periodret': 'returns'})
        dfmR = dfmR.assign(returns=['{:.2f}%'.format(100 * x) for x in dfmR['returns']])
        dfmR = dfmR.assign(MTD=np.exp(dfmR['logret'].cumsum()) - 1)
        dfmR = dfmR.assign(MTD=['{:.2f}%'.format(100 * x) for x in dfmR['MTD']])
        dfmR = dfmR.drop(columns='logret')
        dfmR = dfmR.sort_values(by='tradeDate', ascending=False)
        dfmR = pd.concat([dfmR, pd.DataFrame([{'tradeDate': '', 'returns': '', 'MTD': ''}] * 23)], ignore_index=True)
        dfmR = dfmR.head(23)


        prevMonth = PyMonth.prevMonth(currMonth)
        dfmM = dfSumm[(dfSumm['tradeDate'] >= PyMonth.firstWeekday(prevMonth)) &
                      (dfSumm['tradeDate'] <= PyMonth.lastWeekday(prevMonth))][['tradeDate', 'logret', 'periodret']]. \
            rename(columns={'periodret': 'returns'})
        dfmM = dfmM.assign(returns=['{:.2f}%'.format(100 * x) for x in dfmM['returns']])
        dfmM = dfmM.assign(MTD=np.exp(dfmM['logret'].cumsum()) - 1)
        dfmM = dfmM.assign(MTD=['{:.2f}%'.format(100 * x) for x in dfmM['MTD']])
        dfmM = dfmM.drop(columns='logret')
        dfmM = dfmM.sort_values(by='tradeDate', ascending=False)
        dfmM = pd.concat([dfmM, pd.DataFrame([{'tradeDate': '', 'returns': '', 'MTD': ''}] * 23)], ignore_index=True)
        dfmM = dfmM.head(23)
        dfmM['&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;'] = ''

        dfMTD = pd.concat([dfmL.reset_index(drop=True), dfmM.reset_index(drop=True), dfmR.reset_index(drop=True)],
                          axis=1)
        dfMTD.columns = ['&nbsp;{}&nbsp;'.format(x) for x in dfMTD.columns]

        dctSimReport['dfMTD'] = dfMTD


        ## P&L by country
        lastTD = dfSumm['tradeDate'].max()
        cfm = cls.getModelCountryFrame()
        lstTD = PyDate.sequenceWeekday(PyYear.firstWeekday(lastTD), lastTD)
        PyLog.info(
            f"Will attempt to load position files from {PyYear.firstWeekday(lastTD)} to {lastTD}. len {len(lstTD)}")
        dframe = list()
        for tradeDate in lstTD:
            pfm = simConfig.loadPositionFile(tradeDate=tradeDate, step='posttrade')
            if not pfm.empty:
                pfm = pfm.merge(cfm, how='left', on='assetKey')
                dfm = pfm.groupby('country').agg({'totalPnLUSD': sum}).reset_index().assign(tradeDate=tradeDate)
                dframe.append(dfm[['tradeDate', 'country', 'totalPnLUSD']])

        dframe = pd.concat(dframe, ignore_index=True)

        dfm = dframe[dframe['tradeDate'] == lastTD]
        dfm = dfm.groupby('country').agg({'totalPnLUSD': sum}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        totalPnLUSD = dfm['totalPnLUSD'].sum()
        dfm = pd.concat([dfm, pd.DataFrame({'country': ['total'], 'totalPnLUSD': [totalPnLUSD]})],
                        ignore_index=True)
        dfm = dfm.assign(proportion=dfm['totalPnLUSD'] / abs(totalPnLUSD))
        dfm = dfm.assign(totalPnLUSD=TextUtil.asCommaInteger(dfm['totalPnLUSD'].tolist()),
                         proportion=['{:.1f}%'.format(x * 100) for x in dfm['proportion']])
        dfPnL = pd.DataFrame({'1D': PyHtml.insertTable(dfm, tableTitle='1D P&L by Country')})
        dfPnL['&nbsp;&nbsp;&nbsp;'] = ''

        dfm = dframe[[PyMonth.asNumeric(x) == PyMonth.asNumeric(lastTD) for x in dframe['tradeDate']]]
        dfm = dfm.groupby('country').agg({'totalPnLUSD': sum}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        totalPnLUSD = dfm['totalPnLUSD'].sum()
        dfm = pd.concat([dfm, pd.DataFrame({'country': ['total'], 'totalPnLUSD': [totalPnLUSD]})],
                        ignore_index=True)
        dfm = dfm.assign(proportion=dfm['totalPnLUSD'] / abs(totalPnLUSD))
        dfm = dfm.assign(totalPnLUSD=TextUtil.asCommaInteger(dfm['totalPnLUSD'].tolist()),
                         proportion=['{:.1f}%'.format(x * 100) for x in dfm['proportion']])
        dfPnL['MTD'] = PyHtml.insertTable(dfm, tableTitle='MTD P&L by Country')
        dfPnL[' &nbsp;&nbsp;'] = ''


        dfm = dframe[[PyQuarter.asQuarter(x) == PyQuarter.asQuarter(lastTD) for x in dframe['tradeDate']]]
        dfm = dfm.groupby('country').agg({'totalPnLUSD': sum}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        totalPnLUSD = dfm['totalPnLUSD'].sum()
        dfm = pd.concat([dfm, pd.DataFrame({'country': ['total'], 'totalPnLUSD': [totalPnLUSD]})],
                        ignore_index=True)
        dfm = dfm.assign(proportion=dfm['totalPnLUSD'] / abs(totalPnLUSD))
        dfm = dfm.assign(totalPnLUSD=TextUtil.asCommaInteger(dfm['totalPnLUSD'].tolist()),
                         proportion=['{:.1f}%'.format(x * 100) for x in dfm['proportion']])
        dfPnL['QTD'] = PyHtml.insertTable(dfm, tableTitle='QTD P&L by Country')
        dfPnL['&nbsp; &nbsp'] = ''

        dfm = dframe.groupby('country').agg({'totalPnLUSD': sum}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        totalPnLUSD = dfm['totalPnLUSD'].sum()
        dfm = pd.concat([dfm, pd.DataFrame({'country': ['total'], 'totalPnLUSD': [totalPnLUSD]})],
                        ignore_index=True)
        dfm = dfm.assign(proportion=dfm['totalPnLUSD'] / abs(totalPnLUSD))
        dfm = dfm.assign(totalPnLUSD=TextUtil.asCommaInteger(dfm['totalPnLUSD'].tolist()),
                         proportion=['{:.1f}%'.format(x * 100) for x in dfm['proportion']])
        dfPnL['YTD'] = PyHtml.insertTable(dfm, tableTitle='YTD P&L by Country')

        dctSimReport['countryPnL'] = dfPnL


        try:
            ## Plots
            dctSimReport['cumretPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y=['cumret', 'holdret', 'fcost', 'bcost', 'tcost'])
            # dctSimReport['drawdownPlot'] =\
            #     PlotUtil.dfPlot(dfSumm, x='tradeDate', y='drawdown')
            dctSimReport['meDrawdownPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y='drawdownME')
        except Exception as ee:
            PyLog.info(traceback.format_exc())

        try:
            dfSumm = dfSumm.fillna(0)

            dctSimReport['costPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y=['fcostUSD', 'bcostUSD', 'tcostUSD'])

            dctSimReport['tcostPlot'] = \
                PlotUtil.dfPlot(dfSumm[dfSumm['linearCostUSD'] > 0.0], x='tradeDate',
                                y=['linearCostUSD', 'impactCostUSD'])

            dctSimReport['tradePlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y='tradeWeights')


            if ('rawTC' in dfSumm.columns) and ('netTC' in dfSumm.columns):
                dctSimReport['tcPlot'] = \
                    PlotUtil.dfPlot(dfSumm, x='tradeDate', y=['rawTC', 'netTC'])

            dctSimReport['portRetPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y='pnlHoldUSD')
            dctSimReport['periodRetPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y='periodret')

            dctSimReport['netExpPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y=['netExpTrd', 'longExpTrd', 'shortExpTrd'])
            dctSimReport['grossExpPlot'] = \
                PlotUtil.dfPlot(dfSumm, x='tradeDate', y='grExpTrd')

        except Exception as ee:
            PyLog.info('dch::Simulate.py.compileTradingSimulationReport ERR({})'.format(ee))
            PyLog.info(traceback.format_exc())


        try:
            dctSimReport['ctryNetExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('country_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='net', categCol='factor')
            dctSimReport['induNetExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('ind_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='net', categCol='factor')
            dctSimReport['sizeNetExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('size_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='net', categCol='factor')

            dctSimReport['ctryGrossExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('country_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='gross', categCol='factor')
            dctSimReport['induGrossExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('ind_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='gross', categCol='factor')
            dctSimReport['sizeGrossExpPlot'] = \
                PlotUtil.plot(dfExpTrd[[x.startswith('size_') for x in dfExpTrd['factor']]],
                              x='tradeDate', y='gross', categCol='factor')


            dctSimReport['numPositions'] = \
                PlotUtil.dfPlot(dfSumm.rename(columns={'numPosTrd': 'numPositions', 'numLongTrd': 'numLong',
                                                       'numShortTrd': 'numShort'}),
                                x='tradeDate', y=['numPositions', 'numLong', 'numShort'])
            dctSimReport['expectedReturns'] = \
                PlotUtil.dfPlot(dfSumm.rename(columns={'expRetTrd': 'expectedReturn'}),
                                x='tradeDate', y='expectedReturn')
            dctSimReport['exanteRisk'] = \
                PlotUtil.dfPlot(dfSumm.rename(columns={'exanteRiskTrd': 'exanteRisk'}),
                                x='tradeDate', y='exanteRisk')
        except Exception as ee:
            PyLog.info('dch::Simulate.py.compileTradingSimulationReport ERR({})'.format(ee))

        PyLog.info("dch::Simulate.py.compileTradingSimulationReport 35.6")

        modelName = Strategy.getModelName(simConfig.stratName)
        dctSimReport['themes'] = FactorModel.getAlphaThemes(modelName) + ['market'] \
                                 + FactorModel.getRiskThemes(modelName)


        try:
            pfm = dfExpTrd[dfExpTrd['factor'].isin(FactorModel.getAlphaThemes(modelName))]
            dctSimReport['alphaThemeExposures'] = PlotUtil.plot(pfm, 'tradeDate', 'net', 'factor')

            pfm = dfExpTrd[dfExpTrd['factor'].isin(['market'] + FactorModel.getRiskThemes(modelName))]
            dctSimReport['riskThemeExposures'] = PlotUtil.plot(pfm, 'tradeDate', 'net', 'factor')


            if len(dfExpOpt) > 0:
                for theme in dctSimReport['themes']:
                    pfm = dfExpTrd[dfExpTrd['factor'] == theme][['tradeDate', 'net']].rename(columns={'net': 'trading'})
                    pfm = pfm.merge(dfExpOpt[dfExpTrd['factor'] == theme][['tradeDate', 'net']]. \
                                    rename(columns={'net': 'optimal'}), how='outer', on='tradeDate')
                    dctSimReport['netExp - ' + theme] = \
                        PlotUtil.dfPlot(pfm, x='tradeDate', y=['trading', 'optimal'], order=False,
                                        title=theme, ylabel='netExp')

                dctSimReport['buckets'] = ['size_LARGE', 'size_MID', 'size_SMALL', 'size_MICRO']
                for bucket in dctSimReport['buckets']:
                    pfm = dfExpTrd[dfExpTrd['factor'] == bucket][['tradeDate', 'net']].rename(
                        columns={'net': 'trading'})
                    pfm = pfm.merge(dfExpOpt[dfExpTrd['factor'] == bucket][['tradeDate', 'net']]. \
                                    rename(columns={'net': 'optimal'}), how='outer', on='tradeDate')
                    dctSimReport['netExp - ' + bucket] = \
                        PlotUtil.dfPlot(pfm, x='tradeDate', y=['trading', 'optimal'], order=False,
                                        title=bucket, ylabel='netExp')


            ## tabulate the average gross exposure by country

            dfm = dfGrossExp[dfGrossExp['year'] == dfGrossExp['year'].max()].sort_values(by='vsGMV', ascending=True)
            categories = dfm['country'].tolist()
            dfGrossExp = dfGrossExp.assign(country=pd.Categorical(dfGrossExp['country'], categories=categories))

        except Exception as ee:

            PyLog.info(traceback.format_exc())

        dfm = dfGrossExp[['country', 'year', 'vsAUM']].pivot(index='year', columns='country', values='vsAUM')
        dfm.columns.name = None
        dfm.columns = list(dfm.columns)

        dfm = dfm.reset_index()

        dfm = dfm.assign(year=dfm['year'].astype(str))

        dfm.columns = ['&nbsp;&nbsp;&nbsp;&nbsp;' + str(x) + '&nbsp;&nbsp;&nbsp;&nbsp;' for x in dfm.columns]


        dctSimReport['GEvsAUM'] = dfm

        dfm = dfGrossExp[['country', 'year', 'vsGMV']].pivot(index='year', columns='country', values='vsGMV')
        dfm.columns.name = None
        dfm.columns = list(dfm.columns)
        dfm = dfm.reset_index()
        dfm = dfm.assign(year=dfm['year'].astype(str))
        dfm.columns = ['&nbsp;&nbsp;&nbsp;&nbsp;' + str(x) + '&nbsp;&nbsp;&nbsp;&nbsp;' for x in dfm.columns]


        dctSimReport['GEvsGMV'] = dfm


        return dctSimReport

    @classmethod
    def _addNetExposureRows(cls, dctReport, dctSimReport, names, rowId):
        """Lay out 'netExp - <name>' plots from dctSimReport into rows of four.

        Each completed row of four plots is written into dctReport under the
        key '@@NetExposure:<rowId>'. A trailing partial row is flushed under
        the CURRENT rowId without incrementing it; a caller starting a new
        section is expected to bump the returned rowId itself.

        :param dctReport:     report dict being assembled (mutated in place).
        :param dctSimReport:  source dict holding the 'netExp - <name>' plots.
        :param names:         iterable of theme/bucket names to look up.
        :param rowId:         row index to start numbering from.
        :return: the rowId of the last row written (un-bumped after a
                 partial-row flush), or the starting rowId if nothing matched.
        """
        colId = 0
        dct = dict()
        for name in names:
            plotName = 'netExp - ' + name
            if plotName in dctSimReport.keys():
                dct[name] = dctSimReport[plotName]
                if colId == 3:
                    # Row is full (four plots): flush it and start a new row.
                    dctReport['@@NetExposure:{}'.format(rowId)] = dct
                    dct = dict()
                    colId = 0
                    rowId += 1
                else:
                    colId += 1

        if len(dct) > 0:
            # Flush the trailing partial row; rowId deliberately not bumped
            # so existing key numbering is preserved.
            dctReport['@@NetExposure:{}'.format(rowId)] = dct

        return rowId

    @classmethod
    def compileTradingHtmlReport(cls, dctSimReport):
        """Assemble the HTML report dict from the simulation-report dict.

        Keys prefixed with '@@' hold dicts of plots rendered side by side;
        plain keys hold pre-rendered HTML table fragments. Each major section
        is wrapped in try/except so that a missing plot only logs an error
        and the rest of the report is still produced.

        :param dctSimReport: output of compileTradingSimulationReport.
        :return: dict of report sections keyed by section name.
        """
        PyLog.info("Compiling HTML report...")

        # Verbose checkpoint logging, deliberately left on.
        debug = True

        dctReport = dict()

        if debug: PyLog.info('compileTradingHtmlReport 1')
        dctReport['Returns'] = PyHtml.insertTable(dctSimReport['dfRets'],
                                                  tableTitle='Portfolio Returns', numDigits=4)[0]
        if debug: PyLog.info('compileTradingHtmlReport 2')
        dctReport['MTD'] = PyHtml.insertTable(dctSimReport['dfMTD'],
                                              tableTitle='MTD Returns', numDigits=4)[0]

        if debug: PyLog.info('compileTradingHtmlReport 3')
        dctReport['Daily Returns (csv)'] = PyHtml.insertTable(
            pd.DataFrame({'Daily Returns (csv)':
                              PyHtml.insertLink('daily_returns.csv', 'Daily Returns (csv)')}),
            tableTitle='Daily Returns (csv)', colHeading=False, width=300)[0]

        dctReport['@@P&LbyCountry'] = PyHtml.insertTable(
            dctSimReport['countryPnL'], tableTitle=None, colHeading=False, width=900)[0]

        if dctSimReport['memo'] is not None:
            dctReport['Memo'] = PyHtml.insertTable(pd.DataFrame({'memo': [dctSimReport['memo']]}),
                                                   tableTitle='Memo', colHeading=False)[0]

        if debug: PyLog.info('compileTradingHtmlReport 4')

        try:
            dctReport['@@Returns'] = {
                'Cumulative Returns': dctSimReport['cumretPlot'],
                'Period Returns': dctSimReport['periodRetPlot'],
                'Drawdown (Month-end)': dctSimReport['meDrawdownPlot']}
            if debug: PyLog.info('compileTradingHtmlReport 4.1')

            if 'tcPlot' in dctSimReport.keys():
                if debug: PyLog.info('compileTradingHtmlReport 4.2')
                dctReport['@@Costs'] = {
                    'Trade Weight': dctSimReport['tradePlot'],
                    'Costs': dctSimReport['costPlot'],
                    'TCost Breakdown': dctSimReport['tcostPlot'],
                    'Transfer Coefficients': dctSimReport['tcPlot']}
            else:
                if debug: PyLog.info('compileTradingHtmlReport 4.3')
                try:
                    dctReport['@@Costs'] = {
                        'Trade Weight': dctSimReport['tradePlot'],
                        'Costs': dctSimReport['costPlot'],
                        'TCost Breakdown': dctSimReport['tcostPlot']}
                except Exception as ee:
                    PyLog.info('dch::compileTradingHtmlReport @@Costs ERR({})'.format(ee))
        except Exception as ee:
            PyLog.info(traceback.format_exc())

        if debug: PyLog.info('compileTradingHtmlReport 5')

        try:
            dctReport['@@PortExp'] = {
                'Net Exposures': dctSimReport['netExpPlot'],
                'Gross Exposures': dctSimReport['grossExpPlot']}

            dctReport['@@BucketNetExp'] = {
                'Country Net Exp': dctSimReport['ctryNetExpPlot'],
                'Industry Net Exp': dctSimReport['induNetExpPlot'],
                'Size Net Exp': dctSimReport['sizeNetExpPlot']}

            dctReport['@@BucketGrossExp'] = {
                'Country Gross Exp': dctSimReport['ctryGrossExpPlot'],
                'Industry Gross Exp': dctSimReport['induGrossExpPlot'],
                'Size Gross Exp': dctSimReport['sizeGrossExpPlot']}

            if debug: PyLog.info('compileTradingHtmlReport 6')
            # Theme net-exposure plots, paginated four to a row from row 0.
            rowId = cls._addNetExposureRows(dctReport, dctSimReport,
                                            dctSimReport['themes'], 0)

            if 'buckets' in dctSimReport.keys():
                # Size-bucket plots continue on the next row after the themes.
                cls._addNetExposureRows(dctReport, dctSimReport,
                                        dctSimReport['buckets'], rowId + 1)

            if debug: PyLog.info('compileTradingHtmlReport 7')

            dctReport['@@Monitor'] = {
                'Number of Positions': dctSimReport['numPositions'],
                'Expected Returns': dctSimReport['expectedReturns'],
                'Ex-Ante Risk': dctSimReport['exanteRisk']}

            dctReport['Country Average Gross Exposure vs AUM'] = \
                PyHtml.insertTable(dctSimReport['GEvsAUM'], numDigits=4,
                                   tableTitle='Country Average Gross Exposure vs AUM')[0]
            dctReport['Country Average Gross Exposure vs GMV'] = \
                PyHtml.insertTable(dctSimReport['GEvsGMV'], numDigits=4,
                                   tableTitle='Country Average Gross Exposure vs GMV')[0]

            if debug: PyLog.info('compileTradingHtmlReport 8')
        except Exception as ee:
            PyLog.info('dch end compileTradingHtmlReport ERR({})'.format(ee))

        return dctReport

    @classmethod
    def compileTradingAttributionReport(cls, simConfig, dfAttrib=None, dfPnL=None, dfCtryAtt=None, dfSctrAtt=None,
                                        refresh=False, updateInterval=5):
        """Build the trading-simulation return-attribution report dict.

        Sections (underlying-data links, P&L attribution, factor attribution,
        country-factor, sector-factor) are each built inside their own
        try/except, so a failure in one section is logged and the remaining
        sections are still produced.

        :param simConfig:       SimulationConfig whose attribution is reported.
        :param dfAttrib:        factor attribution frame, or None to (re)build.
        :param dfPnL:           P&L attribution frame, or None to (re)build.
        :param dfCtryAtt:       per-country factor attribution frame, or None.
        :param dfSctrAtt:       per-sector factor attribution frame, or None.
        :param refresh:         passed through to compileTradingAttributionFrame2.
        :param updateInterval:  passed through to compileTradingAttributionFrame2.
        :return: dict of report sections; '@@'-prefixed keys hold dicts of
                 plots that are rendered side by side.
        """

        PyLog.info("Compiling trading attribution report...")

        dctReport = dict()

        try:
            # Rebuild all four frames if any one of them is missing.
            if (dfAttrib is None) or (dfPnL is None) or (dfCtryAtt is None) or (dfSctrAtt is None):
                dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = \
                    cls.compileTradingAttributionFrame2(simConfig, refresh=refresh, updateInterval=updateInterval)

            #            dctReport = dict()

            ## Links to the underlying data -------------------------------------------------------------------------

            dctReport['Underlying Data'] = PyHtml.insertTable(
                pd.DataFrame({'Return Attribution (csv)': PyHtml.insertLink('attrib.csv', 'Return Attribution (csv)'),
                              'P&L Attribution (csv)': PyHtml.insertLink('pnl.csv', 'P&L Attribution (csv)'),
                              'Country Attribution (csv)': PyHtml.insertLink('ctryatt.csv',
                                                                             'Country Attribution (csv)'),
                              'Sector Attribution (csv)': PyHtml.insertLink('sctratt.csv',
                                                                            'Sector Attribution (csv)')}),
                tableTitle='Underlying Data (csv)', colHeading=False, width=1200)[0]


        except Exception as ee:
            PyLog.info('dch::compileTradingAttributionReport underlying ERR({})'.format(ee))
        ## P&L attribution --------------------------------------------------------------------------------------

        try:
            # Latest cumulative P&L per (category, item); summing those gives a
            # per-category gross figure used below to normalise contributions.
            dfm = dfPnL.sort_values(by='signalDate', ascending=False). \
                drop_duplicates(subset=['category', 'item'], keep='first'). \
                groupby('category').agg({'cumPnLUSD': sum}).reset_index(). \
                rename(columns={'cumPnLUSD': 'grossPnL'})
            pframe = dfPnL.merge(dfm, how='left', on='category')
            # abs() preserves the sign of the item's P&L even when the
            # category-level gross P&L is negative.
            pframe = pframe.assign(pnlContrib=pframe['cumPnLUSD'] / abs(pframe['grossPnL']))

            pd.set_option('display.max_rows', 500)
            dct = dict()
            for category in ['country', 'industry', 'sector', 'size']:
                pfm = pframe[pframe['category'] == category]
                # NOTE(review): "categroy" is a typo in this log message; it is
                # a runtime string so it is left unchanged here.
                PyLog.info(f"categroy is {category} pfm len: {len(pfm)} pfm head:\n{pfm.head(5)}")
                dct['P&L Contrib - {}'.format(category)] = PlotUtil.plot(pfm, 'signalDate', 'pnlContrib', 'item')
            dctReport['@@pnl'] = dct

        except Exception as ee:
            PyLog.info('dch::compileTradingAttributionReport PNL ERR({})'.format(ee))

        ## factor attribution ----------------------------------------------------------------------------------

        try:

            dct = dict()
            alphaThemes = FactorModel.getAlphaThemes(Strategy.getModelName(simConfig.stratName))
            pfm = dfAttrib[dfAttrib['factor'].isin(alphaThemes)]
            dct['AlphaThemeContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            # Daily total alpha-theme contribution, cumulated over time.
            dfAggr = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            dfAggr = dfAggr.sort_values(by='signalDate', ascending=True)
            dfAggr = dfAggr.assign(alphaThemes=dfAggr['contrib'].cumsum()).drop(columns='contrib')

            dfAggr = dfAggr.merge(dfAttrib[dfAttrib['factor'] == 'specret'][['signalDate', 'cumcontrib']]. \
                                  rename(columns={'cumcontrib': 'specret'}), how='left', on='signalDate')

            # 'spread' = alpha themes plus specific return.
            pfm = dfAggr.assign(spread=dfAggr['alphaThemes'] + dfAggr['specret'])

            dct['AggrAlphaThemeContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', ['alphaThemes', 'spread'])
            dct['SpecretContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'specret')

            riskThemes = ['market'] + FactorModel.getRiskThemes(Strategy.getModelName(simConfig.stratName))
            pfm = dfAttrib[dfAttrib['factor'].isin(riskThemes)][['signalDate', 'factor', 'cumcontrib']]
            # Synthesise a 'net_market' factor row as market + sbeta_market.
            dfm = pfm[pfm['factor'] == 'market'].rename(columns={'cumcontrib': 'market'}).drop(columns='factor'). \
                merge(pfm[pfm['factor'] == 'sbeta_market'].rename(columns={'cumcontrib': 'sbeta_market'}).drop(
                columns='factor'),
                how='outer', on='signalDate')
            dfm = dfm.assign(factor='net_market', cumcontrib=dfm['market'] + dfm['sbeta_market'])
            pfm = pd.concat([pfm, dfm[['signalDate', 'factor', 'cumcontrib']]])
            dct['RiskThemeContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            # Exclude the synthetic 'net_market' row so market and sbeta_market
            # are not double-counted in the aggregate.
            pfm = pfm[pfm['factor'] != 'net_market'].groupby('signalDate').aggregate({'cumcontrib': sum}).reset_index()
            dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'riskThemes'}), how='left', on='signalDate')

            dctReport['@@themes'] = dct

            dct = dict()
            dctAggr = dict()

            # Country tilt factors ('country_*').
            pfm = dfAttrib[[x.startswith('country_') for x in dfAttrib['factor']]]
            dct['CountryTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            pfm = pfm.sort_values(by='signalDate', ascending=True)
            pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
            dctAggr['AggrCountryTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
            dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'country'}), how='left', on='signalDate')

            # Industry tilt factors ('ind_*').
            pfm = dfAttrib[[x.startswith('ind_') for x in dfAttrib['factor']]]
            dct['IndustryTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            pfm = pfm.sort_values(by='signalDate', ascending=True)
            pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
            # NOTE(review): 'Aggry' looks like a typo for 'Aggr'; the key is
            # report-facing output, so it is left unchanged here.
            dctAggr['AggryIndustryTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
            dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'industry'}), how='left', on='signalDate')

            # Size tilt factors ('size_*').
            pfm = dfAttrib[[x.startswith('size_') for x in dfAttrib['factor']]]
            dct['SizeTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            pfm = pfm.sort_values(by='signalDate', ascending=True)
            pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
            dctAggr['AggrSizeTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
            dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'size'}), how='left', on='signalDate')

            # Trading/financing cost factors.
            costs = ['fcost', 'bcost', 'linearCost', 'impactCost']
            pfm = dfAttrib[dfAttrib['factor'].isin(costs)]
            dct['CostContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

            pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            pfm = pfm.sort_values(by='signalDate', ascending=True)
            pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
            dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'costs'}), how='left', on='signalDate')

            categories = ['alphaThemes', 'riskThemes', 'specret', 'country', 'industry', 'size', 'costs']
            dctAggr['CategContrib'] = PlotUtil.dfPlot(dfAggr, 'signalDate', categories)

            dctReport['@@categories'] = dct
            dctReport['@@categAggr'] = dctAggr

            # Composite (all-factor) cumulative contribution.
            pfm = dfAttrib.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
            pfm = pfm.sort_values(by='signalDate', ascending=True)
            pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
            dct = dict()
            dct['CompositeContrib - log'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')

            # exp(x) - 1 converts the cumulated contribution from log space to
            # a simple return.
            pfm = pfm.assign(cumcontrib=np.exp(pfm['cumcontrib']) - 1)
            dct['CompositeContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')

            dctReport['@@composite'] = dct

        except Exception as ee:
            PyLog.info('dch::compileTradingAttributionReport factor ERR({})'.format(ee))

        ## country factor attribution -----------------------------------------------------------------------

        try:

            dctReport['@@country-factor'] = PyHtml.insertHeadline('Country-Factor Attribution')[0]

            modelName = Strategy.getModelName(simConfig.stratName)
            themes = FactorModel.getAlphaThemes(modelName)
            rfactors = FactorModel.getRiskThemes(modelName)

            # Per-country alpha-theme totals, with specific return merged in
            # and 'spread' = themes + specret.
            dfAggr = dfCtryAtt[dfCtryAtt['factor'].isin(themes)]
            dfAggr = dfAggr.groupby(['country', 'signalDate']). \
                agg({'cumcontrib': sum}).reset_index().rename(columns={'cumcontrib': 'themes'})
            dfAggr = dfAggr.merge(dfCtryAtt[dfCtryAtt['factor'] == 'specret'][['country', 'signalDate', 'cumcontrib']]. \
                                  rename(columns={'cumcontrib': 'specret'}), how='left', on=['country', 'signalDate'])
            dfAggr = dfAggr.assign(spread=dfAggr['themes'] + dfAggr['specret'])

            # Tilt factors grouped by their name prefix (country/ind/size); the
            # pivoted 'country' category column is renamed 'ctry' to avoid
            # clashing with the country key column.
            dfm = dfCtryAtt[[x.split('_')[0] in ['country', 'ind', 'size'] for x in dfCtryAtt['factor']]]
            dfm = dfm.assign(category=[x.split('_')[0] for x in dfm['factor']])
            dfm = dfm[['country', 'signalDate', 'category', 'cumcontrib']]
            dfm = dfm.groupby(['country', 'category', 'signalDate']).agg({'cumcontrib': sum}).reset_index()
            dfm = dfm.pivot(index=['country', 'signalDate'], columns='category', values='cumcontrib'). \
                rename(columns={'country': 'ctry'}).reset_index()
            dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'signalDate'])

            # Aggregate risk-theme (incl. market) contribution per country.
            dfm = dfCtryAtt[dfCtryAtt['factor'].isin(['market'] + rfactors)][
                ['country', 'factor', 'signalDate', 'cumcontrib']]
            dfm = dfm.groupby(['country', 'signalDate']).agg({'cumcontrib': sum}).reset_index(). \
                rename(columns={'cumcontrib': 'risk'})
            dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'signalDate'])

            dfThemes = dfCtryAtt[dfCtryAtt['factor'].isin(themes)][['country', 'signalDate', 'factor', 'cumcontrib']]. \
                rename(columns={'factor': 'theme'})

            dct = dict()
            dct['Ctry-AggrAlphaTheme'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'themes', 'country', title='AggrAlphaThemeContrib')
            dct['Ctry-AggrSpecret'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'specret', 'country', title='AggrSpecretContrib')
            dct['Ctry-AggrAlphaSpread'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'spread', 'country', title='AggrAlphaSpreadContrib')
            dctReport['@@Ctry-Aggr01'] = dct

            dct = dict()
            dct['Ctry-AggrCtryContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'ctry', 'country', title='AggrCtryContrib')
            dct['Ctry-AggrIndContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'ind', 'country', title='AggrIndContrib')
            dct['Ctry-AggrSizeContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'size', 'country', title='AggrSizeContrib')
            dct['Ctry-AggrRisk'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'risk', 'country', title='AggrRiskContrib')
            dctReport['@@Ctry-Aggr02'] = dct

            categories = ['themes', 'specret', 'spread', 'ctry', 'ind', 'size', 'risk']

            # Per-country drill-down for a fixed list of markets.
            for country in ['CN', 'JP', 'KR', 'TW', 'HK', 'AU', 'IN']:
                dct = dict()
                dct['AggrContrib-{}'.format(country)] = \
                    PlotUtil.dfPlot(dfAggr[dfAggr['country'] == country], 'signalDate', categories,
                                    title='AggrContrib ({})'.format(country))
                dct['AlphaThemeContrib-{}'.format(country)] = \
                    PlotUtil.plot(dfThemes[dfThemes['country'] == country], 'signalDate', 'cumcontrib', 'theme',
                                  title='AlphaThemeContrib ({})'.format(country))
                dctReport[country] = dct

        except Exception as ee:
            # Log the failing file/line before the full traceback.
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
            PyLog.info(traceback.format_exc())

        ## sector factor attribution -----------------------------------------------------------------------

        try:
            dctReport['@@sector-factor'] = PyHtml.insertHeadline('Sector-Factor Attribution')[0]

            modelName = Strategy.getModelName(simConfig.stratName)
            themes = FactorModel.getAlphaThemes(modelName)
            rfactors = FactorModel.getRiskThemes(modelName)

            # Mirrors the country-factor section above, keyed by sector.
            dfAggr = dfSctrAtt[dfSctrAtt['factor'].isin(themes)]
            dfAggr = dfAggr.groupby(['sector', 'signalDate']). \
                agg({'cumcontrib': sum}).reset_index().rename(columns={'cumcontrib': 'themes'})
            dfAggr = dfAggr.merge(dfSctrAtt[dfSctrAtt['factor'] == 'specret'][['sector', 'signalDate', 'cumcontrib']]. \
                                  rename(columns={'cumcontrib': 'specret'}), how='left', on=['sector', 'signalDate'])
            dfAggr = dfAggr.assign(spread=dfAggr['themes'] + dfAggr['specret'])

            dfm = dfSctrAtt[[x.split('_')[0] in ['country', 'ind', 'size'] for x in dfSctrAtt['factor']]]
            dfm = dfm.assign(category=[x.split('_')[0] for x in dfm['factor']])
            dfm = dfm[['sector', 'signalDate', 'category', 'cumcontrib']]
            dfm = dfm.groupby(['sector', 'category', 'signalDate']).agg({'cumcontrib': sum}).reset_index()
            dfm = dfm.pivot(index=['sector', 'signalDate'], columns='category', values='cumcontrib'). \
                rename(columns={'country': 'ctry'}).reset_index()
            dfAggr = dfAggr.merge(dfm, how='outer', on=['sector', 'signalDate'])

            dfm = dfSctrAtt[dfSctrAtt['factor'].isin(['market'] + rfactors)][
                ['sector', 'factor', 'signalDate', 'cumcontrib']]
            dfm = dfm.groupby(['sector', 'signalDate']).agg({'cumcontrib': sum}).reset_index(). \
                rename(columns={'cumcontrib': 'risk'})
            dfAggr = dfAggr.merge(dfm, how='outer', on=['sector', 'signalDate'])

            dfThemes = dfSctrAtt[dfSctrAtt['factor'].isin(themes)][['sector', 'signalDate', 'factor', 'cumcontrib']]. \
                rename(columns={'factor': 'theme'})

            dct = dict()
            dct['Sctr-AggrAlphaTheme'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'themes', 'sector', title='AggrAlphaThemeContrib')
            dct['Sctr-AggrSpecret'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'specret', 'sector', title='AggrSpecretContrib')
            dct['Sctr-AggrAlphaSpread'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'spread', 'sector', title='AggrAlphaSpreadContrib')
            dctReport['@@Sctr-Aggr01'] = dct

            dct = dict()
            dct['Sctr-AggrCtryContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'ctry', 'sector', title='AggrCtryContrib')
            dct['Sctr-AggrIndContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'ind', 'sector', title='AggrIndContrib')
            dct['Sctr-AggrSizeContrib'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'size', 'sector', title='AggrSizeContrib')
            dct['Sctr-AggrRisk'] = \
                PlotUtil.plot(dfAggr, 'signalDate', 'risk', 'sector', title='AggrRiskContrib')
            dctReport['@@Sctr-Aggr02'] = dct

            categories = ['themes', 'specret', 'spread', 'ctry', 'ind', 'size', 'risk']

            # Per-sector drill-down over every sector present, sorted.
            sectors = dfAggr['sector'].unique().tolist()
            sectors.sort()

            for sector in sectors:
                dct = dict()
                dct['AggrContrib-{}'.format(sector)] = \
                    PlotUtil.dfPlot(dfAggr[dfAggr['sector'] == sector], 'signalDate', categories,
                                    title='AggrContrib ({})'.format(sector))
                dct['AlphaThemeContrib-{}'.format(sector)] = \
                    PlotUtil.plot(dfThemes[dfThemes['sector'] == sector], 'signalDate', 'cumcontrib', 'theme',
                                  title='AlphaThemeContrib ({})'.format(sector))
                dctReport[sector] = dct
        except Exception as ee:
            PyLog.info('dch::compileTradingAttributionReport sector-factor ERR({})'.format(ee))

        return dctReport

    @classmethod
    def compileTradingLSAttributionReport(cls, simConfig):

        PyLog.info("Compiling trading long/short attribution report...")

        dctReport = dict()

        # facretName = 'facretw'
        # specretName = 'specretw'
        # returnHorizon = 5

        facretName = 'facretd'
        specretName = 'specretd'
        returnHorizon = 1

        dfReturns = simConfig.loadDailyReturnsFile()

        dfCtry = cls.getModelCountryFrame()

        dfAttrib = list()
        dfPnL = list()
        for signalDate in dfReturns['signalDate']:

            dfPositions = simConfig.loadPositionFile(signalDate)
            dfPositions = dfPositions.assign(
                side=['SHORT' if x < 0 else 'LONG' for x in (dfPositions['bodWeights'] + dfPositions['eodWeights'])])

            ## total P&L by category ------------------------------------------------------------------
            dfCateg = SignalMgr.getFrame(['industry', 'sector', 'size_bucket'], signalDate). \
                drop(columns='signalDate').rename(columns={'size_bucket': 'size'}). \
                merge(dfCtry, how='outer', on='assetKey')
            ## augment missing category -----------------
            dfm = pd.DataFrame([{'assetKey': '@BEIGE9', 'industry': 'biotechnology',
                                 'sector': 'HLTH', 'country': 'XH'}])
            dfCateg = pd.concat([dfm, dfCateg], ignore_index=True).drop_duplicates(subset='assetKey')
            ## ------------------------------------------

            dfCateg = dfPositions.merge(dfCateg, how='left', on='assetKey')
            dfCateg = dfCateg.assign(
                totalPnLUSD=dfCateg['pnlHoldUSD'].fillna(0.0) - dfCateg['fcostUSD'].fillna(0.0)
                            - dfCateg['bcostUSD'].fillna(0.0) - dfCateg['linearCostUSD'].fillna(0.0)
                            - dfCateg['impactCostUSD'].fillna(0.0))
            for category in ['country', 'industry', 'sector', 'size']:
                dfm = dfCateg.groupby([category, 'side']).agg({'totalPnLUSD': sum}).reset_index(). \
                    assign(category=category, signalDate=signalDate). \
                    rename(columns={category: 'item'})
                dfPnL.append(dfm)

        dfPnL = pd.concat(dfPnL, ignore_index=True)
        dfPnL = dfPnL.sort_values(by='signalDate', ascending=True)
        dfPnL = dfPnL.groupby(['category', 'item', 'side']). \
            apply(lambda dfm: dfm.assign(cumPnLUSD=dfm['totalPnLUSD'].cumsum())). \
            reset_index(drop=True)
        dfm = dfPnL.sort_values(by='signalDate', ascending=False). \
            drop_duplicates(subset=['category', 'item'], keep='first'). \
            groupby('category').agg({'cumPnLUSD': sum}).reset_index(). \
            rename(columns={'cumPnLUSD': 'grossPnL'})
        dfPnL = dfPnL.merge(dfm, how='left', on='category')
        dfPnL = dfPnL.assign(pnlContrib=dfPnL['cumPnLUSD'] / abs(dfPnL['grossPnL']))

        dct = dict()
        for category in ['country', 'industry', 'sector', 'size']:
            pfm = dfPnL[dfPnL['category'] == category]
            pfm = pfm.assign(item=pfm['item'] + ':' + pfm['side'])
            dct['P&L Contrib - {}'.format(category)] = PlotUtil.plot(pfm, 'signalDate', 'pnlContrib', 'item')

        dctReport['@@pnl'] = dct

        dfCost = dfReturns.assign(fcost=-dfReturns['fcost'],
                                  bcost=-dfReturns['bcost'],
                                  linearCost=-dfReturns['linearCostUSD'] / dfReturns['bodNAV'],
                                  impactCost=-dfReturns['impactCostUSD'] / dfReturns['bodNAV'])
        dfCost = dfCost[['signalDate', 'fcost', 'bcost', 'linearCost', 'impactCost']].set_index('signalDate').stack(). \
            reset_index().rename(columns={'level_1': 'factor', 0: 'contrib'})
        dfCost = dfCost.sort_values(by='signalDate', ascending=True)
        dfCost = dfCost.groupby('factor').apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
            reset_index(drop=True)
        dfCost = dfCost.assign(fexp=np.NaN, facret=np.NaN)
        columns = ['signalDate', 'factor', 'fexp', 'facret', 'contrib', 'cumcontrib']
        dfAttrib = pd.concat([dfAttrib[columns], dfCost[columns]], ignore_index=True)

        dct = dict()
        alphaThemes = FactorModel.getAlphaThemes(Strategy.getModelName(simConfig.stratName))
        pfm = dfAttrib[dfAttrib['factor'].isin(alphaThemes)]
        dct['AlphaThemeContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        dfAggr = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        dfAggr = dfAggr.sort_values(by='signalDate', ascending=True)
        dfAggr = dfAggr.assign(alphaThemes=dfAggr['contrib'].cumsum()).drop(columns='contrib')

        dfAggr = dfAggr.merge(dfAttrib[dfAttrib['factor'] == 'specret'][['signalDate', 'cumcontrib']]. \
                              rename(columns={'cumcontrib': 'specret'}), how='left', on='signalDate')

        pfm = dfAggr.assign(spread=dfAggr['alphaThemes'] + dfAggr['specret'])

        dct['AggrAlphaThemeContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', ['alphaThemes', 'spread'])
        dct['SpecretContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'specret')

        riskThemes = ['market'] + FactorModel.getRiskThemes(Strategy.getModelName(simConfig.stratName))
        pfm = dfAttrib[dfAttrib['factor'].isin(riskThemes)][['signalDate', 'factor', 'cumcontrib']]
        dfm = pfm[pfm['factor'] == 'market'].rename(columns={'cumcontrib': 'market'}).drop(columns='factor'). \
            merge(
            pfm[pfm['factor'] == 'sbeta_market'].rename(columns={'cumcontrib': 'sbeta_market'}).drop(columns='factor'),
            how='outer', on='signalDate')
        dfm = dfm.assign(factor='net_market', cumcontrib=dfm['market'] + dfm['sbeta_market'])
        pfm = pd.concat([pfm, dfm[['signalDate', 'factor', 'cumcontrib']]])
        dct['RiskThemeContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        pfm = pfm[pfm['factor'] != 'net_market'].groupby('signalDate').aggregate({'cumcontrib': sum}).reset_index()
        dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'riskThemes'}), how='left', on='signalDate')

        dctReport['@@themes'] = dct

        dct = dict()
        dctAggr = dict()

        pfm = dfAttrib[[x.startswith('country_') for x in dfAttrib['factor']]]
        dct['CountryTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        pfm = pfm.sort_values(by='signalDate', ascending=True)
        pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
        dctAggr['AggrCountryTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
        dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'country'}), how='left', on='signalDate')

        pfm = dfAttrib[[x.startswith('ind_') for x in dfAttrib['factor']]]
        dct['IndustryTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        pfm = pfm.sort_values(by='signalDate', ascending=True)
        pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
        dctAggr['AggryIndustryTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
        dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'industry'}), how='left', on='signalDate')

        pfm = dfAttrib[[x.startswith('size_') for x in dfAttrib['factor']]]
        dct['SizeTiltContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        pfm = pfm.sort_values(by='signalDate', ascending=True)
        pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
        dctAggr['AggrSizeTiltContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')
        dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'size'}), how='left', on='signalDate')

        costs = ['fcost', 'bcost', 'linearCost', 'impactCost']
        pfm = dfAttrib[dfAttrib['factor'].isin(costs)]
        dct['CostContrib'] = PlotUtil.plot(pfm, 'signalDate', 'cumcontrib', 'factor')

        pfm = pfm.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        pfm = pfm.sort_values(by='signalDate', ascending=True)
        pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
        dfAggr = dfAggr.merge(pfm.rename(columns={'cumcontrib': 'costs'}), how='left', on='signalDate')

        categories = ['alphaThemes', 'riskThemes', 'specret', 'country', 'industry', 'size', 'costs']
        dctAggr['CategContrib'] = PlotUtil.dfPlot(dfAggr, 'signalDate', categories)

        dctReport['@@categories'] = dct
        dctReport['@@categAggr'] = dctAggr

        pfm = dfAttrib.groupby('signalDate').aggregate({'contrib': sum}).reset_index()
        pfm = pfm.sort_values(by='signalDate', ascending=True)
        pfm = pfm.assign(cumcontrib=pfm['contrib'].cumsum())
        dct = dict()
        dct['CompositeContrib - log'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')

        pfm = pfm.assign(cumcontrib=np.exp(pfm['cumcontrib']) - 1)
        dct['CompositeContrib'] = PlotUtil.dfPlot(pfm, 'signalDate', 'cumcontrib')

        dctReport['@@composite'] = dct

        return dctReport, dfAttrib, dfPnL

    ## generate optimal portfolios in parallel ------------------------------------------------------------

    @classmethod
    def __runOptimal__(cls, dct):
        """Worker unit: produce one optimal-portfolio rebalance for a single trade date.

        dct carries 'tradeDate', 'simConfig', 'overwrite' and 'generateRiskReport'.
        An existing rebalance file is reused (not regenerated) unless 'overwrite'
        is set; a missing risk report is backfilled either way when requested.
        """
        simConfig = dct['simConfig']
        tradeDate = dct['tradeDate']
        prefix = 'optimal_portfolio'

        rebalfile = simConfig.rebalFile(tradeDate=tradeDate, prefix=prefix)
        haveRebal = (not dct['overwrite']) and FileUtil.exists(rebalfile)

        if haveRebal:
            # Rebalance already on disk: only backfill a missing risk report.
            PyLog.info('Skipping {}'.format(PyDate.asDate(tradeDate)))
            riskfile = simConfig.riskReportFile(tradeDate=tradeDate, prefix=prefix)
            needRisk = dct['generateRiskReport'] and not FileUtil.exists(riskfile)
            if needRisk:
                dctOptResult = FileUtil.load(rebalfile)
                dctRiskReport = Rebalance.generateRiskReport(dctOptResult, weightCol='optimalWeights')
                simConfig.writeRiskReport(dctRiskReport, tradeDate=tradeDate, prefix=prefix)
            return

        # Optimize against the prior weekday's signal and persist the result.
        signalDate = PyDate.prevWeekday(tradeDate)
        dctOptResult = Rebalance.runOptimal(rebalConfig=simConfig.rebalConfig, signalDate=signalDate,
                                            solver=simConfig.solver, maxIter=simConfig.maxIter,
                                            cycleIter=simConfig.cycleIter, verbose=False, checkDCP=False,
                                            tradeRestrictions=simConfig.tr)
        simConfig.writeRebalFile(dctOptResult, tradeDate=tradeDate, prefix=prefix)
        if dct['generateRiskReport']:
            dctRiskReport = Rebalance.generateRiskReport(dctOptResult, weightCol='optimalWeights')
            simConfig.writeRiskReport(dctRiskReport, tradeDate=tradeDate, prefix=prefix)

    @classmethod
    def runOptimal(cls, simConfig, startTD=None, endTD=None, overwrite=False, generateRiskReport=True):
        """Serially generate optimal portfolios for rebalance dates in [startTD, endTD].

        The rebalance schedule is anchored at simConfig.startTD (stepping by
        simConfig.rebalIntervalWD weekdays) so a narrower [startTD, endTD]
        window still lands on the same schedule dates; the window filter is
        applied afterwards.  Dates are processed newest-first, matching
        runOptimalParallel.
        """
        startTD = simConfig.startTD if startTD is None else startTD
        endTD = simConfig.endTD if endTD is None else endTD

        # Build the full schedule from the simulation anchor date...
        lstTD = list()
        tradeDate = simConfig.startTD
        while PyDate.le(tradeDate, endTD):
            lstTD.append(tradeDate)
            tradeDate = PyDate.plusWeekdays(tradeDate, simConfig.rebalIntervalWD)
        # ...then restrict to the requested window.
        lstTD = [x for x in lstTD if PyDate.ge(x, startTD) and PyDate.le(x, endTD)]

        # lstTD is built in ascending order, so reversed() gives newest-first
        # (this replaces the previous DataFrame round-trip, which was built
        # solely to sort and iterate).  Also dispatch via cls rather than the
        # hard-coded class name, consistent with runOptimalParallel.
        for td in reversed(lstTD):
            cls.__runOptimal__({'tradeDate': td, 'simConfig': simConfig, 'overwrite': overwrite,
                                'generateRiskReport': generateRiskReport})

    @classmethod
    def runOptimalParallel(cls, simConfig, overwrite=False, generateRiskReport=True,
                           maxWorkers=int(os.cpu_count() / 2), rebalDatesOnly=False):
        """Generate optimal portfolios in parallel across trade dates.

        :param rebalDatesOnly: if True, only dates on the rebalance schedule
            (every simConfig.rebalIntervalWD weekdays from simConfig.startTD);
            otherwise every weekday in [simConfig.startTD, simConfig.endTD].
        :param maxWorkers: size of the process pool (default: half the cores).
        """
        if rebalDatesOnly:
            lstTD = list()
            tradeDate = simConfig.startTD
            while PyDate.le(tradeDate, simConfig.endTD):
                # BUGFIX: previously appended to an undefined name 'lstDates',
                # raising NameError whenever rebalDatesOnly=True.
                lstTD.append(tradeDate)
                tradeDate = PyDate.plusWeekdays(tradeDate, simConfig.rebalIntervalWD)
        else:
            lstTD = PyDate.sequenceWeekday(simConfig.startTD, simConfig.endTD)

        # One record per trade date; scalar columns broadcast across rows.
        dfm = pd.DataFrame({'tradeDate': lstTD, 'simConfig': simConfig, 'overwrite': overwrite,
                            'generateRiskReport': generateRiskReport})
        dfm = dfm.sort_values(by='tradeDate', ascending=False)  # newest first

        with concurrent.futures.ProcessPoolExecutor(max_workers=maxWorkers) as executor:
            executor.map(cls.__runOptimal__, dfm.to_dict(orient='records'))

    ## generate risk reports in parallel ---------------------------------------------------------------------

    @classmethod
    def __generateRiskReport__(cls, dct):
        """Worker unit: generate and persist the risk report for one trade date.

        dct keys: 'simConfig', 'tradeDate', 'prefix', 'refresh', 'fileType'.
        'fileType' selects the source of portfolio weights:
          - 'positions_posttrade': post-trade position file, 'eodWeights'
          - 'rebalance':           rebalance file, 'postOptWeights'
        An existing report file is reused unless 'refresh' is set.

        :raises ValueError: on an unrecognized 'fileType'.
        """
        simConfig = dct['simConfig']
        tradeDate = dct['tradeDate']
        prefix = dct['prefix']

        riskfile = simConfig.riskReportFile(tradeDate=tradeDate, prefix=prefix)
        if not dct['refresh'] and FileUtil.exists(riskfile):
            PyLog.info('    Skipping risk report for {}'.format(PyDate.asISO(dct['tradeDate'])))
            return

        if dct['fileType'] == 'positions_posttrade':
            dfPositions = simConfig.loadPositionFile(tradeDate=tradeDate, prefix=prefix, step='posttrade')
            portWeights = FrameUtil.toSeries(dfPositions, keyCol='assetKey', valCol='eodWeights')
        elif dct['fileType'] == 'rebalance':
            dctOptResult = simConfig.loadRebalFile(tradeDate=tradeDate, prefix=prefix)
            portWeights = FrameUtil.toSeries(dctOptResult['dfWeights'], keyCol='assetKey', valCol='postOptWeights')
        else:
            # Previously an unrecognized fileType fell through to a NameError
            # on portWeights below; fail fast with a clear message instead.
            raise ValueError('Unknown fileType: {}'.format(dct['fileType']))

        PyLog.info("Generating risk report for {}".format(PyDate.asISO(tradeDate)))
        rebalConfig = simConfig.rebalConfig
        # The portfolio was optimized on the prior weekday's signal.
        signalDate = PyDate.prevWeekday(tradeDate)
        dctRiskReport = RiskReport.runPortfolio(port=portWeights,
                                                signalDate=signalDate,
                                                alphaName=rebalConfig.alphaName,
                                                fcostName=rebalConfig.fcostName,
                                                bcostName=rebalConfig.bcostName,
                                                rmodelName=rebalConfig.rmodelName,
                                                envName=Strategy.getModelName(rebalConfig.stratName),
                                                minWeight=rebalConfig.holdingThreshold,
                                                maxReturnDate=PyDate.today())
        simConfig.writeRiskReport(dctRiskReport, tradeDate=tradeDate, prefix=prefix)

    @classmethod
    def generateRiskReports(cls, simConfig, prefix=None, startTD=None, endTD=None,
                            refresh=False, maxWorkers=os.cpu_count()):
        """Generate risk reports for all portfolio files within [startTD, endTD].

        The weight source depends on the prefix: 'trading_portfolio' reads
        post-trade position files, anything else reads rebalance files.
        Runs in a process pool when maxWorkers > 1, serially otherwise.
        """
        if prefix is None:
            prefix = simConfig.prefix
        fileType = 'positions_posttrade' if prefix == 'trading_portfolio' else 'rebalance'

        dfFiles = simConfig.getFileFrame(prefix=prefix, fileType=fileType)
        # Optional date-window restriction.
        if startTD is not None:
            dfFiles = dfFiles[dfFiles['tradeDate'] >= PyDate.asDate(startTD)]
        if endTD is not None:
            dfFiles = dfFiles[dfFiles['tradeDate'] <= PyDate.asDate(endTD)]
        dfFiles = dfFiles.assign(simConfig=simConfig, prefix=prefix, refresh=refresh)

        records = dfFiles.to_dict(orient='records')
        if maxWorkers > 1:
            with concurrent.futures.ProcessPoolExecutor(max_workers=maxWorkers) as executor:
                executor.map(cls.__generateRiskReport__, records)
        else:
            for record in records:
                cls.__generateRiskReport__(record)

    @classmethod
    def generateDailyReturns(cls, simConfig, save=True, csvname=None):
        """Compute the per-day return/cost series from the simulation NAV frame.

        Adds cost ratios (fcost/bcost/tcost), holding return, log return and
        period return relative to beginning-of-day NAV, then cumulative log and
        compound returns in signal-date order.

        Note: the simulation assumes we execute at the end of the tradeDate
        (signalDate + 1), so the portfolio is subject to the holding returns
        of the tradeDate.

        :param simConfig: simulation configuration (provides loadNAVFrame etc.)
        :param save: persist the result via simConfig.writeDailyReturnsFile
        :param csvname: optional path for a csv dump of the result
        :return: the daily-returns DataFrame
        """
        dfNAV = simConfig.loadNAVFrame()

        dfRet = dfNAV.assign(
            fcost=dfNAV['fcostUSD'] / dfNAV['bodNAV'],
            bcost=dfNAV['bcostUSD'] / dfNAV['bodNAV'],
            tcost=dfNAV['tcostUSD'] / dfNAV['bodNAV'],
            holdret=dfNAV['pnlHoldUSD'] / dfNAV['bodNAV'],
            logret=np.log(dfNAV['eodNAV']) - np.log(dfNAV['bodNAV']),
            periodret=dfNAV['eodNAV'] / dfNAV['bodNAV'] - 1)

        dfRet = dfRet.sort_values(by='signalDate', ascending=True)
        dfRet = dfRet.assign(cumlogret=dfRet['logret'].cumsum())
        dfRet = dfRet.assign(cumret=np.exp(dfRet['cumlogret']) - 1)

        ## Manual correction of a known bad data point shared by these two
        ## simulations (was duplicated as two identical if-blocks) =====================
        if simConfig.simDir in ('/mnt/signal/simulation/20210907_simulation_sim125',
                                '/mnt/signal/simulation/20211031_simulation_prod'):
            dfRet.loc[dfRet['signalDate'] == PyDate.asDate(20150915), 'periodret'] = -0.019457
        ## ==============================================================================

        if save:
            simConfig.writeDailyReturnsFile(dfRet)

        if csvname is not None:
            TextUtil.df2text(dfRet, delim=',', align=False, filename=csvname, digits=6)

        return dfRet

    @classmethod
    def generatePositionReport(cls, simConfig, tradeDate, filename=None):
        """Write a post-trade position report (one row per held asset) and return it.

        Enriches the post-trade position file with latest prices, static
        identifiers (SEDOL, name) and model country, derives a rounded share
        count from EOD USD value, and writes the result as pipe-delimited text.
        """
        dfPositions = simConfig.loadPositionFile(tradeDate, step='posttrade')

        # Attach latest pricing, static identifiers and model country.
        priceFrame = SignalMgr.get('price_frame_latest', tradeDate)
        priceCols = ['assetKey', 'priceCloseUSD', 'priceClose', 'quoteCurrency', 'exchRate']
        dfPositions = dfPositions.merge(priceFrame[priceCols], how='left', on='assetKey')

        staticFrame = SignalMgr.getStatic('model_universe_frame')
        dfPositions = dfPositions.merge(staticFrame[['assetKey', 'RkdSEDOL', 'Name']], how='left', on='assetKey')

        ctryFrame = cls.getModelCountryFrame().rename(columns={'country': 'modelCtry'})
        dfPositions = dfPositions.merge(ctryFrame[['assetKey', 'modelCtry']], how='left', on='assetKey')

        # Implied share count from EOD USD value at the latest USD close.
        shares = np.round(dfPositions['eodValUSD'] / dfPositions['priceCloseUSD']).astype(int)
        dfPositions = dfPositions.assign(numShares=shares)

        keep = ['assetKey', 'RkdSEDOL', 'Name', 'modelCtry', 'quoteCurrency',
                'priceClose', 'priceCloseUSD', 'exchRate', 'eodValUSD', 'eodWeights', 'numShares']
        result = dfPositions[keep].rename(columns={'RkdSEDOL': 'SEDOL', 'Name': 'securityName'})
        result = result.sort_values(by='eodWeights', ascending=False)

        if filename is None:
            filename = os.path.join(os.path.expanduser('~'),
                                    'sample_portfolio_{}.txt'.format(PyDate.asString(tradeDate)))

        output = TextUtil.df2text(result, delim='|', lbuffer='', filename=filename, digits=6, align=False)

        return result


    @classmethod
    def generateMonthlyAttributionReport(cls, simConfig, month, outDir=None):
        """Write a one-month attribution report (csv extracts plus an html page).

        Slices the precomputed attribution frames to the given month, rebuilds
        month-to-date cumulative series, dumps the underlying data as csv files
        into outDir, and renders an html report covering category, alpha-theme,
        risk-theme, tilt and per-country contributions.

        :param simConfig: simulation configuration (provides loadAttributionFile2 etc.)
        :param month: month key understood by PyMonth (e.g. 202103)
        :param outDir: output directory; defaults to a per-user /data/reports path
        """

        if outDir is None:
            outDir = '/data/reports/{}/{}_monthly_attribution_{}'. \
                format(PyEnvironment.username(), simConfig.subdir, PyMonth.asString(month))
        if not FileUtil.exists(outDir):
            FileUtil.createDir(outDir)

        # NOTE: dfSctrAtt is loaded here but not used anywhere in this method.
        dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = simConfig.loadAttributionFile2()

        # P&L: restrict to the month, then rebuild month-to-date cumulative P&L per item.
        dfm = dfPnL[dfPnL['tradeDate'] >= PyMonth.firstWeekday(month)]
        dfm = dfm[dfm['tradeDate'] <= PyMonth.lastWeekday(month)]
        dfm = dfm.sort_values(by='tradeDate', ascending=True).groupby('item'). \
            apply(lambda dfm: dfm.assign(cumPnLUSD=dfm['totalPnLUSD'].cumsum())).reset_index(drop=True)
        columns = ['tradeDate', 'category', 'item', 'totalPnLUSD', 'cumPnLUSD']
        output = TextUtil.df2text(dfm[columns], delim=',', lbuffer='', align=False, digits=6,
                                  filename=os.path.join(outDir, 'pnl.csv'))

        # Factor attribution: month slice, then month-to-date cumulative factor
        # return and contribution per factor.
        dframe = dfAttrib[dfAttrib['tradeDate'] >= PyMonth.firstWeekday(month)]
        dframe = dframe[dframe['tradeDate'] <= PyMonth.lastWeekday(month)]
        dframe = dframe.sort_values(by='tradeDate', ascending=True)
        dframe = dframe.groupby('factor').apply(
            lambda dfm: dfm.assign(cumret=dfm['facret'].cumsum(), cumcontrib=dfm['contrib'].cumsum())). \
            reset_index(drop=True)

        columns = ['tradeDate', 'category', 'factor', 'fexp', 'facret', 'cumret', 'contrib', 'cumcontrib']
        output = TextUtil.df2text(dframe[columns], delim=',', lbuffer='', align=False, digits=6,
                                  filename=os.path.join(outDir, 'attrib.csv'))

        # Cumulative contribution aggregated by attribution category (the builtin
        # `sum` is passed to agg; pandas treats it as a sum aggregation).
        dfAggr = dframe.groupby(['tradeDate', 'category']).agg({'cumcontrib': sum}).reset_index()

        output = TextUtil.df2text(dfAggr, delim=',', lbuffer='', align=False, digits=6,
                                  filename=os.path.join(outDir, 'aggr.csv'))

        # dctReport maps section title -> html fragment (or dict of fragments);
        # rendered at the end by PyHtml.standardReport.
        dctReport = dict()

        ## Links to the underlying data -------------------------------------------------------------------------
        dctReport['Underlying Data'] = PyHtml.insertTable(
            pd.DataFrame({'Return Attribution (csv)': PyHtml.insertLink('attrib.csv', 'Return Attribution (csv)'),
                          'Category Returns (csv)': PyHtml.insertLink('aggr.csv', 'Category Returns (csv)'),
                          'P&L Attribution (csv)': PyHtml.insertLink('pnl.csv', 'P&L Attribution (csv)'),
                          'Country Attribution (csv)': PyHtml.insertLink('ctryatt.csv', 'Country Attribution (csv)')}),
            tableTitle='Underlying Data (csv)', colHeading=False, width=1200)[0]

        ## Category contributions -------------------------------------------------------------------------------
        dct = dict()
        dfm = dfAggr.groupby('tradeDate').agg({'cumcontrib': sum}).reset_index()
        dct['@@Cumulative Returns'] = \
            PlotUtil.dfPlot(dfm, 'tradeDate', 'cumcontrib', ylabel='cumcontrib', title='Cumulative Returns')
        # Month-to-date totals are read off the last trade date in the slice.
        dfm = dfAggr[dfAggr['tradeDate'] == dfAggr['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dct['@@Category Contribution Table'] = \
            PyHtml.insertTable(dfm[['category', 'cumcontrib']], numDigits=4, tableTitle='Category Contributions')[0]
        dct['@@Category Contribution Chart'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'cumcontrib', 'category',
                          ylabel='cumcontrib', title='Category Contributions')
        dctReport['Category Contributions'] = dct

        ## Alpha themes: categories named 'alpha_*' plus the specific return ------------------------------------
        dct = dict()
        pframe = dframe[[x.startswith('alpha_') or x == 'specret' for x in dframe['category']]]
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dct['@@Alpha Theme Contribution Table'] = \
            PyHtml.insertTable(dfm[['factor', 'cumcontrib']], numDigits=4, tableTitle='Alpha Theme Contributions')[0]
        dct['@@Alpha Theme Exposures'] = \
            PlotUtil.plot(pframe[~pframe['fexp'].isnull()], 'tradeDate', 'fexp', 'factor',
                          ylabel='exposures', title='Alpha Theme Exposures')
        dct['@@Alpha Theme Returns'] = \
            PlotUtil.plot(pframe[~pframe['cumret'].isnull()], 'tradeDate', 'cumret', 'factor',
                          ylabel='cumret', title='Alpha Theme Returns')
        dct['@@Alpha Theme Contributions'] = \
            PlotUtil.plot(pframe[~pframe['cumcontrib'].isnull()], 'tradeDate', 'cumcontrib', 'factor',
                          ylabel='cumcontrib', title='Alpha Theme Contributions')
        dctReport['Alpha Theme Contributions'] = dct

        ## Risk themes ('risk_exposure' category) ---------------------------------------------------------------
        dct = dict()
        pframe = dframe[dframe['category'].isin(['risk_exposure'])]
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dct['@@Risk Theme Contribution Table'] = \
            PyHtml.insertTable(dfm[['factor', 'cumcontrib']], numDigits=4, tableTitle='Risk Theme Contributions')[0]
        dct['@@Risk Theme Exposures'] = \
            PlotUtil.plot(pframe[~pframe['fexp'].isnull()], 'tradeDate', 'fexp', 'factor',
                          ylabel='exposures', title='Risk Theme Exposures')
        dct['@@Risk Theme Returns'] = \
            PlotUtil.plot(pframe[~pframe['cumret'].isnull()], 'tradeDate', 'cumret', 'factor',
                          ylabel='cumret', title='Risk Theme Returns')
        dct['@@Risk Theme Contributions'] = \
            PlotUtil.plot(pframe[~pframe['cumcontrib'].isnull()], 'tradeDate', 'cumcontrib', 'factor',
                          ylabel='cumcontrib', title='Risk Theme Contributions')
        dctReport['Risk Theme Contributions'] = dct

        ## Country tilts ----------------------------------------------------------------------------------------
        dct = dict()
        pframe = dframe[dframe['category'].isin(['tilt_COUNTRY'])]
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dct['@@Country Tilt Contribution Table'] = \
            PyHtml.insertTable(dfm[['factor', 'cumcontrib']], numDigits=4, tableTitle='Country Tilt Contributions')[0]
        dct['@@Country Tilt Exposures'] = \
            PlotUtil.plot(pframe[~pframe['fexp'].isnull()], 'tradeDate', 'fexp', 'factor',
                          ylabel='exposures', title='Country Tilt Exposures')
        dct['@@Country Tilt Returns'] = \
            PlotUtil.plot(pframe[~pframe['cumret'].isnull()], 'tradeDate', 'cumret', 'factor',
                          ylabel='cumret', title='Country Tilt Returns')
        dct['@@Country Tilt Contributions'] = \
            PlotUtil.plot(pframe[~pframe['cumcontrib'].isnull()], 'tradeDate', 'cumcontrib', 'factor',
                          ylabel='cumcontrib', title='Country Tilt Contributions')
        dctReport['Country Tilt Contributions'] = dct

        ## Industry tilts ---------------------------------------------------------------------------------------
        dct = dict()
        pframe = dframe[dframe['category'].isin(['tilt_INDUSTRY'])]
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dfm = dfm[['factor', 'cumcontrib']]
        # Truncate the table to top 6 + bottom 5 rows with a '...' separator row.
        # NOTE(review): assumes at least 11 industry rows so row index 5 is the
        # separator and head/tail do not overlap -- confirm for small universes.
        dfm = pd.concat([dfm.head(6), dfm.tail(5)], ignore_index=True)
        dfm.iloc[5, 0] = '...'
        dfm.iloc[5, 1] = np.NaN
        dct['@@Industry Tilt Contribution Table'] = \
            PyHtml.insertTable(dfm, numDigits=4, tableTitle='Industry Tilt Contributions')[0]
        dct['@@Industry Tilt Exposures'] = \
            PlotUtil.plot(pframe[~pframe['fexp'].isnull()], 'tradeDate', 'fexp', 'factor',
                          ylabel='exposures', title='Industry Tilt Exposures')
        dct['@@Industry Tilt Returns'] = \
            PlotUtil.plot(pframe[~pframe['cumret'].isnull()], 'tradeDate', 'cumret', 'factor',
                          ylabel='cumret', title='Industry Tilt Returns')
        dct['@@Industry Tilt Contributions'] = \
            PlotUtil.plot(pframe[~pframe['cumcontrib'].isnull()], 'tradeDate', 'cumcontrib', 'factor',
                          ylabel='cumcontrib', title='Industry Tilt Contributions')
        dctReport['Industry Tilt Contributions'] = dct

        ## Size tilts -------------------------------------------------------------------------------------------
        dct = dict()
        pframe = dframe[dframe['category'].isin(['tilt_SIZE'])]
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()].sort_values(by='cumcontrib', ascending=False)
        dct['@@Size Tilt Contribution Table'] = \
            PyHtml.insertTable(dfm[['factor', 'cumcontrib']], numDigits=4, tableTitle='Size Tilt Contributions')[0]
        dct['@@Size Tilt Exposures'] = \
            PlotUtil.plot(pframe[~pframe['fexp'].isnull()], 'tradeDate', 'fexp', 'factor',
                          ylabel='exposures', title='Size Tilt Exposures')
        dct['@@Size Tilt Returns'] = \
            PlotUtil.plot(pframe[~pframe['cumret'].isnull()], 'tradeDate', 'cumret', 'factor',
                          ylabel='cumret', title='Size Tilt Returns')
        dct['@@Size Tilt Contributions'] = \
            PlotUtil.plot(pframe[~pframe['cumcontrib'].isnull()], 'tradeDate', 'cumcontrib', 'factor',
                          ylabel='cumcontrib', title='Size Tilt Contributions')
        dctReport['Size Tilt Contributions'] = dct

        ## attribution by country ================================================================================
        dctReport['@@country-factor'] = PyHtml.insertHeadline('Country-Factor Attribution')[0]

        # Country-level attribution: month slice, then cumulative contribution
        # per (country, factor).  Note: dframe is rebound here.
        dframe = dfCtryAtt[dfCtryAtt['tradeDate'] >= PyMonth.firstWeekday(month)]
        dframe = dframe[dframe['tradeDate'] <= PyMonth.lastWeekday(month)]
        dframe = dframe.sort_values(by=['tradeDate', 'country'])
        dframe = dframe.groupby(['country', 'factor']). \
            apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())).reset_index(drop=True)

        columns = ['tradeDate', 'country', 'category', 'factor', 'fexp', 'facret', 'contrib', 'cumcontrib']
        output = TextUtil.df2text(dframe[columns], delim=',', lbuffer='', align=False, digits=6,
                                  filename=os.path.join(outDir, 'ctryatt.csv'))

        # Model-defined alpha and risk factor name lists for this strategy.
        modelName = Strategy.getModelName(simConfig.stratName)
        themes = FactorModel.getAlphaThemes(modelName)
        rfactors = FactorModel.getRiskThemes(modelName)

        # Per-country aggregate of alpha-theme contribution, joined with specret;
        # spread = alpha themes + specific return.
        dfAggr = dframe[dframe['factor'].isin(themes)]
        dfAggr = dfAggr.groupby(['country', 'tradeDate']). \
            agg({'cumcontrib': sum}).reset_index().rename(columns={'cumcontrib': 'themes'})
        dfAggr = dfAggr.merge(dframe[dframe['factor'] == 'specret'][['country', 'tradeDate', 'cumcontrib']]. \
                              rename(columns={'cumcontrib': 'specret'}), how='left', on=['country', 'tradeDate'])
        dfAggr = dfAggr.assign(spread=dfAggr['themes'] + dfAggr['specret'])

        # Tilt contributions by country, categorized from the factor-name prefix
        # ('country_*' / 'ind_*' / 'size_*'), pivoted into one column per category.
        dfm = dframe[[x.split('_')[0] in ['country', 'ind', 'size'] for x in dframe['factor']]]
        dfm = dfm.assign(category=[x.split('_')[0] for x in dfm['factor']])
        dfm = dfm[['country', 'tradeDate', 'category', 'cumcontrib']]
        dfm = dfm.groupby(['country', 'category', 'tradeDate']).agg({'cumcontrib': sum}).reset_index()
        dfm = dfm.pivot(index=['country', 'tradeDate'], columns='category', values='cumcontrib'). \
            rename(columns={'country': 'ctry'}).reset_index()
        dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'tradeDate'])

        # Risk contribution per country: market factor plus model risk themes.
        dfm = dframe[dframe['factor'].isin(['market'] + rfactors)][
            ['country', 'factor', 'tradeDate', 'cumcontrib']]
        dfm = dfm.groupby(['country', 'tradeDate']).agg({'cumcontrib': sum}).reset_index(). \
            rename(columns={'cumcontrib': 'risk'})
        dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'tradeDate'])

        # Per-theme cumulative contribution by country (for the per-country charts).
        dfThemes = dframe[dframe['factor'].isin(themes)][['country', 'tradeDate', 'factor', 'cumcontrib']]. \
            rename(columns={'factor': 'theme'})

        dct = dict()
        dct['AggrAlphaTheme'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'themes', 'country', title='AggrAlphaThemeContrib')
        dct['AggrSpecret'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'specret', 'country', title='AggrSpecretContrib')
        dct['AggrAlphaSpread'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'spread', 'country', title='AggrAlphaSpreadContrib')
        dctReport['@@Aggr01'] = dct

        dct = dict()
        dct['AggrCtryContrib'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'ctry', 'country', title='AggrCtryContrib')
        dct['AggrIndContrib'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'ind', 'country', title='AggrIndContrib')
        dct['AggrSizeContrib'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'size', 'country', title='AggrSizeContrib')
        dct['AggrRisk'] = \
            PlotUtil.plot(dfAggr, 'tradeDate', 'risk', 'country', title='AggrRiskContrib')
        dctReport['@@Aggr02'] = dct

        categories = ['themes', 'specret', 'spread', 'ctry', 'ind', 'size', 'risk']

        # Per-country panels for a fixed list of markets; any plotting failure
        # (e.g. a country absent from this month's data) is logged, not raised.
        try:
            for country in ['CN', 'XH', 'HK', 'JP', 'KR', 'TW', 'AU', 'IN']:
                dct = dict()
                dct['AggrContrib-{}'.format(country)] = \
                    PlotUtil.dfPlot(dfAggr[dfAggr['country'] == country], 'tradeDate', categories,
                                    title='AggrContrib ({})'.format(country))
                dct['AlphaThemeContrib-{}'.format(country)] = \
                    PlotUtil.plot(dfThemes[dfThemes['country'] == country], 'tradeDate', 'cumcontrib', 'theme',
                                  title='AlphaThemeContrib ({})'.format(country))
                dctReport[country] = dct

        except Exception as ee:
            PyLog.info(traceback.format_exc())

        # Render the assembled sections as a single html page.
        htmlname = os.path.join(outDir, 'monthly_attribution_report.html')
        PyLog.info(f"generateMonthlyAttributionReport will write html report to {htmlname}")
        PyHtml.standardReport(dctReport, filename=htmlname,
                              title='Monthly Attribution Report ({})'.format(PyMonth.asString(month)),
                              font=2, halign='center', header=None)

    @classmethod
    def generateTradingAttributionReport(cls, simConfig, outDir=None, suffix=None, refresh=False, updateInterval=5,
                                         startTD=None, endTD=None):

        debug = True
        if debug: PyLog.info('generateTradingAttributionReport 0')

        if outDir is None:
            outDir = '/data/reports/{}/{}_attribution'.format(PyEnvironment.username(), simConfig.subdir)
        if suffix is not None:
            outDir = outDir + '_' + suffix
        if not FileUtil.exists(outDir):
            FileUtil.createDir(outDir)

        if debug: PyLog.info('generateTradingAttributionReport 1')

        dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = \
            cls.compileTradingAttributionFrame2(simConfig, refresh=refresh, updateInterval=updateInterval)

        # dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = simConfig.loadAttributionFile2()

        #        startTD = 20240219
        if debug: PyLog.info('Simulate.py.generateTradingAttributionReport --> startTD ({})'.format(startTD))
        if debug: PyLog.info('generateTradingAttributionReport 2')

        if startTD is not None:
            try:
                if debug: PyLog.info('generateTradingAttributionReport 2.1')
                startTD = PyDate.asDate(startTD)
                dfAttrib = dfAttrib[dfAttrib['tradeDate'] >= startTD]
                dfAttrib = dfAttrib.sort_values(by=['factor', 'tradeDate'])
                if debug: PyLog.info('generateTradingAttributionReport 2.2')
                dfAttrib = dfAttrib.groupby('factor'). \
                    apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
                    reset_index(drop=True)
                if debug: PyLog.info('generateTradingAttributionReport 2.3')
                dfPnL = dfPnL[dfPnL['tradeDate'] >= startTD]
                dfPnL = dfPnL.sort_values(by=['category', 'item', 'tradeDate'])
                dfPnL = dfPnL.groupby(['category', 'item']). \
                    apply(lambda dfm: dfm.assign(cumPnLUSD=dfm['totalPnLUSD'].cumsum())). \
                    reset_index(drop=True)
                if debug: PyLog.info('generateTradingAttributionReport 2.4')
                dfCtryAtt = dfCtryAtt[dfCtryAtt['tradeDate'] >= startTD]
                dfCtryAtt = dfCtryAtt.sort_values(by=['country', 'factor', 'tradeDate'])
                dfCtryAtt = dfCtryAtt.groupby(['country', 'factor']). \
                    apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
                    reset_index(drop=True)
                if debug: PyLog.info('generateTradingAttributionReport 2.5')
                dfSctrAtt = dfSctrAtt[dfSctrAtt['tradeDate'] >= startTD]
                dfSctrAtt = dfSctrAtt.sort_values(by=['sector', 'factor', 'tradeDate'])
                dfSctrAtt = dfSctrAtt.groupby(['sector', 'factor']). \
                    apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
                    reset_index(drop=True)
            except Exception as ee:
                PyLog.info('generateTradingAttributionReport StartTD block ERR({})'.format(ee))

        if debug: PyLog.info('generateTradingAttributionReport 3')

        if endTD is not None:
            endTD = PyDate.asDate(endTD)
            # dfAttrib = dfAttrib[dfAttrib['signalDate'] <= endSD]
            # dfPnL = dfPnL[dfPnL['signalDate'] <= endSD]
            # dfCtryAtt = dfCtryAtt[dfCtryAtt['signalDate'] <= endSD]
            dfAttrib = dfAttrib[dfAttrib['tradeDate'] <= endTD]
            dfPnL = dfPnL[dfPnL['tradeDate'] <= endTD]
            dfCtryAtt = dfCtryAtt[dfCtryAtt['tradeDate'] <= endTD]
            dfSctrAtt = dfSctrAtt[dfSctrAtt['tradeDate'] <= endTD]

        if debug: PyLog.info('generateTradingAttributionReport 4')

        pd.set_option('display.max_columns', None)

        filename = os.path.join(outDir, 'attrib.csv')
        output = TextUtil.df2text(dfAttrib, filename=filename, delim=',', lbuffer='', align=False, digits=6)

        filename = os.path.join(outDir, 'pnl.csv')
        output = TextUtil.df2text(dfPnL, filename=filename, delim=',', lbuffer='', align=False, digits=6)

        filename = os.path.join(outDir, 'ctryatt.csv')
        output = TextUtil.df2text(dfCtryAtt, filename=filename, delim=',', lbuffer='', align=False, digits=6)

        filename = os.path.join(outDir, 'sctratt.csv')
        output = TextUtil.df2text(dfSctrAtt, filename=filename, delim=',', lbuffer='', align=False, digits=6)


        try:
            dctAttrReport = cls.compileTradingAttributionReport(simConfig, dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt)
            filename = os.path.join(outDir, 'trading_portfolio_attribution_report.html')
            PyHtml.standardReport(dctAttrReport, filename=filename, title='Attribution Report',
                                  font=2, halign='center', header=None)
        except Exception as ee:
            PyLog.info(traceback.format_exc())

    @classmethod
    def compileTradingAttributionFrame2(cls, simConfig, refresh=False, updateInterval=5):
        """Build (or incrementally extend) the daily attribution frames for a simulation.

        For each tradeDate it decomposes the portfolio log return into factor
        contributions (risk-model loadings x factor returns), specific-return
        contributions, cost contributions, a security-level discrepancy term and a
        pro-rated residual, and also tallies total P&L by country/industry/sector/size.

        Parameters
        ----------
        simConfig : simulation configuration object providing loadDailyReturnsFile(),
            loadAttributionFile2(), loadPositionFile(), backupAttributionFile2() and
            writeAttributionFile2().
        refresh : bool -- when True, ignore the saved attribution file and rebuild
            the full history from the daily returns file.
        updateInterval : int -- overlap window in weekdays; in incremental mode the
            last `updateInterval` weekdays of saved history are recomputed.

        Returns
        -------
        (dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt) : tuple of DataFrames -- overall
        factor attribution, P&L by category/item, and country- / sector-level factor
        attribution, each with cumulative columns and factor categories attached.
        """
        debug = True
        PyLog.info("Compiling attribution frame...")

        # Signal names for daily factor / specific returns; horizon of 1 weekday.
        facretName = 'facretd'
        specretName = 'specretd'
        returnHorizon = 1


        dfReturns = simConfig.loadDailyReturnsFile()


        if not refresh:
            dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = simConfig.loadAttributionFile2()

        attribCols = ['signalDate', 'tradeDate', 'factor', 'fexp', 'facret', 'contrib']
        pnlCols = ['signalDate', 'tradeDate', 'category', 'item', 'totalPnLUSD']
        ctryCols = ['signalDate', 'tradeDate', 'country', 'factor', 'fexp', 'facret', 'contrib']
        sctrCols = ['signalDate', 'tradeDate', 'sector', 'factor', 'fexp', 'facret', 'contrib']


        # Incremental mode: keep saved history strictly older than the overlap
        # window and recompute the tail.  NOTE: the df* names are rebound here
        # from DataFrames to *lists of DataFrames*; per-date frames are appended
        # in the loop below and everything is concatenated at the end.
        if (not refresh) and (len(dfAttrib) > 0):
            # minDate = PyDate.minusWeekdays(dfAttrib['signalDate'].max(), updateInterval)
            # minDate = max(minDate, dfReturns['signalDate'].min())
            # dfAttrib = [dfAttrib[dfAttrib['signalDate'] < minDate][attribCols]]
            # dfPnL = [dfPnL[dfPnL['signalDate'] < minDate][pnlCols]]
            # dfCtryAtt = [dfCtryAtt[dfCtryAtt['signalDate'] < minDate][ctryCols]]
            # dfSctrAtt = [dfSctrAtt[dfSctrAtt['signalDate'] < minDate][sctrCols]]
            minTD = PyDate.minusWeekdays(dfAttrib['tradeDate'].max(), updateInterval)
            minTD = max(minTD, dfReturns['tradeDate'].min())
            dfAttrib = [dfAttrib[dfAttrib['tradeDate'] < minTD][attribCols]]
            dfPnL = [dfPnL[dfPnL['tradeDate'] < minTD][pnlCols]]
            dfCtryAtt = [dfCtryAtt[dfCtryAtt['tradeDate'] < minTD][ctryCols]]
            dfSctrAtt = [dfSctrAtt[dfSctrAtt['tradeDate'] < minTD][sctrCols]]
        else:
            # Full rebuild: start from the earliest available daily return.
            # minDate = dfReturns['signalDate'].min()
            minTD = dfReturns['tradeDate'].min()
            dfAttrib = list()
            dfPnL = list()
            dfCtryAtt = list()
            dfSctrAtt = list()

        # maxDate = PyDate.prevWeekday(PyDate.weekdayLE(PyDate.today()))
        # lstDates = dfReturns[(dfReturns['signalDate'] >= minDate) & (dfReturns['signalDate'] <= maxDate)]\
        #     ['signalDate'].tolist()
        maxTD = PyDate.weekdayLE(PyDate.today())
        lstTD = dfReturns[(dfReturns['tradeDate'] >= minTD) & (dfReturns['tradeDate'] <= maxTD)]['tradeDate'].tolist()

        dfCtry = cls.getModelCountryFrame()


        # Per-tradeDate attribution: any failure is logged and the date skipped,
        # so one bad date does not abort the whole rebuild.
        for tradeDate in lstTD:

            try:
                signalDate = PyDate.prevWeekday(tradeDate)


                bodNAV = dfReturns[dfReturns['tradeDate'] == tradeDate]['bodNAV'].iloc[0]
                dfPositions = simConfig.loadPositionFile(tradeDate=tradeDate, step='posttrade')
                dfPositions = dfPositions.set_index(dfPositions['assetKey'])
                dfPositions.index.name = None


                ## total P&L by category ------------------------------------------------------------------
                dfCateg = SignalMgr.getFrame(['industry', 'sector', 'size_bucket'], signalDate). \
                    drop(columns='signalDate').rename(columns={'size_bucket': 'size'}). \
                    merge(dfCtry, how='outer', on='assetKey')
                dfCateg = dfPositions.merge(dfCateg, how='left', on='assetKey')
                for category in ['country', 'industry', 'sector', 'size']:
                    dfm = dfCateg.groupby(category).agg({'totalPnLUSD': sum}).reset_index(). \
                        assign(category=category, signalDate=signalDate, tradeDate=tradeDate). \
                        rename(columns={category: 'item'})
                    dfPnL.append(dfm)

                ## factor attribution ---------------------------------------------------------------------
                # With returnHorizon=1 the return date is the tradeDate itself.
                returnDate = PyDate.plusWeekdays(tradeDate, returnHorizon - 1)

                facret = SignalMgr.get(facretName, returnDate)
                specret = SignalMgr.get(specretName, returnDate)


                rmodel = SignalMgr.get('risk_model', signalDate)


                fload = rmodel['fload']


                # Average of begin-of-day and end-of-day weights approximates the
                # intraday holding used for contribution math.
                dfPositions = dfPositions.assign(
                    avgWeights=(dfPositions['bodWeights'].fillna(0.0) + dfPositions['eodWeights'].fillna(0.0)) / 2.0)
                dfPositions = dfPositions.sort_values(by='avgWeights', ascending=True)

                assets = list(set(dfPositions['assetKey']).intersection(set(specret.index)))
                fload = fload.reindex(assets).reindex(dfPositions['assetKey']).fillna(0.0)
                ## this should take care of exchange holidays and trading halts

                specret = specret.reindex(dfPositions['assetKey']).fillna(0.0)
                weights = dfPositions['avgWeights'].fillna(0.0)

                factors = fload.columns.tolist()


                ## factor contributions
                fcontrib = fload * weights.to_numpy().reshape(len(weights), 1) \
                           * facret.reindex(factors).fillna(0.0).to_numpy()

                ## specret contributions
                scontrib = specret.reindex(dfPositions['assetKey']).fillna(0.0) \
                           * FrameUtil.toSeries(dfPositions, keyCol='assetKey', valCol='avgWeights').fillna(0.0)

                ## cost contributions
                # Costs are negative log-return contributions scaled by BOD NAV.
                ccontrib = -dfPositions[['fcostUSD', 'bcostUSD', 'linearCostUSD', 'impactCostUSD']]. \
                    set_index(dfPositions['assetKey']) / bodNAV
                ccontrib = np.log(1 + ccontrib)
                ccontrib.columns = [x.replace('USD', '') for x in ccontrib.columns]

                ## security-level discrepancy (trade timing, execution price, etc)
                dcontrib = np.log(1 + (dfPositions['pnlHoldUSD'] - dfPositions['fcostUSD']
                                       - dfPositions['bcostUSD'] - dfPositions['tcostUSD']) / bodNAV) \
                           - fcontrib.sum(axis=1) - scontrib - ccontrib.sum(axis=1)


                ## residual returns
                # Whatever is left of the day's log return after all explained
                # pieces, pro-rated across names by absolute average weight.
                residret = dfReturns[dfReturns['signalDate'] == signalDate]['logret'].iloc[0] \
                           - (fcontrib.sum().sum() + scontrib.sum() + ccontrib.sum().sum() + dcontrib.sum())
                rcontrib = abs(dfPositions['avgWeights']) / abs(dfPositions['avgWeights']).sum() * residret

                ## fcontrib.sum().sum() + scontrib.sum() + ccontrib.sum().sum() + dcontrib.sum() + rcontrib.sum()
                ## should match logret from dfReturns

                dfContrib = fcontrib.assign(specret=scontrib)
                dfContrib = pd.concat([dfContrib, ccontrib], axis=1)
                dfContrib = dfContrib.assign(sdiscrepancy=dcontrib)
                dfContrib = dfContrib.assign(pdiscrepancy=rcontrib)

                ## dfContrib.sum().sum() should match logret from dfReturns

                # Per-factor exposure = weight-scaled loadings summed over names.
                fexp = fload * weights.to_numpy().reshape(len(weights), 1)
                dfm = fexp.sum(axis=0).reset_index().rename(columns={'index': 'factor', 0: 'fexp'})
                dfm = dfm.merge(facret.reset_index().rename(columns={'index': 'factor', 0: 'facret'}),
                                how='left', on='factor')
                dfm = dfm.assign(fexp=dfm['fexp'].fillna(0.0), facret=dfm['facret'].fillna(0.0))
                dfm = dfm.merge(dfContrib.sum(axis=0).reset_index().rename(columns={'index': 'factor', 0: 'contrib'}),
                                how='outer', on='factor')
                dfm = dfm.assign(signalDate=signalDate, tradeDate=tradeDate)

                dfAttrib.append(dfm[attribCols])

                ## country factor attribution -----------------------------------------------------------------------
                dfm = fexp.merge(dfCtry, how='left', on='assetKey'). \
                    groupby('country').apply(lambda dfm: dfm[fexp.columns].sum(axis=0)). \
                    unstack().reset_index().rename(columns={'level_0': 'factor', 0: 'fexp'})
                dfm = dfm.merge(facret.reset_index().rename(columns={'index': 'factor', 0: 'facret'}),
                                how='left', on='factor')
                dfm = dfm.assign(fexp=dfm['fexp'].fillna(0.0), facret=dfm['facret'].fillna(0.0))
                dfm = dfm.merge(dfContrib.merge(dfCtry, how='left', on='assetKey'). \
                                groupby('country').apply(lambda dfm: dfm[dfContrib.columns].sum(axis=0)). \
                                unstack().reset_index().rename(columns={'level_0': 'factor', 0: 'contrib'}),
                                how='outer', on=['country', 'factor'])
                dfm = dfm.assign(signalDate=signalDate, tradeDate=tradeDate)

                dfCtryAtt.append(dfm[ctryCols])

                ## sector factor attribution -----------------------------------------------------------------------
                sector = SignalMgr.get('sector', signalDate)
                dfm = fexp.assign(sector=sector.reindex(fexp.index)). \
                    groupby('sector').apply(lambda dfm: dfm[fexp.columns].sum(axis=0)). \
                    unstack().reset_index().rename(columns={'level_0': 'factor', 0: 'fexp'})
                dfm = dfm.merge(facret.reset_index().rename(columns={'index': 'factor', 0: 'facret'}),
                                how='left', on='factor')
                dfm = dfm.assign(fexp=dfm['fexp'].fillna(0.0), facret=dfm['facret'].fillna(0.0))
                dfm = dfm.merge(dfContrib.assign(sector=sector.reindex(dfContrib.index)). \
                                groupby('sector').apply(lambda dfm: dfm[dfContrib.columns].sum(axis=0)). \
                                unstack().reset_index().rename(columns={'level_0': 'factor', 0: 'contrib'}),
                                how='outer', on=['sector', 'factor'])
                dfm = dfm.assign(signalDate=signalDate, tradeDate=tradeDate)


                dfSctrAtt.append(dfm[sctrCols])

            except Exception as ee:
                PyLog.info(
                    'dch::compileTradingAttributionFrame2 big block ERR({})  signalDate({})'.format(ee, signalDate))


        # Concatenate the per-date frames and attach cumulative contribution /
        # P&L columns within each grouping.
        dfAttrib = pd.concat(dfAttrib, ignore_index=True)
        dfAttrib = dfAttrib.sort_values(by='tradeDate')
        dfAttrib = dfAttrib.groupby('factor'). \
            apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())).reset_index(drop=True)

        dfPnL = pd.concat(dfPnL, ignore_index=True)
        dfPnL = dfPnL.sort_values(by='tradeDate', ascending=True)
        dfPnL = dfPnL.groupby(['category', 'item']). \
            apply(lambda dfm: dfm.assign(cumPnLUSD=dfm['totalPnLUSD'].cumsum())). \
            reset_index(drop=True)

        dfCtryAtt = pd.concat(dfCtryAtt, ignore_index=True)
        dfCtryAtt = dfCtryAtt.sort_values(by=['tradeDate', 'country'])
        dfCtryAtt = dfCtryAtt.groupby(['country', 'factor']). \
            apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())).reset_index(drop=True)

        dfSctrAtt = pd.concat(dfSctrAtt, ignore_index=True)
        dfSctrAtt = dfSctrAtt.sort_values(by=['tradeDate', 'sector'])
        dfSctrAtt = dfSctrAtt.groupby(['sector', 'factor']). \
            apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())).reset_index(drop=True)

        # Map each factor to a reporting category (tilt / risk / cost / alpha / unexplained).
        dfAttrib = cls.categorizeFactors(dfAttrib, factorCol='factor', categCol='category')
        dfCtryAtt = cls.categorizeFactors(dfCtryAtt, factorCol='factor', categCol='category')
        dfSctrAtt = cls.categorizeFactors(dfSctrAtt, factorCol='factor', categCol='category')

        # Persist: back up the previous attribution file, then overwrite.
        simConfig.backupAttributionFile2()
        simConfig.writeAttributionFile2(dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt)

        return dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt

    @classmethod
    def compileMonthlyAttributions(cls, simConfig, startTD=None, endTD=None):
        """Aggregate daily factor attribution into monthly gross and net frames.

        Gross: raw contributions summed per month per category (alpha factors kept
        individually, everything else rolled up by category).  Net: the same, but
        the daily 'unexplained' bucket is first redistributed across factors in
        proportion to each factor's share of portfolio risk from that day's risk
        report.

        Parameters
        ----------
        simConfig : simulation configuration providing loadAttributionFile2() and
            loadRiskReport().
        startTD, endTD : optional tradeDate bounds (inclusive) applied to dfAttrib.

        Returns
        -------
        (dfGross, dfNet) : DataFrames keyed by (month, category) with monthly log
        contributions and cumulative simple returns (cumcontrib = exp(cumlog) - 1).
        """
        ## Used for ClockTower (202206) / Blue Pool (202207)
        dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = simConfig.loadAttributionFile2()
        if startTD is not None:
            dfAttrib = dfAttrib[dfAttrib['tradeDate'] >= PyDate.asDate(startTD)]
        if endTD is not None:
            dfAttrib = dfAttrib[dfAttrib['tradeDate'] <= PyDate.asDate(endTD)]

        dframe = dfAttrib[['tradeDate', 'factor', 'category', 'contrib']].rename(columns={'contrib': 'logcontrib'})

        # Non-alpha categories are rolled up; alpha factors are reported per factor
        # (the factor name becomes the category).
        dfm = dframe[[not x.startswith('alpha_') for x in dframe['category']]]
        dfm = dfm.groupby(['tradeDate', 'category']).agg({'logcontrib': sum}).reset_index()
        dframe = pd.concat([
            dframe[[x.startswith('alpha_') for x in dframe['category']]][['tradeDate', 'factor', 'logcontrib']]. \
                rename(columns={'factor': 'category'}), dfm], ignore_index=True)
        dframe = dframe.assign(month=[PyMonth.asNumeric(x) for x in dframe['tradeDate']])
        dframe = dframe.groupby(['month', 'category']).agg({'logcontrib': sum}).reset_index()

        dfGross = dframe.sort_values(by='month', ascending=True).groupby('category'). \
            apply(lambda dfm: dfm.assign(cumlogcontrib=dfm['logcontrib'].cumsum())).reset_index(drop=True)
        dfGross = dfGross.assign(cumcontrib=np.exp(dfGross['cumlogcontrib']) - 1)

        # Per-day factor risk allocation (normalized to sum to 1), used to spread
        # the unexplained contribution across factors for the net view.
        dfAlloc = list()
        for tradeDate in dfAttrib['tradeDate'].unique():
            dct = simConfig.loadRiskReport(tradeDate=tradeDate)
            dfm = dct['factorRisk'][['factor', 'pctcontrib']]
            dfm = dfm.assign(pctcontrib=dfm['pctcontrib'] / dfm['pctcontrib'].sum())
            dfm = dfm.assign(tradeDate=tradeDate)
            dfAlloc.append(dfm)
        dfAlloc = pd.concat(dfAlloc, ignore_index=True)

        dfUnexp = dfAttrib[dfAttrib['category'] == 'unexplained'].groupby('tradeDate'). \
            agg({'contrib': sum}).reset_index().rename(columns={'contrib': 'unexplained'})

        dframe = dfAttrib[dfAttrib['category'] != 'unexplained'][['tradeDate', 'factor', 'category', 'contrib']]. \
            rename(columns={'contrib': 'logcontrib'})
        dframe = dframe.merge(dfAlloc, how='left', on=['tradeDate', 'factor'])
        dframe = dframe.merge(dfUnexp, how='left', on='tradeDate')
        dframe = dframe.assign(
            netlogcontrib=dframe['logcontrib'] + dframe['pctcontrib'].fillna(0.0) * dframe['unexplained'])

        dfm = dframe[[not x.startswith('alpha_') for x in dframe['category']]].groupby(['tradeDate', 'category']). \
            agg({'netlogcontrib': sum}).reset_index()
        dframe = pd.concat([
            dframe[[x.startswith('alpha_') for x in dframe['category']]][['tradeDate', 'factor', 'netlogcontrib']]. \
                rename(columns={'factor': 'category'}), dfm], ignore_index=True)
        dframe = dframe.assign(month=[PyMonth.asNumeric(x) for x in dframe['tradeDate']])

        dfNet = dframe.groupby(['month', 'category']).agg({'netlogcontrib': sum}).reset_index()
        dfNet = dfNet.sort_values(by='month', ascending=True). \
            groupby('category').apply(lambda dfm: dfm.assign(cumlogcontrib=dfm['netlogcontrib'].cumsum())). \
            reset_index(drop=True)
        dfNet = dfNet.assign(cumcontrib=np.exp(dfNet['cumlogcontrib']) - 1)

        return dfGross, dfNet

    @classmethod
    def categorizeFactors(cls, dframe, factorCol='factor', categCol='category'):
        """Label each factor with its reporting category.

        Writes ``categCol`` into ``dframe`` in place and returns the same frame.
        Prefix buckets (country_/ind_/size_) are checked first, then an
        exact-name table; anything unrecognized keeps its factor name as its
        category.
        """
        exactCateg = {
            'market': 'risk_exposure', 'sbeta_market': 'risk_exposure',
            'bcost': 'cost', 'fcost': 'cost', 'linearCost': 'cost', 'impactCost': 'cost',
            'momentum': 'alpha_MOMENTUM', 'linkage': 'alpha_MOMENTUM',
            'reversal': 'alpha_REVERSAL',
            'revision': 'alpha_REVISION',
            'quality': 'alpha_QUALITY', 'stability': 'alpha_QUALITY', 'fmom': 'alpha_QUALITY',
            'value_gc': 'alpha_VALUE', 'value_liq': 'alpha_VALUE', 'neglect': 'alpha_VALUE',
            'specret': 'unexplained', 'sdiscrepancy': 'unexplained', 'pdiscrepancy': 'unexplained',
        }

        def bucket(name):
            if name.startswith('country_'):
                return 'tilt_COUNTRY'
            if name.startswith('ind_'):
                return 'tilt_INDUSTRY'
            if name.startswith('size_'):
                return 'tilt_SIZE'
            return exactCateg.get(name, name)

        dframe[categCol] = [bucket(name) for name in dframe[factorCol]]
        return dframe

    @classmethod
    def getModelCountryFrame(cls):
        """Return the assetKey-to-country frame from the static model_country
        signal, extended with a few hand-maintained overrides (first occurrence
        of each assetKey wins)."""
        overrides = pd.DataFrame([{'assetKey': '@ENNOS1', 'country': 'TW'},
                                  {'assetKey': '@BEIGE9', 'country': 'XH'},
                                  {'assetKey': '@FOXCO1', 'country': 'TW'}])
        cframe = SignalMgr.getStatic('model_country').reset_index(). \
            rename(columns={'index': 'assetKey', 0: 'country'})
        cframe = pd.concat([cframe, overrides], ignore_index=True).drop_duplicates(subset='assetKey')
        return cframe

    @classmethod
    def getQuoteCountryFrame(cls):
        """Return the model country frame with country code 'XH' remapped to 'HK'."""
        cframe = cls.getModelCountryFrame()
        isXH = cframe['country'] == 'XH'
        cframe.loc[isXH, 'country'] = 'HK'
        return cframe

    @classmethod
    def drawdowns(cls, simConfig, threshold=0.05):
        """List drawdowns deeper than ``threshold`` for a simulation.

        Parameters
        ----------
        simConfig : simulation configuration accepted by generateDailyReturns().
        threshold : float -- minimum drawdown depth to report (0.05 = 5%).

        Returns
        -------
        DataFrame ['highwaterTD', 'tradeDate', 'drawdown'] with one row per
        distinct high-water mark: the date the peak was set, the trough date,
        and the peak-to-trough return (negative).
        """
        dfReturns = cls.generateDailyReturns(simConfig, save=False)

        dfReturns = dfReturns.sort_values(by='signalDate', ascending=True)
        dfReturns = dfReturns.assign(year=[x.year for x in dfReturns['tradeDate']])
        ## Running high-water mark of cumret.  cummax().ffill() replaces the
        ## former O(n^2) per-row iloc[:n+1].max() scan with an O(n) pass and
        ## matches its NaN handling (a NaN cumret inherits the previous max).
        dfReturns = dfReturns.assign(highwater=dfReturns['cumret'].cummax().ffill())
        dfReturns = dfReturns.assign(highwater=Filter.bound(dfReturns['highwater'].fillna(0.0), lower=0.0))
        ## Drawdown as a simple return: exp(log(1+cumret) - log(1+highwater)) - 1.
        dfReturns = dfReturns.assign(
            drawdown=np.exp(np.log(dfReturns['cumret'] + 1) - np.log(dfReturns['highwater'] + 1)) - 1)

        dframe = dfReturns[dfReturns['drawdown'] < -threshold]
        dframe = dframe.assign(highwaterTD=None)
        ## Recover the tradeDate on which each row's high-water mark was set.
        for n in range(len(dframe)):
            signalDate = dframe['signalDate'].iloc[n]
            highwater = dframe['highwater'].iloc[n]
            highwaterTD = dfReturns[(dfReturns['signalDate'] < signalDate) & (dfReturns['cumret'] == highwater)][
                'tradeDate'].max()
            dframe.loc[dframe['signalDate'] == signalDate, 'highwaterTD'] = highwaterTD

        ## Keep only the deepest trough per high-water mark, in date order.
        dframe = dframe.sort_values(by='drawdown', ascending=True)
        dframe = dframe.drop_duplicates(subset='highwaterTD')
        dframe = dframe.sort_values(by='tradeDate', ascending=True)

        return dframe[['highwaterTD', 'tradeDate', 'drawdown']]


class MonthlyAttributionReport:
    ### dch this was the original monthly attribution report sheet
    #    templateKey = '1e7PvIUPjMCc-9Kxeg2CuhkYSzuh5IUcTiZd8b5paB40'
    #    folderID = '1Drg9VQ9ILkzCpIXNWd5zHN_isP7R8M-1'

    ## dch my temp copy
    templateKey = '1TrZrvGIlIF4NVdBa9hZCn9CjMiX0ZdFmXaW6IioJfLo'
    folderID = '1Y_Jo1ktmbSelvPqFxHDWI2FQeBMwaXm_'

    def __init__(self, simConfig, month):
        """Load and window the saved attribution frames for one report month.

        Parameters
        ----------
        simConfig : simulation configuration exposing ``endTD`` and
            loadAttributionFile2().
        month : report month in yyyymm form (as accepted by PyMonth helpers).

        Builds month-to-date frames with cumulative columns:
          self.dfAttrib  -- per-factor fexp/facret/contrib plus cumret, cumcontrib
          self.dfCateg   -- cumcontrib summed per (tradeDate, category)
          self.dfPnL     -- per (category, item) daily and cumulative P&L (USD)
          self.dfCtryAtt -- per (country, factor) cumulative contributions
          self.dfSctrAtt -- per (sector, factor) cumulative contributions
        """
        self.simConfig = simConfig
        self.month = month
        # Report window: first weekday of the month through the earlier of the
        # simulation end date and the month's last weekday.
        self.sDate = PyMonth.firstWeekday(month)
        self.eDate = min(simConfig.endTD, PyMonth.lastWeekday(month))
        self.updateTime = datetime.datetime.now()

        PyLog.info(f"MonthlyAttributionReport month {month} sdate {self.sDate} edate {self.eDate}")
        dfAttrib, dfPnL, dfCtryAtt, dfSctrAtt = simConfig.loadAttributionFile2()

        # Window to the month, then recompute cumulative factor return and
        # contribution within each factor.
        dfAttrib = dfAttrib[dfAttrib['tradeDate'] >= self.sDate]
        dfAttrib = dfAttrib[dfAttrib['tradeDate'] <= self.eDate]
        dfAttrib = dfAttrib[['tradeDate', 'category', 'factor', 'fexp', 'facret', 'contrib']]. \
            sort_values(by=['factor', 'tradeDate'], ascending=True). \
            groupby('factor').apply(lambda dfm: dfm.assign(cumret=dfm['facret'].cumsum(),
                                                           cumcontrib=dfm['contrib'].cumsum())). \
            reset_index(drop=True)
        dfAttrib = dfAttrib[['tradeDate', 'category', 'factor', 'fexp', 'facret', 'cumret', 'contrib', 'cumcontrib']]
        self.dfAttrib = dfAttrib

        self.dfCateg = dfAttrib.groupby(['tradeDate', 'category']).agg({'cumcontrib': sum}).reset_index()

        # Same windowing + cumulative treatment for P&L (per item).
        dfPnL = dfPnL[dfPnL['tradeDate'] >= self.sDate]
        dfPnL = dfPnL[dfPnL['tradeDate'] <= self.eDate]
        dfPnL = dfPnL[['tradeDate', 'category', 'item', 'totalPnLUSD']]. \
            sort_values(by=['item', 'tradeDate'], ascending=True). \
            groupby('item').apply(lambda dfm: dfm.assign(cumPnLUSD=dfm['totalPnLUSD'].cumsum())). \
            reset_index(drop=True)
        self.dfPnL = dfPnL

        # Country-level attribution, cumulative per (country, factor).
        dfCtryAtt = dfCtryAtt[dfCtryAtt['tradeDate'] >= self.sDate]
        dfCtryAtt = dfCtryAtt[dfCtryAtt['tradeDate'] <= self.eDate]
        dfCtryAtt = dfCtryAtt[['tradeDate', 'country', 'category', 'factor', 'fexp', 'facret', 'contrib']]. \
            sort_values(by=['country', 'factor', 'tradeDate'], ascending=True). \
            groupby(['country', 'factor']).apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
            reset_index(drop=True)
        self.dfCtryAtt = dfCtryAtt

        # Sector-level attribution, cumulative per (sector, factor).
        dfSctrAtt = dfSctrAtt[dfSctrAtt['tradeDate'] >= self.sDate]
        dfSctrAtt = dfSctrAtt[dfSctrAtt['tradeDate'] <= self.eDate]
        dfSctrAtt = dfSctrAtt[['tradeDate', 'sector', 'category', 'factor', 'fexp', 'facret', 'contrib']]. \
            sort_values(by=['sector', 'factor', 'tradeDate'], ascending=True). \
            groupby(['sector', 'factor']).apply(lambda dfm: dfm.assign(cumcontrib=dfm['contrib'].cumsum())). \
            reset_index(drop=True)
        self.dfSctrAtt = dfSctrAtt

    def copySpreadsheet(self):
        """Publish the report: copy the template spreadsheet into the report
        folder under this month's name, then delete any pre-existing file(s)
        in the folder that carried the same title."""
        gdrive = GoogleDrive()
        tgtName = 'SimProd Monthly Attribution Report - {}'.format(self.month)

        # Find older copies with the same title (to remove after the new copy lands).
        existing = gdrive.getFileList(self.folderID)
        if len(existing) > 0:
            existing = existing[existing['title'] == tgtName]

        copyBody = {'parents': [{'kind': 'drive#file', 'id': self.folderID}], 'title': tgtName}
        gdrive.gdrive.auth.service.files(). \
            copy(fileId=self.templateKey, body=copyBody).execute()

        if len(existing) > 0:
            for fileID in existing['id']:
                gdrive.gdrive.auth.service.files().delete(fileId=fileID).execute()

    def updateAll(self, interval=30):
        """Refresh every worksheet tab, then publish a copy of the spreadsheet.

        Each step is isolated: a failure is logged with its traceback and the
        remaining steps still run. A pause of ``interval`` seconds precedes
        every tab update after the first (presumably to respect the Sheets API
        rate limits -- TODO confirm); the sleep sits inside the try so a sleep
        failure is caught by the same handler, as in the original code.

        Parameters
        ----------
        interval : int -- seconds to pause between steps; also forwarded to the
            country/positions tab updaters.
        """
        ## (pauseFirst, step) pairs in the required execution order.
        steps = [
            (False, self.updatePnLTab),
            (True, self.updateFactorAttribTab),
            (True, self.updateStrategyDataTab),
            (True, lambda: self.updateCountryDataTabs(interval=interval)),
            (True, lambda: self.updatePositionsTab(interval=interval)),
            (False, self.copySpreadsheet),
        ]
        for pauseFirst, step in steps:
            try:
                if pauseFirst:
                    PyUtil.sleep(interval)
                step()
            except Exception:
                PyLog.info(traceback.format_exc())

    def updatePnLTab(self):
        """Push the month-to-date P&L frame to the 'PnL' worksheet, after
        converting tradeDate to a day offset from GoogleSheets.BASEDT and
        blanking NaNs for the Sheets API."""
        PyLog.info('Updating P&L Tab')
        wsName = 'PnL'
        frame = self.dfPnL.copy()
        spans = [PyDate.span(GoogleSheets.BASEDT, td) for td in frame['tradeDate']]
        frame = frame.assign(tradeDate=spans).fillna('')
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        wsheet.update('A1', self.month)
        wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        wsheet.spreadsheet.values_clear('{}!A4:E2000'.format(wsName))
        wsheet.update('A4', frame.values.tolist())

    def updateFactorAttribTab(self):
        """Push the month-to-date factor attribution frame to the
        'FactorAttrib' worksheet (tradeDate converted to a BASEDT day offset,
        NaNs blanked for the Sheets API)."""
        PyLog.info('Updating FactorAttrib Tab')
        wsName = 'FactorAttrib'
        frame = self.dfAttrib.copy()
        spans = [PyDate.span(GoogleSheets.BASEDT, td) for td in frame['tradeDate']]
        frame = frame.assign(tradeDate=spans).fillna('')
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        wsheet.update('A1', self.month)
        wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        wsheet.spreadsheet.values_clear('{}!A4:H2000'.format(wsName))
        wsheet.update('A4', frame.values.tolist())

    def updateCategoryAttribTab(self):
        """Push the month-to-date category attribution frame to the
        'CategoryAttrib' worksheet (tradeDate converted to a BASEDT day offset,
        NaNs blanked for the Sheets API)."""
        PyLog.info('Updating CategoryAttrib Tab')
        wsName = 'CategoryAttrib'
        frame = self.dfCateg.copy()
        spans = [PyDate.span(GoogleSheets.BASEDT, td) for td in frame['tradeDate']]
        frame = frame.assign(tradeDate=spans).fillna('')
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        wsheet.update('A1', self.month)
        wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        wsheet.spreadsheet.values_clear('{}!A4:C2000'.format(wsName))
        wsheet.update('A4', frame.values.tolist())

    def compileMonthlyAttributionSubFrame(self, pframe, column):
        """Pivot an attribution slice into a tradeDate x factor table for a sheet.

        Parameters
        ----------
        pframe : DataFrame with 'tradeDate', 'factor' and the value column.
        column : name of the value column to pivot ('cumcontrib', 'fexp', 'cumret').

        Returns a frame whose factor columns are ordered by the value of
        ``column`` on the latest tradeDate (descending), with tradeDate
        converted to a day offset from GoogleSheets.BASEDT and NaNs blanked
        for the Sheets API.  If the pivot fails on duplicate
        (tradeDate, factor) rows, duplicates are dropped (keeping the last)
        and the pivot is retried.
        """
        dfm = pframe[~pframe[column].isnull()].copy()
        # Column order: factors ranked by the latest tradeDate's value.
        columns = dfm[dfm['tradeDate'] == dfm['tradeDate'].max()]. \
            sort_values(by=column, ascending=False)['factor'].tolist()
        try:
            dfm = dfm.pivot(index='tradeDate', columns='factor', values=column)[columns].reset_index().sort_values(
                by='tradeDate', ascending=True)
        except Exception as e:
            PyLog.info(
                "dch::Simulate.py.compileMonthlyAttributionSubFrame :--> Failed to pivot on dfm.  Index=tradeDate, columns=factor.  Reset index and sort_values by tradeDate e({})".format(
                    e))
            try:
                PyLog.info(f"dfm with dupees len:{len(dfm)}")
                dfm.drop_duplicates(inplace=True, subset=['tradeDate', 'factor'], keep='last')
                PyLog.info("dropped duplicates updated dfm to len({})".format(len(dfm)))
                dfm = dfm.pivot(index='tradeDate', columns='factor', values=column)[columns].reset_index().sort_values(
                    by='tradeDate', ascending=True)
            except Exception as ef:
                # Previously the retry failure was swallowed without a message;
                # log it so the bad state is diagnosable (dfm is left unpivoted
                # here, so the tradeDate conversion below will likely fail upstream).
                PyLog.info(
                    "dch::Simulate.py.compileMonthlyAttributionSubFrame :--> retry pivot failed ERR({})".format(ef))
                PyLog.info(dfm.index.tolist())

        dfm.columns.name = None
        dfm = dfm.assign(tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dfm['tradeDate']]).fillna('')
        return dfm

    def updateStrategyDataTab(self):
        PyLog.info('Updating Strategy Data Tab')
        ## Strategy Data
        wsName = 'Strategy Data'
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet

        output = wsheet.update('A1', self.month)
        output = wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))

        ## MTD cumret -------------------------------------------------------------------------------------------
        dframe = self.dfAttrib.groupby('tradeDate').agg({'cumcontrib': sum}).reset_index()
        dframe = dframe.assign(
            tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dframe['tradeDate']]).fillna('')
        output = wsheet.spreadsheet.values_clear('{}!A5:B30'.format(wsName))
        output = wsheet.update('A5', dframe.values.tolist())

        ## Category Contributions -------------------------------------------------------------------------------
        dframe = self.dfCateg[self.dfCateg['tradeDate'] == self.dfCateg['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False).drop(columns='tradeDate')
        output = wsheet.spreadsheet.values_clear('{}!A34:B44'.format(wsName))
        output = wsheet.update('A34', dframe.values.tolist())

        categories = dframe['category'].tolist()
        dframe = self.dfCateg.pivot(index='tradeDate', columns='category', values='cumcontrib')[categories]. \
            reset_index()
        dframe.columns.name = None
        dframe = dframe.assign(
            tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dframe['tradeDate']]).fillna('')
        output = wsheet.spreadsheet.values_clear('{}!D33:O59'.format(wsName))
        output = wsheet.update('D33', [dframe.columns.tolist()] + dframe.values.tolist())

        ## Alpha Themes ------------------------------------------------------------------------------------------
        pframe = self.dfAttrib[[x.startswith('alpha_') or y == 'specret'
                                for x, y in zip(self.dfAttrib['category'], self.dfAttrib['factor'])]]

        ## Alpha Theme Contributions
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False)

        dfm = dfm[['factor', 'cumcontrib']].fillna('')

        output = wsheet.spreadsheet.values_clear('{}!A63:B73'.format(wsName))
        output = wsheet.update('A63', dfm.values.tolist())

        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumcontrib')
        tgtRow = 62

        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))

        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Alpha Theme Exposures
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'fexp')
        tgtRow = 91
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Alpha Theme Returns
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumret')
        tgtRow = 120
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Risk Themes ------------------------------------------------------------------------------------------
        pframe = self.dfAttrib[self.dfAttrib['category'].isin(['risk_exposure'])]

        ## Risk Theme Contributions
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False)
        dfm = dfm[['factor', 'cumcontrib']].fillna('')
        output = wsheet.spreadsheet.values_clear('{}!A150:B160'.format(wsName))
        output = wsheet.update('A150', dfm.values.tolist())

        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumcontrib')
        tgtRow = 149
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Risk Theme Exposures
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'fexp')
        tgtRow = 178
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Risk Theme Returns
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumret')
        tgtRow = 207
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Country Tilts ----------------------------------------------------------------------------------------
        pframe = self.dfAttrib[self.dfAttrib['category'].isin(['tilt_COUNTRY'])]

        ## Country Tilt Contributions
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False)
        dfm = dfm[['factor', 'cumcontrib']].fillna('')
        output = wsheet.spreadsheet.values_clear('{}!A237:B250'.format(wsName))
        output = wsheet.update('A237', dfm.values.tolist())

        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumcontrib')
        tgtRow = 236
        output = wsheet.spreadsheet.values_clear('{}!D{}:R{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Country Tilt Exposures
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'fexp')
        tgtRow = 265
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Country Tilt Returns
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumret')
        tgtRow = 294
        output = wsheet.spreadsheet.values_clear('{}!D{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Industry Tilts ---------------------------------------------------------------------------------------
        pframe = self.dfAttrib[self.dfAttrib['category'].isin(['tilt_INDUSTRY'])]

        ## Industry Tilt Contributions
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False)
        dfm = dfm[['factor', 'cumcontrib']].fillna('')
        industries = dfm['factor'].head(5).tolist() + dfm['factor'].tail(5).tolist()
        dfm = pd.concat([dfm, pd.DataFrame({'factor': 'total', 'cumcontrib': dfm['cumcontrib'].sum()}, index=[0])],
                        ignore_index=True)
        dfm = dfm[dfm['factor'].isin(industries + ['total'])]
        output = wsheet.spreadsheet.values_clear('{}!A324:B334'.format(wsName))
        output = wsheet.update('A324', dfm.values.tolist())

        pframe = pframe[pframe['factor'].isin(industries)]

        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumcontrib')
        tgtRow = 323
        output = wsheet.spreadsheet.values_clear('{}!D{}:N{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Industry Tilt Exposures
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'fexp')
        tgtRow = 352
        output = wsheet.spreadsheet.values_clear('{}!D{}:N{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Industry Tilt Returns
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumret')
        tgtRow = 381
        output = wsheet.spreadsheet.values_clear('{}!D{}:N{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Size Tilts ---------------------------------------------------------------------------------------
        pframe = self.dfAttrib[self.dfAttrib['category'].isin(['tilt_SIZE'])]

        ## Size Tilt Contributions
        dfm = pframe[pframe['tradeDate'] == pframe['tradeDate'].max()]. \
            sort_values(by='cumcontrib', ascending=False)
        dfm = dfm[['factor', 'cumcontrib']].fillna('')
        output = wsheet.spreadsheet.values_clear('{}!A411:B414'.format(wsName))
        output = wsheet.update('A411', dfm.values.tolist())

        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumcontrib')
        tgtRow = 410
        output = wsheet.spreadsheet.values_clear('{}!D{}:H{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Size Tilt Exposures
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'fexp')
        tgtRow = 439
        output = wsheet.spreadsheet.values_clear('{}!D{}:H{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

        ## Industry Tilt Returns
        dfm = self.compileMonthlyAttributionSubFrame(pframe, 'cumret')
        tgtRow = 468
        output = wsheet.spreadsheet.values_clear('{}!D{}:H{}'.format(wsName, tgtRow, tgtRow + 26))
        output = wsheet.update('D{}'.format(tgtRow), [dfm.columns.tolist()] + dfm.values.tolist())

    def updateCountryDataTabs(self, interval=30):
        """Refresh the 'Country Aggr Data' and 'Country Detail Data' tabs.

        Builds per-country cumulative attribution panels (alpha themes,
        specific return, spread, country/industry/size tilts, risk) from
        self.dfCtryAtt and writes them to the template spreadsheet.

        :param interval: seconds to sleep between the two tab updates, to
                         stay under the Google Sheets API rate limits.
        """

        PyLog.info('Updating Country Data Tab')

        modelName = Strategy.getModelName(self.simConfig.stratName)
        themes = FactorModel.getAlphaThemes(modelName)
        rfactors = FactorModel.getRiskThemes(modelName)

        ## Alpha-theme contribution summed per country/date, joined with the
        ## specific-return contribution; 'spread' is their sum.
        ## Note: string aggregators ('sum') — passing callables to agg() is
        ## deprecated in pandas 2.x.
        dfAggr = self.dfCtryAtt[self.dfCtryAtt['factor'].isin(themes)]
        dfAggr = dfAggr.groupby(['country', 'tradeDate']). \
            agg({'cumcontrib': 'sum'}).reset_index().rename(columns={'cumcontrib': 'themes'})
        dfAggr = dfAggr.merge(self.dfCtryAtt[self.dfCtryAtt['factor'] == 'specret'] \
                                  [['country', 'tradeDate', 'cumcontrib']]. \
                              rename(columns={'cumcontrib': 'specret'}), how='left', on=['country', 'tradeDate'])
        dfAggr = dfAggr.assign(spread=dfAggr['themes'] + dfAggr['specret'])

        ## Country / industry / size tilt contributions, categorized by the
        ## factor-name prefix (e.g. 'country_CN' -> 'country').
        dfm = self.dfCtryAtt[[x.split('_')[0] in ['country', 'ind', 'size'] for x in self.dfCtryAtt['factor']]].copy()
        dfm = dfm.assign(category=[x.split('_')[0] for x in dfm['factor']])
        dfm = dfm[['country', 'tradeDate', 'category', 'cumcontrib']]
        dfm = dfm.groupby(['country', 'category', 'tradeDate']).agg({'cumcontrib': 'sum'}).reset_index()
        dfm = dfm.pivot(index=['country', 'tradeDate'], columns='category', values='cumcontrib'). \
            rename(columns={'country': 'ctry'}).reset_index()
        dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'tradeDate'])

        PyLog.info(dfm)

        ## Risk bucket: market plus all risk-theme factors, summed per
        ## country/date.  Best-effort: a failure here is logged and the
        ## rest of the tab update still runs.
        try:
            dfm = self.dfCtryAtt[self.dfCtryAtt['factor'].isin(['market'] + rfactors)][
                ['country', 'factor', 'tradeDate', 'cumcontrib']]
            dfm = dfm.groupby(['country', 'tradeDate']).agg({'cumcontrib': 'sum'}).reset_index().rename(
                columns={'cumcontrib': 'risk'})
            PyLog.info(dfm)
            PyLog.info(dfAggr)
            dfm.drop_duplicates(inplace=True, subset=['tradeDate', 'country'], keep='last')
            dfAggr.drop_duplicates(inplace=True, subset=['tradeDate', 'country'], keep='last')
            PyLog.info(dfm)
            PyLog.info(dfAggr)
            dfAggr = dfAggr.merge(dfm, how='outer', on=['country', 'tradeDate'])

        except Exception as e:
            PyLog.info("dch::Simulate.py.updateCountryDataTabs e({})".format(e))

        wsName = 'Country Aggr Data'

        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.month)
        output = wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))


        def updatePanel(tgtCol, tgtRow):
            # One panel per category: dates down the rows, countries across
            # the columns, ordered by the latest value of tgtCol.
            countries = dfAggr.sort_values(by='tradeDate', ascending=False). \
                drop_duplicates('country').sort_values(by=tgtCol, ascending=False)['country'].tolist()
            pfm = dfAggr[['country', 'tradeDate', tgtCol]]. \
                pivot(index='tradeDate', columns='country', values=tgtCol)[countries].reset_index()
            pfm.columns.name = None
            pfm = pfm.assign(tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in pfm['tradeDate']]).fillna('')
            output = wsheet.spreadsheet.values_clear('{}!A{}:O{}'.format(wsName, tgtRow, tgtRow + 26))
            output = wsheet.update('A{}'.format(tgtRow), [pfm.columns.tolist()] + pfm.values.tolist())

        updatePanel(tgtCol='themes', tgtRow=4)
        updatePanel(tgtCol='specret', tgtRow=33)
        updatePanel(tgtCol='spread', tgtRow=62)
        updatePanel(tgtCol='ctry', tgtRow=91)
        updatePanel(tgtCol='ind', tgtRow=120)
        updatePanel(tgtCol='size', tgtRow=149)
        updatePanel(tgtCol='risk', tgtRow=178)

        PyUtil.sleep(interval)

        ## Country Detail Data -------------------------------------------------------------------------------

        dfThemes = self.dfCtryAtt[self.dfCtryAtt['factor'].isin(themes)] \
            [['country', 'tradeDate', 'factor', 'cumcontrib']]. \
            rename(columns={'factor': 'theme'})

        wsName = 'Country Detail Data'
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.month)
        output = wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))

        categories = ['themes', 'specret', 'spread', 'ctry', 'ind', 'size', 'risk']
        countries = ['CN', 'XH', 'HK', 'JP', 'KR', 'TW', 'AU', 'IN']

        tgtRow = 4
        for country in countries:
            # Left panel: category time series for this country, columns
            # ordered by the most recent value.
            pfm = dfAggr[dfAggr['country'] == country][['country', 'tradeDate'] + categories]
            columns = pfm.sort_values(by='tradeDate').iloc[-1][categories].sort_values(ascending=False).index.tolist()
            pfm = pfm[['country', 'tradeDate'] + columns]
            pfm = pfm.assign(tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in pfm['tradeDate']]).fillna('')
            output = wsheet.spreadsheet.values_clear('{}!A{}:I{}'.format(wsName, tgtRow, tgtRow + 26))
            output = wsheet.update('A{}'.format(tgtRow), [pfm.columns.tolist()] + pfm.values.tolist())
            # Right panel: per-theme contributions.  Local name so we do not
            # clobber the model-level 'themes' list used above.
            pfm = dfThemes[dfThemes['country'] == country].sort_values(by='tradeDate', ascending=False)
            ctryThemes = pfm.drop_duplicates('theme').sort_values(by='cumcontrib', ascending=False)['theme'].tolist()
            pfm = pfm.pivot(index='tradeDate', columns='theme', values='cumcontrib')[ctryThemes].reset_index(). \
                sort_values(by='tradeDate')
            pfm.columns.name = None
            pfm = pfm.assign(tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in pfm['tradeDate']]).fillna('')
            output = wsheet.spreadsheet.values_clear('{}!K{}:U{}'.format(wsName, tgtRow, tgtRow + 26))
            output = wsheet.update('K{}'.format(tgtRow), [pfm.columns.tolist()] + pfm.values.tolist())
            tgtRow += 28

    def updatePositionsTab(self, interval=30):
        """Refresh the 'Positions' tab and the country/sector P&L summaries
        on the 'Strategy Summary' tab.

        Loads the post-trade position file for every weekday between
        self.sDate and self.eDate, averages begin/end-of-day weights, and
        aggregates P&L per asset over the period.

        :param interval: seconds to sleep between tab updates (Google
                         Sheets API rate limiting).
        """

        PyLog.info('Updating Positions Tab')

        lstDates = PyDate.sequenceWeekday(self.sDate, self.eDate)
        dfPositions = list()
        for tradeDate in lstDates:
            pfm = self.simConfig.loadPositionFile(tradeDate=tradeDate, step='posttrade')
            # Average of begin-of-day and end-of-day weights for the day.
            pfm = pfm.assign(avgWeights=(pfm['bodWeights'].fillna(0.0) + pfm['eodWeights'].fillna(0.0)) / 2.0)
            pfm = pfm.assign(tradeDate=tradeDate)
            dfPositions.append(pfm[['tradeDate', 'assetKey', 'avgWeights', 'pnlHoldUSD', 'pnlFxUSD', 'totalPnLUSD']])

        dfPositions = pd.concat(dfPositions, ignore_index=True)
        dfPositions = dfPositions.assign(avgGrossExp=abs(dfPositions['avgWeights']))
        # Expand onto the full date x asset grid so days an asset was not
        # held contribute zeros to the period averages below.
        assets = dfPositions['assetKey'].unique().tolist()
        dfPositions = pd.DataFrame({
            'tradeDate': np.repeat(lstDates, len(assets)),
            'assetKey': len(lstDates) * assets}). \
            merge(dfPositions, how='left', on=['tradeDate', 'assetKey']).fillna(0.0)
        # String aggregators: passing np.mean / builtin sum to agg() is
        # deprecated in pandas 2.x.
        dfAggr = dfPositions.groupby('assetKey'). \
            agg({'avgWeights': 'mean', 'avgGrossExp': 'mean',
                 'pnlHoldUSD': 'sum', 'pnlFxUSD': 'sum', 'totalPnLUSD': 'sum'}). \
            reset_index()
        # Decorate with static security data and the latest sector/industry
        # classification.
        mframe = SignalMgr.getStatic('model_universe_frame')
        mframe = mframe.assign(ticker=mframe['RkdTicker'].fillna(mframe['PermIDTicker']))
        mframe = mframe.assign(sector=SignalMgr.get('sector', lstDates[-1]).reindex(mframe['assetKey']).tolist())
        mframe = mframe.assign(industry=SignalMgr.get('industry', lstDates[-1]).reindex(mframe['assetKey']).tolist())
        dfAggr = dfAggr.merge(mframe[['assetKey', 'ticker', 'ModelCntry', 'sector', 'industry', 'Name']],
                              how='left', on='assetKey'). \
            rename(columns={'ModelCntry': 'modelCtry', 'Name': 'securityName'})

        wsName = 'Positions'
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.month)
        output = wsheet.update('B1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        dfAggr = dfAggr.sort_values(by='totalPnLUSD', ascending=False)
        columns = ['assetKey', 'ticker', 'modelCtry', 'sector', 'industry', 'securityName',
                   'avgWeights', 'avgGrossExp', 'pnlHoldUSD', 'pnlFxUSD', 'totalPnLUSD']
        dfAggr = dfAggr[columns]
        output = wsheet.spreadsheet.values_clear('{}!A3:K2000'.format(wsName))
        output = wsheet.update('A3', [dfAggr.columns.tolist()] + dfAggr.values.tolist())

        PyUtil.sleep(interval)

        ## write summary to Strategy tab ----------------------------------------------------------------

        wsName = 'Strategy Summary'
        wsheet = GoogleSheets(spreadsheetKey=self.templateKey, worksheetName=wsName).wsheet

        dfCtry = dfAggr.groupby('modelCtry'). \
            agg({'avgWeights': 'sum', 'avgGrossExp': 'sum', 'totalPnLUSD': 'sum'}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        output = wsheet.spreadsheet.values_clear('{}!L5:Q19'.format(wsName))
        output = wsheet.update('L5', [dfCtry.columns.tolist()] + dfCtry.values.tolist())

        dfSctr = dfAggr.groupby('sector'). \
            agg({'avgWeights': 'sum', 'avgGrossExp': 'sum', 'totalPnLUSD': 'sum'}).reset_index(). \
            sort_values(by='totalPnLUSD', ascending=False)
        output = wsheet.spreadsheet.values_clear('{}!S5:X19'.format(wsName))
        output = wsheet.update('S5', [dfSctr.columns.tolist()] + dfSctr.values.tolist())


class MonthlyMarketingSpreadsheet:
    """Publish monthly country/sector/size exposure charts and summary
    tables for the marketing deck to a fixed Google spreadsheet.

    Daily net/gross exposures are rebuilt from the simulation's risk
    reports, normalized by gross market value, cached to disk, and then
    written tab-by-tab ('CtryData', 'SctrData', 'SizeData', 'Summary').
    """

    # Target Google spreadsheet (marketing deck data source).
    spreadsheetKey = '10HO4M6y2IO2hF21a0M6rbRkffGpYyekGLIiytRhuty4'

    def __init__(self, simConfig):
        """:param simConfig: simulation config providing startTD, simDir and
        per-day risk reports."""
        self.simConfig = simConfig
        self.updateTime = datetime.datetime.now()
        # Cache of the compiled exposure frames, so re-runs only recompute
        # the most recent month(s) instead of the whole history.
        self.cachefile = os.path.join(simConfig.simDir, 'monthly_marketing_spreadsheet_cache.plz4')

    @staticmethod
    def _completeGrid(frames, key):
        """Concatenate per-day exposure slices and expand onto the full
        tradeDate x *key* grid, filling absent combinations with 0.0."""
        dfm = pd.concat(frames, ignore_index=True)
        dates = dfm['tradeDate'].unique().tolist()
        keys = dfm[key].unique().tolist()
        return pd.DataFrame({
            'tradeDate': len(keys) * dates,
            key: np.repeat(keys, len(dates))}). \
            merge(dfm, how='left', on=['tradeDate', key]). \
            fillna(0.0).sort_values(by=['tradeDate', key])

    def compileCategoryExposureFrames(self):
        """Build (dfCtry, dfSctr, dfSize) frames of daily net/gross exposure
        as a fraction of gross market value.

        Previously compiled history is read from self.cachefile and only the
        dates at/after the cache's last complete month are recomputed; the
        merged result is written back to the cache.

        :return: tuple of (country, sector, size) exposure DataFrames, each
                 with columns tradeDate / <key> / net / gross.
        """

        startTD = self.simConfig.startTD
        endTD = PyMonth.lastWeekday(PyMonth.prevMonth(PyMonth.now()))

        lstCtry = list()
        lstSctr = list()
        lstSize = list()

        if FileUtil.exists(self.cachefile):
            dct = FileUtil.load(self.cachefile)
            # Restart from the first weekday of the least-advanced frame's
            # last month; keep cached rows strictly before that date.
            startTD = min([dct['dfCtry']['tradeDate'].max(),
                           dct['dfSctr']['tradeDate'].max(),
                           dct['dfSize']['tradeDate'].max()])
            startTD = PyMonth.firstWeekday(startTD)
            dfm = dct['dfCtry']
            lstCtry.append(dfm[dfm['tradeDate'] < startTD])
            dfm = dct['dfSctr']
            lstSctr.append(dfm[dfm['tradeDate'] < startTD])
            dfm = dct['dfSize']
            lstSize.append(dfm[dfm['tradeDate'] < startTD])

        lstTD = PyDate.sequenceWeekday(startTD, endTD)
        sectorMap = Industry.getMap()[['industry', 'sector']].drop_duplicates(subset='industry')
        for tradeDate in lstTD:
            dctRisk = self.simConfig.loadRiskReport(tradeDate=tradeDate)
            if dctRisk != {}:
                dfExpo = dctRisk['exposures']
                # Normalize all exposures by gross market value.
                GMV = dfExpo[dfExpo['factor'] == 'market']['gross'].iloc[0]
                dfCtry = dfExpo[[x.startswith('country_') for x in dfExpo['factor']]]
                dfCtry = dfCtry[['factor', 'net', 'gross']]
                dfCtry = dfCtry.assign(country=[x.split('_')[-1] for x in dfCtry['factor']])
                dfCtry = dfCtry[['country', 'net', 'gross']]
                dfCtry = dfCtry.assign(tradeDate=tradeDate)
                dfCtry = dfCtry.assign(net=dfCtry['net'] / GMV)
                dfCtry = dfCtry.assign(gross=dfCtry['gross'] / GMV)
                dfCtry = dfCtry[['tradeDate', 'country', 'net', 'gross']]
                lstCtry.append(dfCtry)
                # Industry exposures are rolled up to sectors via the map.
                # String aggregator 'sum': callables in agg() are deprecated
                # in pandas 2.x.
                dfIndu = dfExpo[[x.startswith('ind_') for x in dfExpo['factor']]]
                dfIndu = dfIndu[['factor', 'net', 'gross']]
                dfIndu = dfIndu.assign(industry=[re.sub('^ind_', '', x) for x in dfIndu['factor']])
                dfIndu = dfIndu.merge(sectorMap, how='left', on='industry')
                dfSctr = dfIndu.groupby('sector').agg({'net': 'sum', 'gross': 'sum'}).reset_index()
                dfSctr = dfSctr.assign(tradeDate=tradeDate)
                dfSctr = dfSctr.assign(net=dfSctr['net'] / GMV)
                dfSctr = dfSctr.assign(gross=dfSctr['gross'] / GMV)
                dfSctr = dfSctr[['tradeDate', 'sector', 'net', 'gross']]
                lstSctr.append(dfSctr)
                dfSize = dfExpo[[x.startswith('size_') for x in dfExpo['factor']]]
                dfSize = dfSize[['factor', 'net', 'gross']]
                dfSize = dfSize.assign(size=[x.split('_')[-1] for x in dfSize['factor']])
                dfSize = dfSize[['size', 'net', 'gross']]
                dfSize = dfSize.assign(tradeDate=tradeDate)
                dfSize = dfSize.assign(net=dfSize['net'] / GMV)
                dfSize = dfSize.assign(gross=dfSize['gross'] / GMV)
                dfSize = dfSize[['tradeDate', 'size', 'net', 'gross']]
                lstSize.append(dfSize)

        dfCtry = self._completeGrid(lstCtry, 'country')
        dfSctr = self._completeGrid(lstSctr, 'sector')
        dfSize = self._completeGrid(lstSize, 'size')

        FileUtil.save({'dfCtry': dfCtry, 'dfSctr': dfSctr, 'dfSize': dfSize}, filename=self.cachefile)

        return dfCtry, dfSctr, dfSize

    def updateSpreadsheet(self, month, interval=10):
        """Write the exposure charts and summary tables through *month*.

        :param month: last month to include; frames are truncated at that
                      month's last weekday.
        :param interval: seconds to sleep between sheet writes (Google
                         Sheets API rate limiting).
        """

        dfCtry, dfSctr, dfSize = self.compileCategoryExposureFrames()

        dfCtry = dfCtry[dfCtry['tradeDate'] <= PyMonth.lastWeekday(month)]
        dfSctr = dfSctr[dfSctr['tradeDate'] <= PyMonth.lastWeekday(month)]
        dfSize = dfSize[dfSize['tradeDate'] <= PyMonth.lastWeekday(month)]

        def writePanel(frame, key, clearRange, anchor):
            # Pivot gross exposure to dates-down / key-across (columns ordered
            # by latest gross), then clear + rewrite the range and pause for
            # rate limiting.  Reads wsheet/wsName from the enclosing scope.
            columns = frame[frame['tradeDate'] == frame['tradeDate'].max()]. \
                sort_values(by='gross', ascending=False)[key].tolist()
            dfm = frame.pivot(index='tradeDate', columns=key, values='gross'). \
                reset_index().sort_values(by='tradeDate', ascending=False)[['tradeDate'] + columns]
            dfm.columns.name = None
            dfm = dfm.assign(
                tradeDate=[PyDate.span(GoogleSheets.BASEDT, x) for x in dfm['tradeDate']]).fillna('')
            output = wsheet.spreadsheet.values_clear('{}!{}'.format(wsName, clearRange))
            output = wsheet.update(anchor, [dfm.columns.tolist()] + dfm.values.tolist())
            PyUtil.sleep(interval)

        def writeSummary(frame, key, clearRange, anchor):
            # Mean/min/max of net and gross per key, sorted by mean gross.
            # String aggregators: np.mean/np.min/np.max in agg() are
            # deprecated in pandas 2.x.
            dfm = frame.assign(meanNet=frame['net'], minNet=frame['net'], maxNet=frame['net'],
                               meanGross=frame['gross'], minGross=frame['gross'], maxGross=frame['gross']). \
                groupby(key). \
                agg({'meanNet': 'mean', 'minNet': 'min', 'maxNet': 'max',
                     'meanGross': 'mean', 'minGross': 'min', 'maxGross': 'max'}).reset_index(). \
                sort_values(by='meanGross', ascending=False)
            output = wsheet.spreadsheet.values_clear('{}!{}'.format(wsName, clearRange))
            output = wsheet.update(anchor, dfm.values.tolist())

        ## Country Chart Data

        PyLog.info('Updating country chart data')

        wsName = 'CtryData'
        wsheet = GoogleSheets(spreadsheetKey=self.spreadsheetKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))

        group1 = ['CN', 'JP', 'HK', 'XH']
        writePanel(dfCtry[dfCtry['country'].isin(group1)], 'country', 'A4:E3000', 'A4')

        group2 = ['AU', 'KR', 'TW', 'NZ']
        writePanel(dfCtry[dfCtry['country'].isin(group2)], 'country', 'G4:K3000', 'G4')

        group3 = ['IN', 'TH', 'ID', 'SG', 'MY', 'PH']
        writePanel(dfCtry[dfCtry['country'].isin(group3)], 'country', 'M4:T3000', 'M4')

        ## Sector Chart Data

        PyLog.info('Updating sector chart data')

        wsName = 'SctrData'
        wsheet = GoogleSheets(spreadsheetKey=self.spreadsheetKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        writePanel(dfSctr, 'sector', 'A4:M3000', 'A4')

        ## Size Chart Data

        PyLog.info('Updating size chart data')

        wsName = 'SizeData'
        wsheet = GoogleSheets(spreadsheetKey=self.spreadsheetKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        writePanel(dfSize, 'size', 'A4:F3000', 'A4')

        ## Summary Table

        PyLog.info('Updating summary tables')

        wsName = 'Summary'
        wsheet = GoogleSheets(spreadsheetKey=self.spreadsheetKey, worksheetName=wsName).wsheet
        output = wsheet.update('A1', self.updateTime.strftime('updated: %m/%d/%Y %H:%M:%S'))
        output = wsheet.update('B2', PyDate.span(GoogleSheets.BASEDT, dfCtry['tradeDate'].min()))
        output = wsheet.update('B3', PyDate.span(GoogleSheets.BASEDT, dfCtry['tradeDate'].max()))

        writeSummary(dfCtry, 'country', 'A7:H21', 'A7')
        PyUtil.sleep(interval)
        writeSummary(dfSctr, 'sector', 'A26:H37', 'A26')
        PyUtil.sleep(interval)
        writeSummary(dfSize, 'size', 'A45:H49', 'A45')


## ==============================================================================================================
#   Configurations:
#


# Load all trading config files (they define the EGConfig* objects used below).
USER = os.environ.get('USER', 'prod')
config_path = f"/home/{USER}/gqr/model/model/trading/trading_configs/"
config_files = sorted(glob.glob(f"{config_path}/*.cfg"))
# SECURITY NOTE: each .cfg file is executed as arbitrary Python in this
# module's namespace.  Only trusted, version-controlled config files may
# live in config_path.
for fil in config_files:
    with open(fil, 'r') as fh:
        exec(fh.read())
#
#


# A note on these "deepcopy" configs.
# You need to run setPaths().
# Some variables are duplicated between Rebal and Simulation configs; see both.

# EGConfigBaseline : for running a sim close to our paper sim
EGConfigBaseline = copy.deepcopy(EGConfigPaper)
EGConfigBaseline.shortName = 'EGBaseline'
EGConfigBaseline.subdir = 'EGBaseline'
EGConfigBaseline.spreadsheetKey = "1TUof5jtP9utei4BcPBqrOuk-Q5obe_3zhDmbaXHk6Fs"
EGConfigBaseline.startTD = PyDate.asDate('20190101')
EGConfigBaseline.endTD = PyDate.prevWeekday('20240820')
EGConfigBaseline.startingNAV = 100_000_000
EGConfigBaseline.setPaths(EGConfigBaseline.subdir)  # must be called after subdir is set
EGConfigBaseline.memo = f"EGConfigBaseline: NAV:{EGConfigBaseline.startingNAV}|{EGConfigBaseline.startTD} to {EGConfigBaseline.endTD}"

# A few spare simconfigs similar to the paper trading for running scenarios
EGConfigBaseline1 = copy.deepcopy(EGConfigPaper)
EGConfigBaseline1.shortName = 'EGB1'
EGConfigBaseline1.subdir = 'EGBaseline1'
EGConfigBaseline1.spreadsheetKey = "1YplpfThVn56-TIogGhI3c52I_nOCAD20U04b4czmKfI"
#EGConfigBaseline1.spreadsheetKey = None
EGConfigBaseline1.startTD = PyDate.asDate('20210101')
EGConfigBaseline1.endTD = PyDate.prevWeekday('20240901')
EGConfigBaseline1.startingNAV = 500_000_000
# NOTE(review): EGConfigBaseline2 sets 'trimOverborrow' (no trailing 's');
# one of the two spellings is likely a silent no-op — confirm which
# attribute name the rebalance code actually reads.
EGConfigBaseline1.trimOverborrows = True
EGConfigBaseline1.tradeRestrictions = False
EGConfigBaseline1.applyPostOptTrimming = True
# Holding threshold is mirrored onto the rebalance config; presumably a
# minimum position weight (1bp) — verify against Rebalance.
EGConfigBaseline1.holdingThreshold = 0.0001
EGConfigBaseline1.rebalConfig.holdingThreshold = 0.0001
EGConfigBaseline1.gamma = 15
EGConfigBaseline1.rebalConfig.gamma = EGConfigBaseline1.gamma
# max risk var target
EGConfigBaseline1.maxRisk = 0.10
EGConfigBaseline1.rebalConfig.maxRisk = EGConfigBaseline1.maxRisk
#  max ADV day execution + position
EGConfigBaseline1.maxAdvProp = 0.15
EGConfigBaseline1.maxLiquidityBoundLong = 0.35
EGConfigBaseline1.maxLiquidityBoundShort = 0.35
EGConfigBaseline1.rebalConfig.maxAdvProp = EGConfigBaseline1.maxAdvProp
EGConfigBaseline1.rebalConfig.maxLiquidityBoundLong = EGConfigBaseline1.maxLiquidityBoundLong
EGConfigBaseline1.rebalConfig.maxLiquidityBoundShort = EGConfigBaseline1.maxLiquidityBoundShort
EGConfigBaseline1.memo = f"EGConfigBaseline1: NAV:{EGConfigBaseline1.startingNAV}|{EGConfigBaseline1.startTD} to {EGConfigBaseline1.endTD}"
EGConfigBaseline1.setPaths(EGConfigBaseline1.subdir)


# EGConfigBaseline2: like Baseline1 but smaller NAV and no trade-universe
# restriction.
EGConfigBaseline2 = copy.deepcopy(EGConfigPaper)
EGConfigBaseline2.shortName = 'EGB2'
EGConfigBaseline2.subdir = 'EGBaseline2'
EGConfigBaseline2.tradeUniverseName=None
EGConfigBaseline2.rebalConfig.tradeUniverseName=None
EGConfigBaseline2.spreadsheetKey = "1X0fGtBeHrf2SuRMsyAt2KRYNOPGt1xaL0fmOSWLLuKQ"
#EGConfigBaseline2.spreadsheetKey = None
EGConfigBaseline2.startTD = PyDate.asDate('20210101')
EGConfigBaseline2.endTD = PyDate.prevWeekday('20240901')
EGConfigBaseline2.startingNAV = 100_000_000
# NOTE(review): EGConfigBaseline1 sets 'trimOverborrows' (with trailing
# 's'); one of the two spellings is likely a silent no-op — confirm which
# attribute name the rebalance code actually reads.
EGConfigBaseline2.trimOverborrow = True
EGConfigBaseline2.tradeRestrictions = False
EGConfigBaseline2.applyPostOptTrimming = True
EGConfigBaseline2.holdingThreshold =  0.0001
EGConfigBaseline2.rebalConfig.holdingThreshold = 0.0001
EGConfigBaseline2.gamma = 15
EGConfigBaseline2.rebalConfig.gamma = EGConfigBaseline2.gamma
# max risk var target
EGConfigBaseline2.maxRisk = 0.10
EGConfigBaseline2.rebalConfig.maxRisk = EGConfigBaseline2.maxRisk
#  max ADV day execution + position
EGConfigBaseline2.maxAdvProp = 0.15
EGConfigBaseline2.maxLiquidityBoundLong = 0.35
EGConfigBaseline2.maxLiquidityBoundShort = 0.35
EGConfigBaseline2.rebalConfig.maxAdvProp = EGConfigBaseline2.maxAdvProp
EGConfigBaseline2.rebalConfig.maxLiquidityBoundLong = EGConfigBaseline2.maxLiquidityBoundLong
EGConfigBaseline2.rebalConfig.maxLiquidityBoundShort = EGConfigBaseline2.maxLiquidityBoundShort
EGConfigBaseline2.memo = f"EGConfigBaseline2: NAV:{EGConfigBaseline2.startingNAV}|{EGConfigBaseline2.startTD} to {EGConfigBaseline2.endTD}"
EGConfigBaseline2.setPaths(EGConfigBaseline2.subdir)


# EGConfigBaseline3 ('EGB3'): scenario run cloned from the production config,
# over a fixed 2021-01-01 .. (last weekday before 2024-09-01) window, with
# trade restrictions switched off. All other knobs inherit from EGConfigProd1.
_egb3 = copy.deepcopy(EGConfigProd1)
_egb3.shortName = 'EGB3'
_egb3.subdir = 'EGBaseline3'
_egb3.spreadsheetKey = "1HJI7KNnDbTy6tEETrpDyMloiO27PNDVJS1hjiaRQLPI"
_egb3.startTD, _egb3.endTD = PyDate.asDate('20210101'), PyDate.prevWeekday('20240901')
_egb3.tradeRestrictions = False
_egb3.memo = (f"EGConfigBaseline3: NAV:{_egb3.startingNAV}"
              f"|g:{_egb3.gamma}|{_egb3.startTD} to {_egb3.endTD}")
_egb3.setPaths(_egb3.subdir)
EGConfigBaseline3 = _egb3
del _egb3


# Fourth scenario config: same production clone and window as EGConfigBaseline3
# but with a much higher risk-aversion gamma (35 vs the inherited value).
EGConfigBaseline4 = copy.deepcopy(EGConfigProd1)
EGConfigBaseline4.shortName = 'EGB4'
EGConfigBaseline4.subdir = 'EGBaseline4'
EGConfigBaseline4.spreadsheetKey = "15GRa1tt-F7uYmXaSWPGO5wGWQAquyeOkHYIzs0eRzpM"
# Fixed backtest window: 2021-01-01 through the last weekday before 2024-09-01.
EGConfigBaseline4.startTD = PyDate.asDate('20210101')
EGConfigBaseline4.endTD = PyDate.prevWeekday('20240901')
EGConfigBaseline4.gamma = 35
# Mirror the top-level gamma onto rebalConfig (was hard-coded 35; referencing
# the attribute matches the sibling configs and prevents the copies drifting).
EGConfigBaseline4.rebalConfig.gamma = EGConfigBaseline4.gamma
EGConfigBaseline4.tradeRestrictions = False
EGConfigBaseline4.memo = f"EGConfigBaseline4: NAV:{EGConfigBaseline4.startingNAV}|g:{EGConfigBaseline4.gamma}|{EGConfigBaseline4.startTD} to {EGConfigBaseline4.endTD}"
EGConfigBaseline4.setPaths(EGConfigBaseline4.subdir)
