diff --git a/code/common/backtest.q b/code/common/backtest.q
new file mode 100644
index 000000000..833113bb2
--- /dev/null
+++ b/code/common/backtest.q
@@ -0,0 +1,52 @@
+\d .backtest
+
+/ Params to be passed to .backtest.run to kick off a backtest; edit to fit your use case
+test:`name`version`tabs`sts`ets`replayinterval`timer`timerinterval`timerfunc!(`;1;`;0Np;0Np;0Nn;0b;0Nn;`);
+initRan:0b;
+
+/ TO BE DELETED, TESTING ONLY
+test:`name`version`tabs`sts`ets`replayinterval`timer`timerinterval`timerfunc!(`vwappublisher;1;`trade;2026.01.22D00:00:00.00;2026.01.22D01:00:00.00;0Nn;1b;0D00:10:00.00;`.vwapsub.logvwap);
+
+init:{[]
+    requiredProcs:`backtestdb`backtestpub;
+    .servers.registerfromdiscovery[requiredProcs;1b];
+    .backtest.rdbh:neg first exec w from .servers.SERVERS where procname=`backtestdb;
+    .backtest.pubh:neg first exec w from .servers.SERVERS where procname=`backtestpub;
+    `.u.pub set .backtest.pub;
+    .backtest.initRan:1b;
+    };
+
+/ Receive full message from datareplay; extract details from msg before executing the msg function
+extractmessage:{[msgs]
+    .dbg.msg:msgs;
+    msg:msgs`msg;
+    .backtest.simtime:msgs`time;
+    .backtest.name:first msg;
+    value msg
+    };
+
+pub:{[t;d]
+    .dbg.pub:(t;d);
+    rdbh(`upd;`output;(.z.p;id;simtime;name;d));
+    };
+
+/ Run the backtest; the where key in params is optional
+run:{[params]
+    params:validaterun[params];
+    / Random guid generated to match config to output
+    .backtest.id:first -1?0Ng;
+    / Kick off backtest from backtestpub, which will replay the data back through the process running the backtest
+    pubh(`.backtest.datareplay;params;.backtest.id);
+    };
+
+validaterun:{[params]
+    if[.proc.procname=`backtestpub;'"Backtest should be run from the process you are backtesting, not the backtest instance itself"];
+    if[not initRan;'"Please run .backtest.init to override functions for backtesting before running .backtest.run"];
+    if[not all (key[test] except `where) in key params;'"Please ensure all mandatory params have been populated"];
+    if[count where null `replayinterval _ params;'"Mandatory params (excluding replayinterval) must not be null"];
+    / Remove the optional where key when it is empty
+    if[`where in key params; if[not count params`where;params:`where _ params]];
+    params
+    };
+
+\d .
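For context, a minimal sketch of how the new API would be driven from the process being backtested, assuming a TorQ-style `.servers` discovery setup is loaded and `backtestdb`/`backtestpub` instances are running; the values simply mirror the TESTING dictionary above and are illustrative only, not part of the patch:

/ Sketch only: run from the process under test (per validaterun, never from backtestpub itself)
.backtest.init[];                                        / resolve backtestdb/backtestpub handles and override .u.pub
params:.backtest.test;                                   / start from the param template defined in backtest.q
params[`name`version`tabs]:(`vwappublisher;1;`trade);
params[`sts`ets]:2026.01.22D00:00:00.00 2026.01.22D01:00:00.00;
params[`timer`timerinterval`timerfunc]:(1b;0D00:10:00.00;`.vwapsub.logvwap);
.backtest.run[params];                                   / validates params, tags the run with a fresh guid, kicks off replay via backtestpub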
diff --git a/code/common/datareplay.q b/code/common/datareplay.q
index 0a67eb3e8..8b736b355 100644
--- a/code/common/datareplay.q
+++ b/code/common/datareplay.q
@@ -6,18 +6,19 @@ getBuckets:{[s;e;p](s+p*til(ceiling 1+e%p)-(ceiling s%p))}
 // params[`t] is table data
 // params[`tc] is time column to cut on
 // params[`tn] is table name
-// params[`interval] is the time interval to bucket the messages into.
+// params[`replayinterval] is the data time interval to bucket the messages into.
 tableDataToDataStream:{[params]
+    .dbg.params:params;
     // Sort table by time column.
     params[`t]:params[`tc] xasc delete date from params[`t];
 
     // get all times from table
     t_times:params[`t][params[`tc]];
 
-    $[not null params[`interval];
+    $[not null params[`replayinterval];
         [
             // if there is an interval, bucket messages into this interval
-            // make bukets of ten second intervals
-            times:getBuckets[params[`sts];params[`ets];params[`interval]];
+            // make buckets of ten second intervals
+            times:getBuckets[params[`sts];params[`ets];params[`replayinterval]];
             // put start time in fornt of t_times
             t_times:params[`sts],t_times;
@@ -38,7 +39,7 @@ tableDataToDataStream:{[params]
         // Return table of times and message chunks
         -1_([]time:time;msg:{(`upd;x;y)}[params[`tn]] each msgs)
     ];
-    // if there is no intevral, cut by distinct time.
+    // if there is no interval, cut by distinct time.
     ([] time:distinct t_times; msg:{(`upd;x;$[1