Hello! I am trying to aggregate trading transaction history in a Tabulator table.
-
I make an API call to get the transaction records; there is a limit of 100 records per call (within any specified timeframe). Hence I need to build a local data set that continuously aggregates the new transactions, which is then sent to the table.
-
I got as far as the function below: grab a certain number of records every minute (pulling enough records ensures sufficient overlap so nothing is missed) and use the unique "trade_id" index (tempObj.trade_id = obj.id) to update/append the set. The approach is working (with a few glitches) as long as the data is saved in memory - when I try to switch to saving to a local file, the data set no longer gets appended; the entire set just gets replaced (i.e. it is always capped at 100 records).
-
I want to switch to using MySQL instead of the file, both to get around this problem and because I expect the data to grow indefinitely, so a database seems better suited. Unfortunately my MySQL skills are non-existent; I created the database and connected to it from Node-RED (that's about it). Hoping someone can help with this step - I've put my rough guess at the MySQL side after the function code below. Thanks!
// Persistent 'file' store so the set survives restarts; the in-memory version is commented out
var check = flow.get('recordKeeper', 'file') || {};    // trade_id -> record object
var keyTemp = flow.get('recordKeys', 'file') || [];    // insertion order of trade ids
// var check = context.get('recordKeeper') || {};
// var keyTemp = context.get('recordKeys') || [];
// Keep only trades that have actually closed
const input = msg.payload.filter(s => s.status.type == "stop_loss_finished" || s.status.type == "finished" || s.status.type == "panic_sold");

let tempObj = {};
let tempArr = [];

for (let obj of input) {
    // Only add trades we have not stored yet (keyed by the unique trade id)
    if (!check.hasOwnProperty(obj.id)) {
        tempObj = {};
        tempObj.pair = String(obj.pair).replace("USDT_", "");
        tempObj.trade_id = obj.id;
        tempObj.profit_usd = Number(obj.profit.usd).toFixed(2);
        tempObj.profit_percent = obj.profit.percent;
        tempObj.position = Number(obj.position.total.value).toFixed(0);
        tempObj.volume = obj.data.current_price.quote_volume;
        tempObj.close_price = obj.data.average_close_price;
        tempObj.closed = obj.data.closed_at;

        // Trade duration formatted as "HH:MM" (the % 24 makes it wrap after 24 hours)
        let duration = new Date(obj.data.closed_at) - new Date(obj.data.created_at);
        let hours = Math.floor((duration / (1000 * 60 * 60)) % 24);
        let minutes = Math.floor((duration / (1000 * 60)) % 60);
        hours = (hours < 10) ? "0" + hours : hours;
        minutes = (minutes < 10) ? "0" + minutes : minutes;
        tempObj.dur = hours + ":" + minutes;

        // Map the API status to a short label
        if (obj.status.type === "stop_loss_finished") { tempObj.status = "SL"; }
        else if (obj.status.type === "finished") { tempObj.status = "TP"; }
        else if (obj.status.type === "panic_sold") { tempObj.status = "EX"; }
        else { tempObj.status = obj.status.type; }

        if (obj.stop_loss.enabled) {
            tempObj.to_sl = ((1 - obj.stop_loss.conditional.price.value / obj.data.average_enter_price) * 100).toFixed(2);
        }
        if (obj.status.type == "stop_loss_finished") {
            tempObj.S_L = (((obj.data.average_enter_price - obj.stop_loss.conditional.price.value) / obj.data.average_enter_price) * 100).toFixed(2);
        }
        if (obj.status.type == "finished") {
            tempObj.T_P = (((obj.take_profit.steps[0].price.value - obj.data.average_enter_price) / obj.data.average_enter_price) * 100).toFixed(2);
        }

        // The note field holds underscore-separated bot settings
        tempArr = obj.note.split("_");
        tempObj.frames = tempArr[0];
        tempObj.gain = tempArr[1];
        tempObj.gain_st = tempArr[2];
        tempObj.freq = tempArr[3];
        tempObj.analysis = tempArr[4];
        tempObj.tp_a = tempArr[5];
        tempObj.tp_trl_a = tempArr[6];
        tempObj.sl_a = tempArr[7];
        tempObj.sl_trl_a = tempArr[8];
        tempObj.tm_a = tempArr[9];
        tempObj.close = tempArr[10];
        tempObj.crypt = tempArr[11];
        tempObj.exch = tempArr[12];
        tempObj.v = tempArr[13];

        check[obj.id] = tempObj;
        keyTemp.push(obj.id);
    }
}
// Persist the updated set back to the 'file' store
flow.set('recordKeeper', check, 'file');
flow.set('recordKeys', keyTemp, 'file');
// context.set('recordKeeper', check);
// context.set('recordKeys', keyTemp);

// Send the whole aggregated set to the Tabulator table, in insertion order
msg = {};
msg.payload = [];
for (let k of keyTemp) {
    msg.payload.push(check[k]);
}
return msg;
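-
In case it helps to have something concrete to correct, here is my rough, untested guess at the write side. It assumes the node-red-node-mysql node (which executes the SQL string in msg.topic), a table I'd call trades with trade_id as the primary key, and :name placeholder binding from a msg.payload object - the table name, column list and the binding style are all assumptions on my part, so please point me to the right way if the node doesn't work like that.

// Hypothetical second function node, wired after the one above, that turns each
// record into one upsert query for the MySQL node.
// Assumptions (untested): node-red-node-mysql, a `trades` table with `trade_id`
// as PRIMARY KEY, and :name parameters bound from msg.payload.
const records = msg.payload;          // the array of record objects built above
const msgs = [];

for (const rec of records) {
    msgs.push({
        // ON DUPLICATE KEY UPDATE means re-sending a trade that is already stored is harmless
        topic: "INSERT INTO trades (trade_id, pair, profit_usd, profit_percent, close_price, closed, dur, status) " +
               "VALUES (:trade_id, :pair, :profit_usd, :profit_percent, :close_price, :closed, :dur, :status) " +
               "ON DUPLICATE KEY UPDATE profit_usd = :profit_usd, profit_percent = :profit_percent, status = :status",
        payload: {
            trade_id: rec.trade_id,
            pair: rec.pair,
            profit_usd: rec.profit_usd,
            profit_percent: rec.profit_percent,
            close_price: rec.close_price,
            closed: rec.closed,
            dur: rec.dur,
            status: rec.status
        }
    });
}

return [msgs];    // one message per record, all sent from output 1

For reading the data back into Tabulator I imagine an inject node, then a function setting msg.topic = "SELECT * FROM trades ORDER BY closed DESC", then the MySQL node, then the table - as far as I understand, the MySQL node returns the result rows as an array of objects in msg.payload, which is the shape the table expects. The table itself would just need one column per field above, with trade_id as the primary key. Does that sound like the right direction?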