Removing Duplicates from Array

I am getting an array of objects as data every 10 minutes. The first message looks like this.

[ 
  {"centre_id":"1111", "name": "A123", "age": "18"},
  {"centre_id":"2222", "name": "B123", "age": "18"},
  {"centre_id":"3333", "name": "C123", "age": "18"},
  {"centre_id":"4444", "name": "D123", "age": "18"}
]

Second message

[ 
  {"centre_id":"1111", "name": "A123", "age": "18"},
  {"centre_id":"2222", "name": "B123", "age": "18"},
  {"centre_id":"5555", "name": "E123", "age": "18"},
  {"centre_id":"6666", "name": "F123", "age": "18"}
]

Third message.

[ 
  {"centre_id":"7777", "name": "Y123", "age": "18"},
  {"centre_id":"2222", "name": "B123", "age": "18"},
  {"centre_id":"9999", "name": "T123", "age": "18"},
  {"centre_id":"6666", "name": "F123", "age": "18"}
]

As you can see, some elements repeat across these messages. I want to delete the repeated objects with the same centre_id, checking not just the previous message but all the messages received within the last 24 hours. For example, when the second message arrives, the payload after eliminating the duplicate objects would be:

[
  {"centre_id":"5555", "name": "E123", "age": "18"},
  {"centre_id":"6666", "name": "F123", "age": "18"},
]

And after the third message it will be:

[ 
  {"centre_id":"7777", "name": "Y123", "age": "18"},
  {"centre_id":"9999", "name": "T123", "age": "18"},
]

I hope someone can help.

Would 3 and 4 not be duplicates of the first message?

Where/how are you storing the information?

Yes, you are right, that was a small mistake on my part. I have corrected the draft.

Then this example should work for you

[{"id":"c15ec8dc.e02f78","type":"inject","z":"c74669a0.6a34f8","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[    {\"centre_id\":\"1111\", \"name\": \"A123\", \"age\": \"18\"},   {\"centre_id\":\"2222\", \"name\": \"B123\", \"age\": \"18\"},   {\"centre_id\":\"3333\", \"name\": \"C123\", \"age\": \"18\"},   {\"centre_id\":\"4444\", \"name\": \"D123\", \"age\": \"18\"} ]","payloadType":"json","x":130,"y":2180,"wires":[["5a9abeff.86d76"]]},{"id":"5a9abeff.86d76","type":"change","z":"c74669a0.6a34f8","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload[$not(centre_id in $flowContext(\"center24h_id\"))]","tot":"jsonata"},{"t":"set","p":"center24h_id","pt":"flow","to":"$append(\t   $flowContext(\"center24h_id\"),\t   $$.payload.centre_id\t)\t","tot":"jsonata"}],"action":"","property":"","from":"","to":"","reg":false,"x":360,"y":2200,"wires":[["1908944a.067e3c"]]},{"id":"bf7bd7b6.0f3728","type":"inject","z":"c74669a0.6a34f8","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[    {\"centre_id\":\"1111\", \"name\": \"A123\", \"age\": \"18\"},   {\"centre_id\":\"2222\", \"name\": \"B123\", \"age\": \"18\"},   {\"centre_id\":\"5555\", \"name\": \"E123\", \"age\": \"18\"},   {\"centre_id\":\"6666\", \"name\": \"F123\", \"age\": \"18\"} ]","payloadType":"json","x":130,"y":2220,"wires":[["5a9abeff.86d76"]]},{"id":"8ea2c4b8.415c48","type":"inject","z":"c74669a0.6a34f8","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[    {\"centre_id\":\"7777\", \"name\": \"Y123\", \"age\": \"18\"},   {\"centre_id\":\"2222\", \"name\": \"B123\", \"age\": \"18\"},   {\"centre_id\":\"9999\", \"name\": \"T123\", \"age\": \"18\"},   {\"centre_id\":\"6666\", \"name\": \"F123\", \"age\": \"18\"} ]","payloadType":"json","x":130,"y":2260,"wires":[["5a9abeff.86d76"]]},{"id":"1908944a.067e3c","type":"switch","z":"c74669a0.6a34f8","name":"","property":"payload","propertyType":"msg","rules":[{"t":"nempty"}],"checkall":"true","repair":false,"outputs":1,"x":520,"y":2240,"wires":[["9ff9c53a.cb8f48"]]},{"id":"9ff9c53a.cb8f48","type":"debug","z":"c74669a0.6a34f8","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","statusVal":"","statusType":"auto","x":650,"y":2200,"wires":[]},{"id":"de57e6a9.6554f","type":"inject","z":"c74669a0.6a34f8","name":"clear every 24 hours","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"","payloadType":"date","x":180,"y":2360,"wires":[["3ad68faf.7b1778"]]},{"id":"3ad68faf.7b1778","type":"change","z":"c74669a0.6a34f8","name":"","rules":[{"t":"set","p":"center24h_id","pt":"flow","to":"[]","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":440,"y":2360,"wires":[[]]}]

The flow clears the context array every 24 hours.
Alternatively, you could store a timestamp alongside each id and remove entries once they are more than 24 hours old.
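A minimal function-node sketch of that timestamp idea (the context key seenCentres and the 24-hour window are my assumptions, not part of the flow above):

// keep the time each centre_id was first seen, expire entries after 24 hours,
// and pass through only objects whose centre_id has not been seen yet
const DAY_MS = 24 * 60 * 60 * 1000;            // assumed retention window
const now = Date.now();
const seen = flow.get('seenCentres') || {};    // hypothetical flow-context key

// drop ids that are older than the retention window
for (const id of Object.keys(seen)) {
    if (now - seen[id] > DAY_MS) delete seen[id];
}

// keep only objects whose centre_id has not been seen yet
msg.payload = msg.payload.filter(item => {
    if (seen[item.centre_id]) return false;
    seen[item.centre_id] = now;
    return true;
});

flow.set('seenCentres', seen);
return msg;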

Actually, I get the data from an API and I am not storing it in this system. I just want to process the incoming data and then send some notifications.

It seems that there has been a change in the format of the incoming data, and I would appreciate your help in sorting that out. The change is as follows.
1st message.

[
  {
    "center_id":1111,
    "name":"AAAAA",
    "sessions":[
      {
        "session_id":"A111",
        "date":"19-07-2021"
      },
      {
        "session_id":"B111",
        "date":"20-07-2021"
      },
      {
        "session_id":"C111",
        "date":"21-07-2021"
      }
    ]
  },
  {
    "center_id":2222,
    "name":"BBBB",
    "sessions":[
      {
        "session_id":"A222",
        "date":"19-07-2021"
      },
      {
        "session_id":"B222",
        "date":"20-07-2021"
      }
    ]
  },
  {
    "center_id":3333,
    "name":"CCCC",
    "sessions":[
      {
        "session_id":"C111",
        "date":"19-07-2021"
      }
    ]
  }
]

In the previous case we didn't have any session ids, so we only had to eliminate repeated objects by looking at the centre_id. Now, besides eliminating repeats by session_id, I want to split the array into the following format.
1st output, assuming none of the session_ids is repeated.

  {
    "center_id":1111,
    "name":"AAAAA",
    "sessions":[
      {
        "session_id":"A111",
        "date":"19-07-2021"
      }
    ]
  }

2nd output

  {
    "center_id":1111,
    "name":"AAAAA",
    "sessions":[
      {
        "session_id":"B111",
        "date":"20-07-2021"
      }
    ]
  }

3rd output

  {
    "center_id":1111,
    "name":"AAAAA",
    "sessions":[
      {
        "session_id":"C111",
        "date":"21-07-2021"
      }
    ]
  }
This is just the example for the first object in the array; the same should continue for the other elements as well.

Here is an example with JavaScript showing how you can split and restructure the original msg.

[{"id":"534d3c8475e49cf4","type":"inject","z":"4895ea10b4ee9ead","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[   {     \"center_id\":1111,     \"name\":\"AAAAA\",     \"sessions\":[       {         \"session_id\":\"A111\",         \"date\":\"19-07-2021\"       },       {         \"session_id\":\"B111\",         \"date\":\"20-07-2021\"       },       {         \"session_id\":\"C111\",         \"date\":\"21-07-2021\"       }     ]   },   {     \"center_id\":2222,     \"name\":\"BBBB\",     \"sessions\":[       {         \"session_id\":\"A222\",         \"date\":\"19-07-2021\"       },       {         \"session_id\":\"B222\",         \"date\":\"20-07-2021\"       }     ]   },   {     \"center_id\":3333,     \"name\":\"CCCC\",     \"sessions\":[       {         \"session_id\":\"C111\",         \"date\":\"19-07-2021\"       }     ]   } ]","payloadType":"json","x":260,"y":980,"wires":[["7a098efeb56cc28b","3c670cdc32ad3980"]]},{"id":"7a098efeb56cc28b","type":"function","z":"4895ea10b4ee9ead","name":"","func":"let result = []\n\nmsg.payload.forEach(el => {\n\n    el.sessions.forEach(session => {\n        result.push({\n            center_id: el.center_id,\n            name: el.name,\n            sessions: [\n                {\n                    session_id: session.session_id,\n                    date: session.date\n                }]\n        })\n    })\n})\n\nmsg.payload = result;\n\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":440,"y":980,"wires":[["da4005ed36f072d2"]]},{"id":"3c670cdc32ad3980","type":"debug","z":"4895ea10b4ee9ead","name":"1","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":420,"y":880,"wires":[]},{"id":"da4005ed36f072d2","type":"debug","z":"4895ea10b4ee9ead","name":"2","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":630,"y":980,"wires":[]}]

What I didn't fully understand is the conditions for removing duplicate session_ids.
Is there a chance of duplicate session_ids coming from your API?
Can you give us some more examples, because to tell you the truth I didn't understand the rules from your first post.

The session ids are unique, but the data coming from the API is repeated. Instead of sending just the new data, the API appends the new info to the end of the previous message. This keeps accumulating for a day or so, and only then does the data start fresh. I appreciate the help.

Well, that makes it more difficult. If I understand correctly, try this:

[{"id":"c15ec8dc.e02f78","type":"inject","z":"c74669a0.6a34f8","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[{\"center_id\":1111,\"name\":\"AAAAA\",\"sessions\":[{\"session_id\":\"A111\",\"date\":\"19-07-2021\"},{\"session_id\":\"B111\",\"date\":\"20-07-2021\"},{\"session_id\":\"C111\",\"date\":\"21-07-2021\"}]},{\"center_id\":2222,\"name\":\"BBBB\",\"sessions\":[{\"session_id\":\"A222\",\"date\":\"19-07-2021\"},{\"session_id\":\"B222\",\"date\":\"20-07-2021\"}]},{\"center_id\":3333,\"name\":\"CCCC\",\"sessions\":[{\"session_id\":\"C111\",\"date\":\"19-07-2021\"}]}]","payloadType":"json","x":130,"y":2180,"wires":[["5a9abeff.86d76"]]},{"id":"5a9abeff.86d76","type":"change","z":"c74669a0.6a34f8","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"$map(payload,function($v){\t    $merge($each($v,function($V,$K){\t        {$K:$type($V)=\"array\"? $V[$not(session_id in $flowContext(\"center24h_id\"))] : $V}\t}))})[$exists(sessions)]\t\t\t","tot":"jsonata"},{"t":"set","p":"center24h_id","pt":"flow","to":"$distinct($append(\t   $flowContext(\"center24h_id\"),\t   $$.payload[*].sessions[*].session_id\t))","tot":"jsonata"}],"action":"","property":"","from":"","to":"","reg":false,"x":360,"y":2200,"wires":[["1908944a.067e3c"]]},{"id":"bf7bd7b6.0f3728","type":"inject","z":"c74669a0.6a34f8","name":"","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[{\"center_id\":1111,\"name\":\"AAAAA\",\"sessions\":[{\"session_id\":\"A111\",\"date\":\"19-07-2021\"},{\"session_id\":\"B111\",\"date\":\"20-07-2021\"},{\"session_id\":\"D111\",\"date\":\"21-07-2021\"}]},{\"center_id\":2222,\"name\":\"BBBB\",\"sessions\":[{\"session_id\":\"A222\",\"date\":\"19-07-2021\"},{\"session_id\":\"D222\",\"date\":\"20-07-2021\"}]},{\"center_id\":3333,\"name\":\"CCCC\",\"sessions\":[{\"session_id\":\"D111\",\"date\":\"19-07-2021\"}]}]","payloadType":"json","x":130,"y":2220,"wires":[["5a9abeff.86d76"]]},{"id":"1908944a.067e3c","type":"switch","z":"c74669a0.6a34f8","name":"","property":"payload","propertyType":"msg","rules":[{"t":"nempty"}],"checkall":"true","repair":false,"outputs":1,"x":520,"y":2240,"wires":[["9ff9c53a.cb8f48"]]},{"id":"9ff9c53a.cb8f48","type":"debug","z":"c74669a0.6a34f8","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","statusVal":"","statusType":"auto","x":650,"y":2200,"wires":[]},{"id":"de57e6a9.6554f","type":"inject","z":"c74669a0.6a34f8","name":"clear every 24 hours","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"","payloadType":"date","x":180,"y":2360,"wires":[["3ad68faf.7b1778"]]},{"id":"3ad68faf.7b1778","type":"change","z":"c74669a0.6a34f8","name":"","rules":[{"t":"set","p":"center24h_id","pt":"flow","to":"[]","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":440,"y":2360,"wires":[[]]}]

The elimination part works, but it seems the restructuring of the payload to reduce each output to a single session id is not working.

Let's say in the morning you make the first request to the API and you get some data back, each element with a unique session_id.
Later during the same day you make another request and you get the same morning data plus some data appended to the array.
So with each request to the API you want to check for and keep only the new data, and also restructure it into a more useful format?

It would be good, if you could, to share some real data.

Here's my attempt. In msg.payload you get the unique new data between requests.
You still need to implement the logic to delete the saved sessionData from context, possibly with an inject node at the end of the day so the context is cleared for the next day.

[{"id":"534d3c8475e49cf4","type":"inject","z":"4895ea10b4ee9ead","name":"1st request","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[   {     \"center_id\":1111,     \"name\":\"AAAAA\",     \"sessions\":[       {         \"session_id\":\"A111\",         \"date\":\"19-07-2021\"       },       {         \"session_id\":\"B111\",         \"date\":\"20-07-2021\"       },       {         \"session_id\":\"C111\",         \"date\":\"21-07-2021\"       }     ]   },   {     \"center_id\":2222,     \"name\":\"BBBB\",     \"sessions\":[       {         \"session_id\":\"A222\",         \"date\":\"19-07-2021\"       },       {         \"session_id\":\"B222\",         \"date\":\"20-07-2021\"       }     ]   },   {     \"center_id\":3333,     \"name\":\"CCCC\",     \"sessions\":[       {         \"session_id\":\"C111\",         \"date\":\"19-07-2021\"       }     ]   } ]","payloadType":"json","x":290,"y":460,"wires":[["7a098efeb56cc28b","3c670cdc32ad3980"]]},{"id":"7a098efeb56cc28b","type":"function","z":"4895ea10b4ee9ead","name":"","func":"let restructuredData = []\nlet sessionData = flow.get('sessionData')\nlet uniqueData = []\n\n// restructuredData - loop through every element of payload and every el in sessions\nmsg.payload.forEach(el => {\n    el.sessions.forEach(session => {\n        restructuredData.push({\n            center_id: el.center_id,\n            name: el.name,\n            session_id: session.session_id,\n            date: session.date\n        })\n    })\n})\n\n// check if Context has any previous data saved \nif (sessionData && sessionData.length > 0) {\n\n    // make arr of only savedIds to make it easier for some()\n    let savedIds = sessionData.map(v => v.session_id)\n    // loop through and check every element in new restructuredData if its unique session_id doesnt exist \n    restructuredData.forEach(d => {\n        if (!savedIds.some(v => v == d.session_id)) {\n            uniqueData.push(d)\n        }\n    })\n    sessionData = sessionData.concat(uniqueData)\n    // node.warn(uniqueData);\n    flow.set(\"sessionData\", sessionData);\n    msg.payload = uniqueData;\n    return msg;\n\n}\n// no data in context / save first data\nelse {\n    flow.set(\"sessionData\", restructuredData); \n    msg.payload = restructuredData;\n    return msg;\n}\n\n","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":520,"y":520,"wires":[["da4005ed36f072d2"]]},{"id":"3c670cdc32ad3980","type":"debug","z":"4895ea10b4ee9ead","name":"1","active":false,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":510,"y":600,"wires":[]},{"id":"da4005ed36f072d2","type":"debug","z":"4895ea10b4ee9ead","name":"2","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":690,"y":520,"wires":[]},{"id":"2474195798573f55","type":"comment","z":"4895ea10b4ee9ead","name":"api requests","info":"","x":290,"y":380,"wires":[]},{"id":"a5e710f2510b7c79","type":"inject","z":"4895ea10b4ee9ead","name":"2nd 
request","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[{\"center_id\":1111,\"name\":\"AAAAA\",\"sessions\":[{\"session_id\":\"A111\",\"date\":\"19-07-2021\"},{\"session_id\":\"B111\",\"date\":\"20-07-2021\"},{\"session_id\":\"C111\",\"date\":\"21-07-2021\"}]},{\"center_id\":2222,\"name\":\"BBBB\",\"sessions\":[{\"session_id\":\"A222\",\"date\":\"19-07-2021\"},{\"session_id\":\"B222\",\"date\":\"20-07-2021\"}]},{\"center_id\":3333,\"name\":\"CCCC\",\"sessions\":[{\"session_id\":\"C111\",\"date\":\"19-07-2021\"}]},{\"center_id\":4444,\"name\":\"DDDD\",\"sessions\":[{\"session_id\":\"D111\",\"date\":\"19-07-2021\"},{\"session_id\":\"D222\",\"date\":\"19-07-2021\"}]}]","payloadType":"json","x":290,"y":560,"wires":[["7a098efeb56cc28b","3c670cdc32ad3980"]]},{"id":"0b9ca46e72b2d304","type":"inject","z":"4895ea10b4ee9ead","name":"3rd request","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[{\"center_id\":1111,\"name\":\"AAAAA\",\"sessions\":[{\"session_id\":\"A111\",\"date\":\"19-07-2021\"},{\"session_id\":\"B111\",\"date\":\"20-07-2021\"},{\"session_id\":\"C111\",\"date\":\"21-07-2021\"}]},{\"center_id\":2222,\"name\":\"BBBB\",\"sessions\":[{\"session_id\":\"A222\",\"date\":\"19-07-2021\"},{\"session_id\":\"B222\",\"date\":\"20-07-2021\"}]},{\"center_id\":3333,\"name\":\"CCCC\",\"sessions\":[{\"session_id\":\"C111\",\"date\":\"19-07-2021\"}]},{\"center_id\":4444,\"name\":\"DDDD\",\"sessions\":[{\"session_id\":\"D111\",\"date\":\"19-07-2021\"},{\"session_id\":\"D222\",\"date\":\"19-07-2021\"}]},{\"center_id\":5555,\"name\":\"EEEE\",\"sessions\":[{\"session_id\":\"E111\",\"date\":\"19-07-2021\"},{\"session_id\":\"E222\",\"date\":\"19-07-2021\"},{\"session_id\":\"E333\",\"date\":\"19-07-2021\"}]}]","payloadType":"json","x":290,"y":660,"wires":[["7a098efeb56cc28b","3c670cdc32ad3980"]]}]

That worked, thanks, and apologies for the late response. We have been experimenting with various data patterns, and now there has been a small change in the data provided by the API. The messages look like below.

[
    {
        "center_id": 1111,
        "name": "AAAAA",
        "fee_type": "Paid",
        "sessions": [
            {
                "session_id": "A111",
                "date": "19-07-2021",
                "medicine": "Med1"
            },
            {
                "session_id": "A112",
                "date": "19-07-2021",
                "medicine": "Med2"
            },            
            {
                "session_id": "B111",
                "date": "20-07-2021",
                "medicine": "Med2"
            },
            {
                "session_id": "C111",
                "date": "21-07-2021",
                "medicine": "Med1"
            }
        ],
        "Med_fees": [
            {
                "medicine": "Med1",
                "fee":"100"
            },
            {
                "medicine": "Med2",
                "fee":"200"
            }]
    },
    {
        "center_id": 2222,
        "name": "BBBB",
        "fee_type": "Free",        
        "sessions": [
            {
                "session_id": "A222",
                "date": "19-07-2021",
                "medicine": "Med1"
            },
            {
                "session_id": "B222",
                "date": "20-07-2021",
                "medicine": "Med1"
            }
        ]
    },
    {
        "center_id": 3333,
        "name": "CCCC",
        "fee_type": "Paid",
        "sessions": [
            {
                "session_id": "C111",
                "date": "19-07-2021",
                "medicine": "Med1"
            }],
        "Med_fees": [
            {
                "medicine": "Med1",
                "fee":"500"
            }
            ]
    }
]

Here the basic thing has not changed, i.e. we still have to filter repeated messages using the session_id, but some new elements have been introduced which should be added to the restructured data. For example, each object now has a "fee_type" element, which can be Paid or Free. If Paid, the object has one more array, "Med_fees", which shows the fee for each medicine type mentioned in the sessions. In the restructured data, for each session we have to add the medicine type and the corresponding price. If the fee_type is Free, then the corresponding price should be 0.

The final outcome for each session_id should look like below.

{
	"center_id":1111,
	"name":"AAAAA",
	"session_id":"A111",
	"date":"19-07-2021",
	"medicine":"Med1",
	"Fee_type":"Paid",
	"fee":100
}
{
	"center_id":1111,
	"name":"AAAAA",
	"session_id":"A112",
	"date":"19-07-2021",
	"medicine":"Med2",
	"Fee_type":"Paid",
	"fee":200
}

For free

{
	"center_id":2222,
	"name":"BBBB",
	"session_id":"A222",
	"date":"19-07-2021",
	"medicine":"Med2",
	"Fee_type":"Free",
	"fee":0
}

Some modifications to the forEach loop so that each iteration also finds the appropriate fee:

let restructuredData = []
let sessionData = flow.get('sessionData')
let uniqueData = []

// restructuredData - loop through every element of payload and every el in sessions
msg.payload.forEach(el => {
    el.sessions.forEach(session => {

        restructuredData.push({
            center_id: el.center_id,
            name: el.name,
            session_id: session.session_id,
            date: session.date,
            medicine: session.medicine,
            fee_type: el.fee_type,
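            // for 'Paid' centres this assumes Med_fees always has an entry for the session's medicine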
            fee: (el.fee_type == 'Paid') ? Number(el.Med_fees.find(x => x.medicine === session.medicine).fee) : 0
        })
    })
})

// check if Context has any previous data saved 
if (sessionData && sessionData.length > 0) {

    // make arr of only savedIds to make it easier for some()
    let savedIds = sessionData.map(v => v.session_id)
    // keep only elements of restructuredData whose session_id has not been saved yet
    restructuredData.forEach(d => {
        if (!savedIds.some(v => v == d.session_id)) {
            uniqueData.push(d)
        }
    })
    sessionData = sessionData.concat(uniqueData)
    // node.warn(uniqueData);
    flow.set("sessionData", sessionData);
    msg.payload = uniqueData;
    return msg;

}
// no data in context / save first data
else {
    flow.set("sessionData", restructuredData); 
    msg.payload = restructuredData;
    return msg;
}


Awesome, it works... thanks for the quick response.
@E1cid Thanks bro
