When downloading files from an FTP site using the node-red-contrib-ftp-download node in Node-RED, the first time the flow runs I get one message. The second time it runs I get two messages: a repeat of the first one plus a new one for the second run. Run the flow a third time and I get a third copy of the first message, a second copy of the second message and a new third message.
It seems to cache the previous messages and regurgitate them.
See the screen recording and the image below.
Any ideas why this behaviour occurs?
This example uses the Rebex test server https://test.rebex.net/, but I get exactly the same behaviour from our internal server.
Flow export below:
[{"id":"e8a6870e46d12ef8","type":"advanced-ftp","z":"e5989d7ff278b3f6","ftp":"c1d364c1b06ff603","operation":"list","dataType":"binary","filename":"","localFilename":"","workingDir":"/pub/example","oldPath":"","newPath":"","command":"","recursive":false,"useCompression":false,"throwError":true,"showError":true,"name":"List files in Outbound","x":500,"y":900,"wires":[["2f75b21051691173"]]},{"id":"484bc807c69d332b","type":"inject","z":"e5989d7ff278b3f6","name":"Download NB PO","props":[{"p":"payload"},{"p":"tsStart","v":"","vt":"date"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"","payloadType":"date","x":180,"y":900,"wires":[["e8a6870e46d12ef8"]]},{"id":"2f75b21051691173","type":"split","z":"e5989d7ff278b3f6","name":"Split the FTP file array","splt":"\\n","spltType":"str","arraySplt":1,"arraySpltType":"len","stream":false,"addname":"","x":183,"y":1000,"wires":[["72169bb43ea58d77"]]},{"id":"72169bb43ea58d77","type":"switch","z":"e5989d7ff278b3f6","name":"Identify the files we need","property":"payload.name","propertyType":"msg","rules":[{"t":"cont","v":"KeyGen","vt":"str"},{"t":"nempty"}],"checkall":"false","repair":false,"outputs":2,"x":493,"y":1000,"wires":[["fb6544f1f05a4d78"],["5004b4ca22be0d0f"]]},{"id":"fb6544f1f05a4d78","type":"function","z":"e5989d7ff278b3f6","name":"Move filename to payload for join","func":"msg.payload = msg.payload.name;\n\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":220,"y":1080,"wires":[["f266bf2336834da7"]]},{"id":"5004b4ca22be0d0f","type":"function","z":"e5989d7ff278b3f6","name":"Assigns NA for later filtering","func":"msg.payload='NA';\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":200,"y":1140,"wires":[["f266bf2336834da7"]]},{"id":"f266bf2336834da7","type":"join","z":"e5989d7ff278b3f6","name":"","mode":"auto","build":"object","property":"payload","propertyType":"msg","key":"topic","joiner":"\\n","joinerType":"str","accumulate":true,"timeout":"","count":"","reduceRight":false,"reduceExp":"","reduceInit":"","reduceInitType":"","reduceFixup":"","x":470,"y":1100,"wires":[["3c191f23b601d3f8"]]},{"id":"3c191f23b601d3f8","type":"function","z":"e5989d7ff278b3f6","name":"Filter out the files we need","func":"var filtArr = msg.payload;\n\n//const retArr = filtArr.filter(filterFiles)\nconst tmpArr = filtArr.filter(filterFiles)\nconst retArr = tmpArr.map(filepaths);\n\n//let retObj = [];\n\n// for (let i = 0; i < retArr.length; i++) {\n// //obj[i] = arr[i];\n// let srcfile = '/outbound/archive/' + retArr[i];\n// let destfile = '/media/share/sftpSAPTest/NB-SAP/Inbound/Staging' + retArr[i];\n// let cc = {'src':srcfile, 'dest':destfile};\n// retObj.push(cc);\n// }\n\n// msg.test = retObj;\n//msg.filesToDownload = retArr;\nmsg.payload = retArr;\n\n\nreturn msg;\n\n\nconst arrayToObject = (arr) => {\n let obj = {};\n for (let i = 0; i < arr.length; i++) {\n obj[i] = arr[i];\n }\n return obj;\n};\n\n\nfunction filterFiles(fname){\n return fname != 'NA';\n}\nfunction filepaths(fname) {\n return '/pub/example/' + fname;\n}","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":190,"y":1200,"wires":[["10cd70d1b1b5364c"]]},{"id":"10cd70d1b1b5364c","type":"ftp-download","z":"e5989d7ff278b3f6","server":"a0654230a6945c41","files":"payload","directory":"/media/share/sftpSAPTest/NB-SAP/Inbound/Staging/","name":"Download the 
files","output":"payload","filesType":"msg","directoryType":"str","x":430,"y":1200,"wires":[["df1620b04cd1b369"]]},{"id":"df1620b04cd1b369","type":"debug","z":"e5989d7ff278b3f6","name":"debug 124","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"true","targetType":"full","statusVal":"","statusType":"auto","x":670,"y":1140,"wires":[]},{"id":"c1d364c1b06ff603","type":"advanced-ftp-config","host":"test.rebex.net","port":"","secure":false,"secureOptions":"","user":"demo","connTimeout":"","pasvTimeout":"","keepalive":"","name":""},{"id":"a0654230a6945c41","type":"ftp-download-server","host":"test.rebex.net","port":"21","user":"demo","connTimeout":"10000","pasvTimeout":"10000","keepalive":"10000","name":""}]
PS. If I disable the download node and wire the debug node directly to the "Filter out the files we need" function node, I only ever get one message returned, so it's definitely the download node causing the repeated messages.