Tigo Energy PV combined API to InfluxDB

Obviously, that is what I said. I thought you might have a go at filling in the missing bit. Something like this should do it.

[{"id":"3da4222.005e05e","type":"inject","z":"1e323fba.79cef8","name":"Sample data","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"\"DATETIME,84177508\\n2021/05/30 07:00:00,247\\n2021/05/30 07:01:00,244\\n2021/05/30 07:02:00,251\\n2021/05/30 07:03:00,251\\n2021/05/30 07:04:00,255\\n2021/05/30 07:05:00,248\\n2021/05/30 07:06:00,263\\n2021/05/30 07:07:00,275\\n2021/05/30 07:08:00,309\\n2021/05/30 07:09:00,338\\n2021/05/30 07:10:00,309\\n2021/05/30 07:11:00,263\\n2021/05/30 07:12:00,262\\n2021/05/30 07:13:00,275\\n2021/05/30 07:14:00,325\\n2021/05/30 07:15:00,344\\n2021/05/30 07:16:00,340\\n2021/05/30 07:17:00,322\\n2021/05/30 07:18:00,320\\n2021/05/30 07:19:00,352\\n2021/05/30 07:20:00,348\\n2021/05/30 07:21:00,297\\n2021/05/30 07:22:00,270\\n2021/05/30 07:23:00,242\\n2021/05/30 07:24:00,252\\n2021/05/30 07:25:00,252\\n2021/05/30 07:26:00,261\\n2021/05/30 07:27:00,261\\n2021/05/30 07:28:00,255\\n2021/05/30 07:29:00,243\\n2021/05/30 07:30:00,243\\n2021/05/30 07:31:00,245\\n2021/05/30 07:32:00,239\\n2021/05/30 07:33:00,234\\n2021/05/30 07:34:00,231\\n2021/05/30 07:35:00,231\\n2021/05/30 07:36:00,227\\n2021/05/30 07:37:00,224\\n\"","payloadType":"json","x":170,"y":660,"wires":[["33196695.e91212"]]},{"id":"33196695.e91212","type":"function","z":"1e323fba.79cef8","name":"","func":"let measurement = \"the_measurement\"  // set this to your measurement\n// convert to array by splitting on newline\nlet values = msg.payload.split(\"\\n\")\n// discard the first one which is not required\nvalues.shift()\n// map the array elements to the format required by influx batch node\nvalues = values.map(function(value) {\n    let splits = value.split(\",\")\n    //node.send({payload: value})\n    return {\n        measurement: measurement, \n        fields: { value: Number(splits[1]) },\n        timestamp: Date.parse(splits[0])\n    }\n})\n\nmsg.payload = values\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":340,"y":660,"wires":[["9a082a91.e21ec"]]},{"id":"9a082a91.e21ec","type":"debug","z":"1e323fba.79cef8","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","statusVal":"","statusType":"auto","x":530,"y":660,"wires":[]}]

Almost there. I am getting an error when trying to send to Influx.

TypeError: val.slice is not a function

Is that coming from the influx node?

Are you using the influx batch node?

If so then send the message that is going to the influx node to a debug node and show us what it shows.

Is that coming from the influx node?

Yes, it was, but I was using the influx out node.

Are you using the influx batch node?

I was using influxdb out but have corrected this now, and I get a new error:

Error: A 400 Bad Request error occurred: {"error":"unable to parse 'PV Production value=309 1622872800000': invalid field format\nunable to parse 'PV Production value=308 1622872860000': invalid field format\nunable to parse 'PV Production value=308 1622872920000': invalid field format\nunable to parse 'PV Production value=303 1622872980000': invalid field format\nunable to parse 'PV Production value=297 1622873040000': invalid field format\nunable to parse 'PV Production value=287 1622873100000': invalid field format\nunable to parse 'PV Production value=279 1622873160000': invalid field format\nunable to parse 'PV Production value=281 1622873220000': invalid field format\nunable to parse 'PV Production value=281 1622873280000': invalid field format\nunable to parse 'PV Production value=279 1622873340000': invalid field format\nunable to parse 'PV Production value=274 1622873400000': invalid field format\nunable to parse 'PV Production value=274 1622873460000': invalid field format\nunable to parse 'PV Production value=273 1622873520000': invalid field format\nunable to parse 'PV Production value=273 1622873580000': invalid field format\nunable to parse 'PV Production value=281 1622873640000': invalid field format\nunable to parse 'PV Production value=288 1622873700000': invalid field format\nunable to parse 'PV Production value=296 1622873760000': invalid field format\nunable to parse 'PV Production value=296 1622873820000': invalid field format\nunable to parse 'PV Production value=295 1622873880000': invalid field format\nunable to parse 'PV Production value=296 1622873940000': invalid field format\nunable to parse 'PV Production value=283 1622874000000': invalid field format\nunable to parse 'PV Production value=271 1622874060000': invalid field format\nunable to parse 'PV Production value=266 1622874120000': invalid field format\nunable to parse 'PV Production value=240 1622874180000': invalid field format\nunable to parse 'PV Production value=217 1622874240000': invalid field format\nunable to parse 'PV Production value=205 1622874300000': invalid field format\nunable to parse 'PV Production value=217 1622874360000': invalid field format\nunable to parse 'PV Production value=217 1622874420000': invalid field format\nunable to parse 'PV Production value=218 1622874480000': invalid field format\nunable to parse 'PV Production value=217 1622874540000': invalid field format\nunable to parse 'PV Production value=219 1622874600000': invalid field format\nunable to parse 'PV Production value=229 1622874660000': invalid field format\nunable to parse 'PV Production value=251 1622874720000': invalid field format\nunable to parse 'PV Production value=262 1622874780000': invalid field format\nunable to parse 'PV Production value=255 1622874840000': invalid field format\nunable to parse 'PV Production value=259 1622874900000': invalid field format\nunable to parse 'PV Production value=285 1622874960000': invalid field format\nunable to parse 'PV Production value=296 1622875020000': invalid field format\nunable to parse 'PV Production value=307 1622875080000': invalid field format\nunable to parse 'PV Production value=330 1622875140000': invalid field format\nunable to parse 'PV Production value=365 1622875200000': invalid field format\nunable to parse 'PV Production value=399 1622875260000': invalid field format\nunable to parse 'PV Production value=433 1622875320000': invalid field format\nunable to parse 'PV Production value=457 1622875380000': invalid field format\nunable to parse 'PV 
Production value=470 1622875440000': invalid field format\nunable to parse 'PV Production value=503 1622875500000': invalid field format\nunable to parse 'PV Production value=545 1622875560000': invalid field format\nunable to parse 'PV Production value=573 1622875620000': invalid field format\nunable to parse 'PV Production value=620 1622875680000': invalid field format\nunable to parse 'PV Production value=599 1622875740000': invalid field format\nunable to parse 'PV Production value=595 1622875800000': invalid field format\nunable to parse 'PV Production value=626 1622875860000': invalid field format\nunable to parse 'PV Production value=638 1622875920000': invalid field format\nunable to parse 'PV Production value=630 1622875980000': invalid field format\nunable to parse 'PV Production value=619 1622876040000': invalid field format\nunable to parse 'PV Production value=597 1622876100000': invalid field format\nunable to parse 'PV Production value=567 1622876160000': invalid field format\nunable to parse 'PV Production value=533 1622876220000': invalid field format\nunable to parse 'PV Production value=489 1622876280000': invalid field format\nunable to parse 'PV Production value=468 1622876340000': invalid field format\nunable to parse 'PV Production value=464 1622876400000': invalid field format\nunable to parse 'PV Production value=NaN NaN': invalid field format"}


If so then send the message that is going to the influx node to a debug node and show us what it shows.

{"_msgid":"eeb2607e.7f935","topic":"pv_pin","statusCode":200,"headers":{"date":"Sun, 06 Jun 2021 14:04:41 GMT","server":"Apache","access-control-allow-origin":"*","access-control-allow-headers":"Origin, X-Requested-With, Content-Type, Accept","pragma":"public","expires":"0","cache-control":"must-revalidate, post-check=0, pre-check=0","x-frame-options":"sameorigin","content-length":"1482","connection":"close","content-type":"text/csv","x-node-red-request-node":"43f6a7a4"},"responseUrl":"https://api2.tigoenergy.com/api/v3/data/combined?system_id=52705&agg=mi&start=2021-06-5T07:00:00&end=2021-06-5T08:00:00","payload":[{"measurement":"PV Production","fields":{"value":309},"timestamp":1622872800000},{"measurement":"PV Production","fields":{"value":308},"timestamp":1622872860000},{"measurement":"PV Production","fields":{"value":308},"timestamp":1622872920000},{"measurement":"PV Production","fields":{"value":303},"timestamp":1622872980000},{"measurement":"PV Production","fields":{"value":297},"timestamp":1622873040000},{"measurement":"PV Production","fields":{"value":287},"timestamp":1622873100000},{"measurement":"PV Production","fields":{"value":279},"timestamp":1622873160000},{"measurement":"PV Production","fields":{"value":281},"timestamp":1622873220000},{"measurement":"PV Production","fields":{"value":281},"timestamp":1622873280000},{"measurement":"PV Production","fields":{"value":279},"timestamp":1622873340000},{"measurement":"PV Production","fields":{"value":274},"timestamp":1622873400000},{"measurement":"PV Production","fields":{"value":274},"timestamp":1622873460000},{"measurement":"PV Production","fields":{"value":273},"timestamp":1622873520000},{"measurement":"PV Production","fields":{"value":273},"timestamp":1622873580000},{"measurement":"PV Production","fields":{"value":281},"timestamp":1622873640000},{"measurement":"PV Production","fields":{"value":288},"timestamp":1622873700000},{"measurement":"PV Production","fields":{"value":296},"timestamp":1622873760000},{"measurement":"PV Production","fields":{"value":296},"timestamp":1622873820000},{"measurement":"PV Production","fields":{"value":295},"timestamp":1622873880000},{"measurement":"PV Production","fields":{"value":296},"timestamp":1622873940000},{"measurement":"PV Production","fields":{"value":283},"timestamp":1622874000000},{"measurement":"PV Production","fields":{"value":271},"timestamp":1622874060000},{"measurement":"PV Production","fields":{"value":266},"timestamp":1622874120000},{"measurement":"PV Production","fields":{"value":240},"timestamp":1622874180000},{"measurement":"PV Production","fields":{"value":217},"timestamp":1622874240000},{"measurement":"PV Production","fields":{"value":205},"timestamp":1622874300000},{"measurement":"PV Production","fields":{"value":217},"timestamp":1622874360000},{"measurement":"PV Production","fields":{"value":217},"timestamp":1622874420000},{"measurement":"PV Production","fields":{"value":218},"timestamp":1622874480000},{"measurement":"PV Production","fields":{"value":217},"timestamp":1622874540000},{"measurement":"PV Production","fields":{"value":219},"timestamp":1622874600000},{"measurement":"PV Production","fields":{"value":229},"timestamp":1622874660000},{"measurement":"PV Production","fields":{"value":251},"timestamp":1622874720000},{"measurement":"PV Production","fields":{"value":262},"timestamp":1622874780000},{"measurement":"PV Production","fields":{"value":255},"timestamp":1622874840000},{"measurement":"PV 
Production","fields":{"value":259},"timestamp":1622874900000},{"measurement":"PV Production","fields":{"value":285},"timestamp":1622874960000},{"measurement":"PV Production","fields":{"value":296},"timestamp":1622875020000},{"measurement":"PV Production","fields":{"value":307},"timestamp":1622875080000},{"measurement":"PV Production","fields":{"value":330},"timestamp":1622875140000},{"measurement":"PV Production","fields":{"value":365},"timestamp":1622875200000},{"measurement":"PV Production","fields":{"value":399},"timestamp":1622875260000},{"measurement":"PV Production","fields":{"value":433},"timestamp":1622875320000},{"measurement":"PV Production","fields":{"value":457},"timestamp":1622875380000},{"measurement":"PV Production","fields":{"value":470},"timestamp":1622875440000},{"measurement":"PV Production","fields":{"value":503},"timestamp":1622875500000},{"measurement":"PV Production","fields":{"value":545},"timestamp":1622875560000},{"measurement":"PV Production","fields":{"value":573},"timestamp":1622875620000},{"measurement":"PV Production","fields":{"value":620},"timestamp":1622875680000},{"measurement":"PV Production","fields":{"value":599},"timestamp":1622875740000},{"measurement":"PV Production","fields":{"value":595},"timestamp":1622875800000},{"measurement":"PV Production","fields":{"value":626},"timestamp":1622875860000},{"measurement":"PV Production","fields":{"value":638},"timestamp":1622875920000},{"measurement":"PV Production","fields":{"value":630},"timestamp":1622875980000},{"measurement":"PV Production","fields":{"value":619},"timestamp":1622876040000},{"measurement":"PV Production","fields":{"value":597},"timestamp":1622876100000},{"measurement":"PV Production","fields":{"value":567},"timestamp":1622876160000},{"measurement":"PV Production","fields":{"value":533},"timestamp":1622876220000},{"measurement":"PV Production","fields":{"value":489},"timestamp":1622876280000},{"measurement":"PV Production","fields":{"value":468},"timestamp":1622876340000},{"measurement":"PV Production","fields":{"value":464},"timestamp":1622876400000},{"measurement":"PV Production","fields":{"value":null},"timestamp":null}],"redirectList":[]}

Try it without a space in the measurement name.

Also, if you look at the last record in the array you will see that it is empty, so presumably there is an extra newline at the end of the buffer. Change the bit in the function where it discards the first row so that it reads:

// discard the first one which is not required
values.shift()
// and the last which is empty
values.pop()
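
If the CSV ever ends with more than one blank line, you could filter out empty rows instead of popping just one, something like this (an untested sketch):

// instead of values.pop(), drop any blank lines wherever they appear
values = values.filter(function (line) {
    return line.trim().length > 0
})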

my function:

let measurement = "PV Production"  // set this to your measurement
// convert to array by splitting on newline
let values = msg.payload.split("\n")
// discard the first one which is not required
values.shift()
// and the last which is empty
values.pop()
// map the array elements to the format required by influx batch node
values = values.map(function(value) {
    let splits = value.split(",")
    //node.send({payload: value})
    return {
        measurement: measurement, 
        fields: { value: Number(splits[1]) },
        timestamp: Date.parse(splits[0])
    }
})

msg.payload = values
return msg;

Still getting the error:


Error: A 400 Bad Request error occurred: {"error":"unable to parse 'PV Production value=309 1622872800000': invalid field format\nunable to parse 'PV Production value=308 1622872860000': invalid field format\nunable to parse 'PV Production value=308 1622872920000': invalid field format\nunable to parse 'PV Production value=303 1622872980000': invalid field format\nunable to parse 'PV Production value=297 1622873040000': invalid field format\nunable to parse 'PV Production value=287 1622873100000': invalid field format\nunable to parse 'PV Production value=279 1622873160000': invalid field format\nunable to parse 'PV Production value=281 1622873220000': invalid field format\nunable to parse 'PV Production value=281 1622873280000': invalid field format\nunable to parse 'PV Production value=279 1622873340000': invalid field format\nunable to parse 'PV Production value=274 1622873400000': invalid field format\nunable to parse 'PV Production value=274 1622873460000': invalid field format\nunable to parse 'PV Production value=273 1622873520000': invalid field format\nunable to parse 'PV Production value=273 1622873580000': invalid field format\nunable to parse 'PV Production value=281 1622873640000': invalid field format\nunable to parse 'PV Production value=288 1622873700000': invalid field format\nunable to parse 'PV Production value=296 1622873760000': invalid field format\nunable to parse 'PV Production value=296 1622873820000': invalid field format\nunable to parse 'PV Production value=295 1622873880000': invalid field format\nunable to parse 'PV Production value=296 1622873940000': invalid field format\nunable to parse 'PV Production value=283 1622874000000': invalid field format\nunable to parse 'PV Production value=271 1622874060000': invalid field format\nunable to parse 'PV Production value=266 1622874120000': invalid field format\nunable to parse 'PV Production value=240 1622874180000': invalid field format\nunable to parse 'PV Production value=217 1622874240000': invalid field format\nunable to parse 'PV Production value=205 1622874300000': invalid field format\nunable to parse 'PV Production value=217 1622874360000': invalid field format\nunable to parse 'PV Production value=217 1622874420000': invalid field format\nunable to parse 'PV Production value=218 1622874480000': invalid field format\nunable to parse 'PV Production value=217 1622874540000': invalid field format\nunable to parse 'PV Production value=219 1622874600000': invalid field format\nunable to parse 'PV Production value=229 1622874660000': invalid field format\nunable to parse 'PV Production value=251 1622874720000': invalid field format\nunable to parse 'PV Production value=262 1622874780000': invalid field format\nunable to parse 'PV Production value=255 1622874840000': invalid field format\nunable to parse 'PV Production value=259 1622874900000': invalid field format\nunable to parse 'PV Production value=285 1622874960000': invalid field format\nunable to parse 'PV Production value=296 1622875020000': invalid field format\nunable to parse 'PV Production value=307 1622875080000': invalid field format\nunable to parse 'PV Production value=330 1622875140000': invalid field format\nunable to parse 'PV Production value=365 1622875200000': invalid field format\nunable to parse 'PV Production value=399 1622875260000': invalid field format\nunable to parse 'PV Production value=433 1622875320000': invalid field format\nunable to parse 'PV Production value=457 1622875380000': invalid field format\nunable to parse 'PV 
Production value=470 1622875440000': invalid field format\nunable to parse 'PV Production value=503 1622875500000': invalid field format\nunable to parse 'PV Production value=545 1622875560000': invalid field format\nunable to parse 'PV Production value=573 1622875620000': invalid field format\nunable to parse 'PV Production value=620 1622875680000': invalid field format\nunable to parse 'PV Production value=599 1622875740000': invalid field format\nunable to parse 'PV Production value=595 1622875800000': invalid field format\nunable to parse 'PV Production value=626 1622875860000': invalid field format\nunable to parse 'PV Production value=638 1622875920000': invalid field format\nunable to parse 'PV Production value=630 1622875980000': invalid field format\nunable to parse 'PV Production value=619 1622876040000': invalid field format\nunable to parse 'PV Production value=597 1622876100000': invalid field format\nunable to parse 'PV Production value=567 1622876160000': invalid field format\nunable to parse 'PV Production value=533 1622876220000': invalid field format\nunable to parse 'PV Production value=489 1622876280000': invalid field format\nunable to parse 'PV Production value=468 1622876340000': invalid field format\nunable to parse 'PV Production value=464 1622876400000': invalid field format"}

As I said, try it without a space in the measurement name.

No more errors, but I cannot see the values in InfluxDB.

SELECT mean("value") FROM "PV_Production" WHERE $timeFilter GROUP BY time($__interval) fill(null)

but no data

Did you do what I pointed out earlier?

[Edit] If it isn't that then use the influx command line tool to have a look and see what is in the measurement.

@Colin you are a master. It works perfectly, except for a time difference which I can live with for now.

One more thing to make it absolutely automatic.
As you can see below, the GET query I use contains a start and end date and time.

https://api2.tigoenergy.com/api/v3/data/combined?system_id=xxxxx&agg=mi&start=2021-06-6T04:00:00&end=2021-06-6T22:00:00

Is there a way I can use some node to make it always today's date?

What start and end times do you want?

It can be 00:00 - 23:59, but anything between 04:00 and 22:00 will do.

Clear the url in the http request node and pass in the url via msg.url. Add a function node between the inject and the request nodes containing

// get todays date as YYYY-MM-DDT by extracting first 11 characters of ISO format current date/time
let day = new Date().toISOString().substr(0,11)
// build the url
msg.url = `https://api...&start=${day}00:00:00&end=${day}23:59:59`
return msg;

Fill in the dots in the url with the required fixed text, obviously.
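
Regarding the time difference you mentioned: Date.parse() treats a non-ISO string like 2021/05/30 07:00:00 as local time on the machine running Node-RED, so if the Tigo data is in a different timezone the points will be shifted. Something along these lines should correct it (a rough sketch, assuming the data is in UTC+2; change the offset to suit your system):

// sketch: convert the Tigo timestamp to ISO format with an explicit offset
// before parsing, so the result does not depend on the server's timezone.
// "+02:00" is only an example offset.
function parseTigoTimestamp(ts) {
    // "2021/05/30 07:00:00" -> "2021-05-30T07:00:00+02:00"
    return Date.parse(ts.replace(/\//g, "-").replace(" ", "T") + "+02:00")
}
// then in the map use: timestamp: parseTigoTimestamp(splits[0])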

This works perfectly. Thank you.

One more thing. The first function you wrote for me

let measurement = "Panels"  // set this to your measurement
// convert to array by splitting on newline
let values = msg.payload.split("\n")
// discard the first one which is not required
values.shift()
// and the last which is empty
values.pop()
// map the array elements to the format required by influx batch node
values = values.map(function(value) {
    let splits = value.split(",")
    //node.send({payload: value})
    return {
        measurement: measurement, 
        fields: { value: Number(splits[1])},
        timestamp: Date.parse(splits[0])
    }
})

msg.payload = values
return msg;

helps me retrieve the combined data, which contains only two columns, timestamp and value. There is also an API to retrieve per-panel data, but the response contains more columns and looks like this:

{"_msgid":"f12132a1.f466","ulr":"","statusCode":200,"headers":{"date":"Mon, 07 Jun 2021 16:53:54 GMT","server":"Apache","access-control-allow-origin":"*","access-control-allow-headers":"Origin, X-Requested-With, Content-Type, Accept","pragma":"public","expires":"0","cache-control":"must-revalidate, post-check=0, pre-check=0","x-frame-options":"sameorigin","content-length":"2061","connection":"close","content-type":"text/csv","x-node-red-request-node":"28857214"},"responseUrl":"https://api2.tigoenergy.com/api/v3/data/aggregate?system_id=52705&start=2021-06-2T11:00:00&end=2021-06-2T16:00:00&level=min&param=Pin","payload":"DATETIME,84209502,84209503,84209504,84209505,84209506,84209507,84209508,84209509,84209510,84209511,84209512,84209513\n2021/06/02 11:00:00.000,195,196,197,195,195,199,198,113,115,197,197,197\n2021/06/02 11:01:00.000,197,198,200,197,197,201,200,114,116,199,199,200\n2021/06/02 11:02:00.000,197,198,199,197,197,201,200,114,116,199,199,200\n2021/06/02 11:03:00.000,198,200,201,198,198,202,201,115,116,201,201,201\n2021/06/02 11:04:00.000,198,199,201,198,198,202,201,115,116,200,200,201\n2021/06/02 11:05:00.000,199,200,202,199,199,203,202,115,117,201,201,202\n2021/06/02 11:06:00.000,194,195,197,194,194,198,197,113,114,196,197,197\n2021/06/02 11:07:00.000,200,202,204,201,201,206,204,116,118,203,204,204\n2021/06/02 11:08:00.000,203,204,206,203,203,207,206,118,119,205,205,206\n2021/06/02 11:09:00.000,205,206,208,205,205,209,208,119,120,207,207,208\n2021/06/02 11:10:00.000,207,208,210,207,207,211,210,119,121,209,209,210\n2021/06/02 11:11:00.000,207,208,209,207,206,211,210,119,120,209,209,209\n2021/06/02 11:12:00...","redirectList":[]}

As you can see, there are 13 columns separated by commas. Could you modify the above function to retrieve the values from all of the columns?

I am sure you could have a go at it yourself. The code inside the map function converts each line in the data into an object to be sent to influx. As you can probably work out, splits[0] contains the timestamp and splits[1] contains the value after the timestamp. If there are multiple values after the timestamp they will be in splits[1], splits[2] and so on. So if you want them to be written to influx as, for example, value_a, value_b etc. then replace the fields line with something like
fields: { value_a: Number(splits[1]), value_b: Number(splits[2]), value_c: Number(splits[3]), .... },
Replace value_a, value_b etc with whatever field names you want.
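
If you would rather not hard-code every field name, you could also build the fields object from the header row, something like this (an untested sketch; it assumes the first CSV row holds the panel serial numbers and prefixes each field with "panel_"):

let measurement = "Panels"  // set this to your measurement
// convert to array by splitting on newline
let lines = msg.payload.split("\n")
// the first row is the header: DATETIME followed by the panel serial numbers
let headers = lines.shift().split(",")
// drop any blank trailing lines
lines = lines.filter(function (line) { return line.trim().length > 0 })
// one record per row, one field per panel column
msg.payload = lines.map(function (line) {
    let splits = line.split(",")
    let fields = {}
    for (let i = 1; i < splits.length; i++) {
        fields["panel_" + headers[i]] = Number(splits[i])
    }
    return {
        measurement: measurement,
        fields: fields,
        timestamp: Date.parse(splits[0])
    }
})
return msg;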

@Colin I did try it before, and again now with your advice, but I am only getting a value in splits[1] and NaN in the other splits.

let measurement = "Panels"  // set this to your measurement
// convert to array by splitting on newline
let values = msg.payload.split("\n")
// discard the first one which is not required
values.shift()
// and the last which is empty
values.pop()
// map the array elements to the format required by influx batch node
values = values.map(function(value) {
    let splits = value.split(",")
    //node.send({payload: value})
    return {
        measurement: measurement, 
        fields: { Panel1: Number(splits[1]), Panel2: Number(splits[2]), Panel3: Number(splits[3]), Panel4: Number(splits[4]), Panel5: Number(splits[5]), Panel6: Number(splits[6]), Panel7: Number(splits[7]), Panel8: Number(splits[8]), Panel9: Number(splits[9]), Panel10: Number(splits[10]), Panel11: Number(splits[11]), Panel12: Number(splits[12])},
        timestamp: Date.parse(splits[0])
    }
})

msg.payload = values
return msg;


Above is my function. Am I missing anything?

Uncomment that line (remove the //), and send the output of the function node to a debug node. Disconnect the influx node. That will send each row as a message. Make sure they look ok.

@Colin please ignore my last response. I had the API pointing to the wrong measurements. It all works as expected. You are a legend. I will get this all wrapped up and publish it as the solution.


So, thanks to @Colin, we have an integration between Tigo Energy and InfluxDB.

The flow below pulls the daily total, the power summary at 1 min intervals, the power per panel at 1 min intervals, and the voltage per panel at 1 min intervals.

[
    {
        "id": "d0455530.3d0f68",
        "type": "tab",
        "label": "TIGO",
        "disabled": false,
        "info": ""
    },
    {
        "id": "9979f3db.00052",
        "type": "http request",
        "z": "d0455530.3d0f68",
        "name": "Tigo - GET combined every minute",
        "method": "GET",
        "ret": "txt",
        "paytoqs": "body",
        "url": "https://api2.tigoenergy.com/api/v3/data/combined?system_id=52705&agg=mi",
        "tls": "",
        "persist": false,
        "proxy": "",
        "authType": "bearer",
        "x": 260,
        "y": 140,
        "wires": [
            [
                "92e981db.9e502"
            ]
        ]
    },
    {
        "id": "fa203d66.3f5e2",
        "type": "debug",
        "z": "d0455530.3d0f68",
        "name": "",
        "active": true,
        "tosidebar": true,
        "console": false,
        "tostatus": false,
        "complete": "true",
        "targetType": "full",
        "statusVal": "",
        "statusType": "auto",
        "x": 570,
        "y": 300,
        "wires": []
    },
    {
        "id": "a58a338e.020f1",
        "type": "inject",
        "z": "d0455530.3d0f68",
        "name": "Tigo combined 1min - GET every 5min",
        "props": [
            {
                "p": "topic",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "*/5 4-21 * * *",
        "once": false,
        "onceDelay": 0.1,
        "topic": "pv_pin",
        "payloadType": "str",
        "x": 240,
        "y": 80,
        "wires": [
            [
                "9979f3db.00052"
            ]
        ]
    },
    {
        "id": "dafb110a.c7652",
        "type": "influxdb batch",
        "z": "d0455530.3d0f68",
        "influxdb": "8c468c1f.45eef",
        "precision": "ms",
        "retentionPolicy": "",
        "name": "",
        "database": "database",
        "precisionV18FluxV20": "ms",
        "retentionPolicyV18Flux": "",
        "org": "organisation",
        "bucket": "bucket",
        "x": 1530,
        "y": 320,
        "wires": []
    },
    {
        "id": "92e981db.9e502",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Converting to influxdb format",
        "func": "let measurement = \"PV_Production\"  // set this to your measurement\n// convert to array by splitting on newline\nlet values = msg.payload.split(\"\\n\")\n// discard the first one which is not required\nvalues.shift()\n// and the last which is empty\nvalues.pop()\n// map the array elements to the format required by influx batch node\nvalues = values.map(function(value) {\n    let splits = value.split(\",\")\n    //node.send({payload: value})\n    return {\n        measurement: measurement, \n        fields: { value: Number(splits[1]) },\n        timestamp: Date.parse(splits[0])\n    }\n})\n\nmsg.payload = values\nreturn msg;\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 280,
        "y": 200,
        "wires": [
            [
                "fa203d66.3f5e2",
                "dafb110a.c7652"
            ]
        ]
    },
    {
        "id": "c255d714.cc71f8",
        "type": "http request",
        "z": "d0455530.3d0f68",
        "name": "Tigo - GET combined Day Total",
        "method": "GET",
        "ret": "txt",
        "paytoqs": "body",
        "url": "https://api2.tigoenergy.com/api/v3/data/combined?system_id=xxxxx&agg=d",
        "tls": "",
        "persist": false,
        "proxy": "",
        "authType": "bearer",
        "x": 270,
        "y": 320,
        "wires": [
            [
                "ceb9933c.3d21a",
                "fa203d66.3f5e2"
            ]
        ]
    },
    {
        "id": "ceb9933c.3d21a",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Converting to influxdb format",
        "func": "let measurement = \"PV_Day_Total\"  // set this to your measurement\n// convert to array by splitting on newline\nlet values = msg.payload.split(\"\\n\")\n// discard the first one which is not required\nvalues.shift()\n// and the last which is empty\nvalues.pop()\n// map the array elements to the format required by influx batch node\nvalues = values.map(function(value) {\n    let splits = value.split(\",\")\n    //node.send({payload: value})\n    return {\n        measurement: measurement, \n        fields: { value: Number(splits[1]) },\n        timestamp: Date.now()\n    }\n})\n\nmsg.payload = values\nreturn msg;\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 280,
        "y": 380,
        "wires": [
            [
                "dafb110a.c7652",
                "fa203d66.3f5e2"
            ]
        ]
    },
    {
        "id": "9211e53e.693cb8",
        "type": "inject",
        "z": "d0455530.3d0f68",
        "name": "Tigo Day Total - GET every 1min",
        "props": [
            {
                "p": "topic",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "*/1 4-21 * * *",
        "once": false,
        "onceDelay": 0.1,
        "topic": "day_total",
        "payloadType": "str",
        "x": 260,
        "y": 260,
        "wires": [
            [
                "c255d714.cc71f8"
            ]
        ]
    },
    {
        "id": "7a36b24d.c39dbc",
        "type": "http request",
        "z": "d0455530.3d0f68",
        "name": "API",
        "method": "GET",
        "ret": "txt",
        "paytoqs": "body",
        "url": "",
        "tls": "",
        "persist": false,
        "proxy": "",
        "authType": "bearer",
        "x": 830,
        "y": 440,
        "wires": [
            [
                "64c4d4c4.7cc16c"
            ]
        ]
    },
    {
        "id": "7f02a72e.cc05f8",
        "type": "inject",
        "z": "d0455530.3d0f68",
        "name": "Tigo per panel POWER 1min - GET every 5min",
        "props": [
            {
                "p": "ulr",
                "v": "",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "*/5 4-21 * * *",
        "once": false,
        "onceDelay": 0.1,
        "topic": "",
        "payloadType": "str",
        "x": 240,
        "y": 440,
        "wires": [
            [
                "2e3c58b3.166778"
            ]
        ]
    },
    {
        "id": "64c4d4c4.7cc16c",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Converting all columns to influxDB",
        "func": "let measurement = \"Panels\"  // set this to your measurement\n// convert to array by splitting on newline\nlet values = msg.payload.split(\"\\n\")\n// discard the first one which is not required\nvalues.shift()\n// and the last which is empty\nvalues.pop()\n// map the array elements to the format required by influx batch node\nvalues = values.map(function(value) {\n    let splits = value.split(\",\")\n    //node.send({payload: value})\n    return {\n        measurement: measurement, \n        fields: { Panel1: Number(splits[1]), Panel2: Number(splits[2]), Panel3: Number(splits[3]), Panel4: Number(splits[4]), Panel5: Number(splits[5]), Panel6: Number(splits[6]), Panel7: Number(splits[7]), Panel8: Number(splits[8]), Panel9: Number(splits[9]), Panel10: Number(splits[10]), Panel11: Number(splits[11]), Panel12: Number(splits[12])},\n        timestamp: Date.parse(splits[0])\n    }\n})\n\nmsg.payload = values\nreturn msg;\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 260,
        "y": 500,
        "wires": [
            [
                "dafb110a.c7652",
                "fa203d66.3f5e2"
            ]
        ]
    },
    {
        "id": "2e3c58b3.166778",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Unject date NOW to the URL",
        "func": "// get todays date as YYYY-MM-DDT by extracting first 11 characters of ISO format current date/time\nlet day = new Date().toISOString().substr(0,11)\n// build the url\nmsg.url = `https://api2.tigoenergy.com/api/v3/data/aggregate?system_id=xxxxx&agg=mi&start=${day}00:00:00&end=${day}23:59:59&level=min&param=Pin`\nreturn msg;\n\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 600,
        "y": 440,
        "wires": [
            [
                "7a36b24d.c39dbc"
            ]
        ]
    },
    {
        "id": "bc6659c.e2452a8",
        "type": "http request",
        "z": "d0455530.3d0f68",
        "name": "API",
        "method": "GET",
        "ret": "txt",
        "paytoqs": "body",
        "url": "",
        "tls": "",
        "persist": false,
        "proxy": "",
        "authType": "bearer",
        "x": 850,
        "y": 580,
        "wires": [
            [
                "9171d37e.db874"
            ]
        ]
    },
    {
        "id": "2581dd43.28fa72",
        "type": "inject",
        "z": "d0455530.3d0f68",
        "name": "Tigo per panel VOLTAGE 1min - GET every 5min",
        "props": [
            {
                "p": "ulr",
                "v": "",
                "vt": "str"
            }
        ],
        "repeat": "",
        "crontab": "*/5 4-21 * * *",
        "once": false,
        "onceDelay": 0.1,
        "topic": "",
        "payloadType": "str",
        "x": 250,
        "y": 580,
        "wires": [
            [
                "e963b4bb.3e5df8"
            ]
        ]
    },
    {
        "id": "9171d37e.db874",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Converting all columns to influxDB",
        "func": "let measurement = \"Panels_Voltage\"  // set this to your measurement\n// convert to array by splitting on newline\nlet values = msg.payload.split(\"\\n\")\n// discard the first one which is not required\nvalues.shift()\n// and the last which is empty\nvalues.pop()\n// map the array elements to the format required by influx batch node\nvalues = values.map(function(value) {\n    let splits = value.split(\",\")\n    //node.send({payload: value})\n    return {\n        measurement: measurement, \n        fields: { Panel1v: Number(splits[1]), Panel2v: Number(splits[2]), Panel3v: Number(splits[3]), Panel4v: Number(splits[4]), Panel5v: Number(splits[5]), Panel6v: Number(splits[6]), Panel7v: Number(splits[7]), Panel8v: Number(splits[8]), Panel9v: Number(splits[9]), Panel10v: Number(splits[10]), Panel11v: Number(splits[11]), Panel12v: Number(splits[12])},\n        timestamp: Date.parse(splits[0])\n    }\n})\n\nmsg.payload = values\nreturn msg;\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 280,
        "y": 640,
        "wires": [
            [
                "dafb110a.c7652",
                "fa203d66.3f5e2"
            ]
        ]
    },
    {
        "id": "e963b4bb.3e5df8",
        "type": "function",
        "z": "d0455530.3d0f68",
        "name": "Unject date NOW to the URL",
        "func": "// get todays date as YYYY-MM-DDT by extracting first 11 characters of ISO format current date/time\nlet day = new Date().toISOString().substr(0,11)\n// build the url\nmsg.url = `https://api2.tigoenergy.com/api/v3/data/aggregate?system_id=xxxxx&agg=mi&start=${day}00:00:00&end=${day}23:59:59&level=min&param=Vin`\nreturn msg;\n\n\n",
        "outputs": 1,
        "noerr": 0,
        "initialize": "",
        "finalize": "",
        "libs": [],
        "x": 620,
        "y": 580,
        "wires": [
            [
                "bc6659c.e2452a8"
            ]
        ]
    },
    {
        "id": "8c468c1f.45eef",
        "type": "influxdb",
        "hostname": "192.168.10.22",
        "port": "8086",
        "protocol": "http",
        "database": "hassio",
        "name": "IngluxDB-HassioDB",
        "usetls": false,
        "tls": "",
        "influxdbVersion": "1.x",
        "url": "http://192.168.10.22:8086",
        "rejectUnauthorized": true
    }
]

In the flow above you have to specify your system ID in each node that contains a URL, as well as your auth token.

To get the token, run the command below:

curl -v -u "{username}:{password}" "https://api2.tigoenergy.com/api/v3/users/login"

In the response you will receive your auth token.
