[PluginID] Add support for over 255 pluginIDs #5083

Open: wants to merge 27 commits into mega from feature/Over_255_pluginIDs

Commits (27)
234decd  [PluginID] Add support for over 255 pluginIDs (TD-er, Jul 8, 2024)
b125d9a  [Build] Fix building on ESP8266 (TD-er, Jul 8, 2024)
094594e  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Jul 10, 2024)
b816de8  [ESP-IDF5.x] Update to latest SDK build (fixes for webserver timeout) (TD-er, Jul 10, 2024)
ddd2249  [Math] Improve intuitively matching & printing floating point values (TD-er, Jul 11, 2024)
5c8ee71  [Build] Reduce bin size on ESP8266 Collection D (TD-er, Jul 12, 2024)
b4aad5e  [Build] Reduce build size, disabling float print optimization on ESP8266 (TD-er, Jul 12, 2024)
098485e  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Jul 13, 2024)
cbe732d  [GPS] Improve GPS time stability (TD-er, Jul 15, 2024)
3c527cf  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Jul 15, 2024)
d5142a6  [Controller Settings] Reserve bits for TLS type in controller settings (TD-er, Jul 15, 2024)
b4ff34e  [GPS] Improve GPS resolution (TD-er, Jul 19, 2024)
9550e4f  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Jul 27, 2024)
f05d924  [Build] Remove CUL reader from ESP8266 collection D build due to size (TD-er, Jul 27, 2024)
8c84651  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Aug 22, 2024)
ab0880e  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Sep 24, 2024)
8668f8f  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Sep 26, 2024)
f601433  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Sep 28, 2024)
f14b359  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Sep 28, 2024)
d42db9d  [GPIO] Fix build warnings on ESP8266 for GPIO direction (TD-er, Sep 29, 2024)
4a0504b  [ESPEasy p2p] Fix sending task value data on ESP8266 (TD-er, Sep 29, 2024)
1f4e3fa  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Sep 29, 2024)
3f86f3d  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Oct 2, 2024)
c70724c  [Controller] Fix really strange merge issue (return statement got lost) (TD-er, Oct 2, 2024)
056c90c  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Oct 2, 2024)
9e8330d  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Oct 3, 2024)
6286391  Merge branch 'mega' into feature/Over_255_pluginIDs (TD-er, Oct 7, 2024)
218 changes: 110 additions & 108 deletions misc/CacheController/ParseCacheReaderBulkUpload.py
@@ -1,108 +1,110 @@
import csv
import ctypes
import datetime


def isNewSampleSet(data):
    # Each sample on a new CSV line as is done by dump8.htm
    return True

    # We trigger on the event of the GPS to flush all values
    # Thus consider this the start of a new row
    #return data.pluginID == 82  # GPS plugin
    #return data.TaskIndex == 1


def decode(bytestream):

    # struct C016_binary_element
    # {
    #   float values[VARS_PER_TASK]{};
    #   unsigned long _timestamp{};  // Unix timestamp
    #   taskIndex_t TaskIndex{INVALID_TASK_INDEX};
    #   pluginID_t pluginID{INVALID_PLUGIN_ID};
    #   Sensor_VType sensorType{Sensor_VType::SENSOR_TYPE_NONE};
    #   uint8_t valueCount{};
    # };

    # FIXME TD-er: Must fix fetching pluginID > 255. See C016_binary_element

    class C016_binary_element(ctypes.Structure):
        _fields_ = (
            ('val1', ctypes.c_float),
            ('val2', ctypes.c_float),
            ('val3', ctypes.c_float),
            ('val4', ctypes.c_float),
            ('timestamp', ctypes.c_ulong),
            ('TaskIndex', ctypes.c_uint8),
            ('pluginID', ctypes.c_uint8),
            ('sensorType', ctypes.c_uint8),
            ('valueCount', ctypes.c_uint8)
        )

        def __str__(self):
            return "{}: {{{}}}".format(self.__class__.__name__,
                                       ", ".join(["{}: {}".format(field[0],
                                                                  getattr(self, field[0]))
                                                  for field in self._fields_]))

    ex1 = C016_binary_element.from_buffer_copy(bytestream)
    return ex1


def processFile(filename):
    # Some example header, used for testing
    # Should use the output sent by cachereader.sendtaskinfo command
    header = "UNIX timestamp;UTC timestamp;task index;plugin ID;pms5003#cnt1.0;pms5003#cnt2.5;pms5003#cnt5.0;pms5003#cnt10;sunrise#co2;sunrise#T;sunrise#;sunrise#;bme280#Temperature;bme280#Humidity;bme280#Pressure;bme280#;bat#Analog;bat#;bat#;bat#;gps#long;gps#lat;gps#alt;gps#spd;batBackup#Analog;batBackup#;batBackup#;batBackup#;cachereader#FileNr;cachereader#FilePos;cachereader#;cachereader#;ADXL345#Pitch;ADXL345#Roll;ADXL345#Z;ADXL345#X;sysinfo#uptime;sysinfo#freeheap;sysinfo#rssi;sysinfo#load;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;#;bme2#Temperature;bme2#Humidity;bme2#Pressure;bme2#;pms1#pm1_0;pms1#pm2_5;pms1#pm10;pms1#cnt5_0;pms2#cnt0_3;pms2#cnt0_5;pms2#cnt1_0;pms2#cnt2_5;CO2#co2;CO2#temp;CO2#co2_prev;CO2#temp_prev;analog#carBat;analog#backupBat;analog#;analog#;gps2#hdop;gps2#satvis;gps2#sattracked;gps2#chksumfail"
    csvheader = header.split(';')
    nrcols = len(csvheader)
    fileIn = open(filename, "r")

    firstline = True

    with open('decoded.csv', 'w', newline='') as fileOut:
        writer = csv.writer(fileOut)
        writer.writerow(csvheader)

        row_out = [0] * nrcols

        for line in fileIn:
            samples = line.split(';')
            length = len(samples)

            # Skip the first 2 elements on the line
            # filenr and filepos
            i = 2

            while i < length:
                if len(samples[i]) == 48:
                    data = decode(bytes.fromhex(samples[i]))
                    if data.TaskIndex < 32:  # and data.timestamp > 1674485061:
                        if isNewSampleSet(data):
                            if firstline:
                                firstline = False
                            else:
                                writer.writerow(row_out)

                        baseIndex = data.TaskIndex * 4 + 4
                        row_out[baseIndex + 0] = data.val1
                        row_out[baseIndex + 1] = data.val2
                        row_out[baseIndex + 2] = data.val3
                        row_out[baseIndex + 3] = data.val4

                        row_out[0] = data.timestamp
                        dt = datetime.datetime.fromtimestamp(data.timestamp)
                        row_out[1] = dt
                        row_out[2] = data.TaskIndex
                        row_out[3] = data.pluginID

                        print("{}: {}".format(samples[i], data))
                i += 1

        writer.writerow(row_out)
        fileOut.close()


if __name__ == '__main__':
    processFile("upload_18.csv")

15 changes: 8 additions & 7 deletions src/_C013.cpp
@@ -124,7 +124,7 @@ void C013_SendUDPTaskInfo(uint8_t destUnit, uint8_t sourceTaskIndex, uint8_t des
   infoReply.sourceUnit = Settings.Unit;
   infoReply.sourceTaskIndex = sourceTaskIndex;
   infoReply.destTaskIndex = destTaskIndex;
-  infoReply.deviceNumber = pluginID;
+  infoReply.setPluginID(pluginID);
   infoReply.destUnit = destUnit;

   if (destUnit == 0)
@@ -149,7 +149,7 @@ void C013_SendUDPTaskData(struct EventStruct *event, uint8_t destUnit, uint8_t d
   dataReply.sourceUnit = Settings.Unit;
   dataReply.sourceTaskIndex = event->TaskIndex;
   dataReply.destTaskIndex = destTaskIndex;
-  dataReply.deviceNumber = Settings.getPluginID_for_task(event->TaskIndex);
+  dataReply.setPluginID(Settings.getPluginID_for_task(event->TaskIndex));

   // FIXME TD-er: We should check for sensorType and pluginID on both sides.
   // For example sending different sensor type data from one dummy to another is probably not going to work well
@@ -264,7 +264,7 @@ void C013_Receive(struct EventStruct *event) {
   const pluginID_t currentPluginID = Settings.getPluginID_for_task(infoReply.destTaskIndex);
   bool mustUpdateCurrentTask = false;

-  if (currentPluginID == infoReply.deviceNumber) {
+  if (currentPluginID == infoReply.getPluginID()) {
     // Check to see if task already is set to receive from this host
     if ((Settings.TaskDeviceDataFeed[infoReply.destTaskIndex] == infoReply.sourceUnit) &&
         Settings.TaskDeviceEnabled[infoReply.destTaskIndex]) {
@@ -273,10 +273,11 @@
   }

   if ((mustUpdateCurrentTask || !validPluginID_fullcheck(currentPluginID)) &&
-      supportedPluginID(infoReply.deviceNumber))
+      supportedPluginID(infoReply.getPluginID()))
   {
     taskClear(infoReply.destTaskIndex, false);
-    Settings.TaskDeviceNumber[infoReply.destTaskIndex] = infoReply.deviceNumber.value;
+    // FIXME TD-er: Must find some extra bits to extend the pluginID beyond 255
+    Settings.setPluginID_for_task(infoReply.destTaskIndex, infoReply.getPluginID());
     Settings.TaskDeviceDataFeed[infoReply.destTaskIndex] = infoReply.sourceUnit; // remote feed store unit nr sending the data

     if (mustUpdateCurrentTask) {
@@ -285,7 +286,7 @@

       constexpr pluginID_t DUMMY_PLUGIN_ID{ 33 };

-      if ((infoReply.deviceNumber == DUMMY_PLUGIN_ID) && (infoReply.sensorType != Sensor_VType::SENSOR_TYPE_NONE)) {
+      if ((infoReply.getPluginID() == DUMMY_PLUGIN_ID) && (infoReply.sensorType != Sensor_VType::SENSOR_TYPE_NONE)) {
         // Received a dummy device and the sensor type is actually set
         Settings.TaskDevicePluginConfig[infoReply.destTaskIndex][0] = static_cast<int16_t>(infoReply.sensorType);
       }
@@ -372,7 +373,7 @@ void C013_Receive(struct EventStruct *event) {
   if (loglevelActiveFor(LOG_LEVEL_ERROR)) {
     String log = concat(F("P2P data : PluginID mismatch for task "), dataReply.destTaskIndex + 1);
     log += concat(F(" from unit "), dataReply.sourceUnit);
-    log += concat(F(" remote: "), dataReply.deviceNumber.value);
+    log += concat(F(" remote: "), dataReply.getPluginID().value);
     log += concat(F(" local: "), Settings.getPluginID_for_task(dataReply.destTaskIndex).value);
     addLogMove(LOG_LEVEL_ERROR, log);
   }
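The pattern in this diff replaces direct reads and writes of the 8-bit deviceNumber field with getPluginID()/setPluginID() accessors, so the p2p code no longer depends on that field being exactly one byte. Below is a hypothetical sketch of that accessor pattern (the real ESPEasy struct and accessor implementations are not shown in this hunk), assuming for illustration that the packet keeps an 8-bit deviceNumber for now and that pluginID_t wraps a wider .value, which is where the FIXME about finding extra bits for pluginIDs above 255 would come in.

// Hypothetical sketch, not the actual ESPEasy p2p struct: shows the accessor
// pattern the diff switches to, so callers stop touching the raw wire field.
#include <cstdint>

struct pluginID_t_sketch {     // stand-in; ESPEasy's pluginID_t exposes .value
  uint16_t value = 0;
};

struct p2p_info_sketch {
  uint8_t deviceNumber = 0;    // existing 8-bit field in the p2p packet

  void setPluginID(pluginID_t_sketch id) {
    // Only the low 8 bits fit today; the FIXME above is about finding spare
    // bits in the packet to carry the high part of pluginIDs above 255.
    deviceNumber = static_cast<uint8_t>(id.value);
  }

  pluginID_t_sketch getPluginID() const {
    pluginID_t_sketch id;
    id.value = deviceNumber;
    return id;
  }
};

int main() {
  p2p_info_sketch info;
  pluginID_t_sketch id;
  id.value = 33;               // e.g. the DUMMY_PLUGIN_ID used in C013_Receive
  info.setPluginID(id);
  return info.getPluginID().value == 33 ? 0 : 1;
}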