Cleaning up datatypes
codepitbull committed Feb 10, 2025
1 parent d579086 commit 1f19d65
Showing 3 changed files with 28 additions and 44 deletions.
S7Client.java
@@ -1,9 +1,7 @@
package com.hivemq.edge.adapters.s7;

import com.github.xingshuangs.iot.protocol.s7.enums.EPlcType;
-import com.github.xingshuangs.iot.protocol.s7.service.MultiAddressRead;
import com.github.xingshuangs.iot.protocol.s7.service.S7PLC;
-import com.github.xingshuangs.iot.protocol.s7.utils.AddressUtil;
import com.hivemq.adapter.sdk.api.data.DataPoint;
import com.hivemq.adapter.sdk.api.factories.DataPointFactory;
import com.hivemq.edge.adapters.s7.config.S7AdapterConfig;
@@ -17,11 +15,6 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;

-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1200;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1500;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_300;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_400;
-
public class S7Client {

private static final Logger log = LoggerFactory.getLogger(S7Client.class);
@@ -40,29 +33,29 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
log.trace("Reading data from addresses {} with type {}", addresses, type);
}
switch (type) {
-case BOOL: return combine(dataPointFactory, addresses, s7PLC.readBoolean(addresses));
+case BOOL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readBoolean(addresses));
case BYTE: return addresses.stream().map(address -> dataPointFactory
.create(address, s7PLC.readByte(address)))
.collect(Collectors.toList());
-case WORD: return combine(dataPointFactory, addresses, s7PLC.readInt16(addresses));
-case DWORD: return combine(dataPointFactory, addresses, s7PLC.readInt32(addresses));
-case LWORD: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+case WORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 2));
+case DWORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 4));
+case LWORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 8));
case USINT: return addresses.stream().map(address -> dataPointFactory
.create(address, Byte.toUnsignedInt(s7PLC.readByte(address))))
.collect(Collectors.toList());
-case UINT: return combine(dataPointFactory, addresses, s7PLC.readUInt16(addresses));
-case UDINT: return combine(dataPointFactory, addresses, s7PLC.readUInt32(addresses));
+case UINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readUInt16(addresses));
+case UDINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readUInt32(addresses));
case ULINT: return addresses.stream()
.map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
.collect(Collectors.toList());
case SINT: return addresses.stream().map(address -> dataPointFactory
.create(address, ((Byte)s7PLC.readByte(address)).shortValue()))
.collect(Collectors.toList());
-case INT: return combine(dataPointFactory, addresses, s7PLC.readInt16(addresses));
-case DINT: return combine(dataPointFactory, addresses, s7PLC.readInt32(addresses));
-case LINT: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
-case REAL: return combine(dataPointFactory, addresses, s7PLC.readFloat32(addresses));
-case LREAL: return combine(dataPointFactory, addresses, s7PLC.readFloat64(addresses));
+case INT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt16(addresses));
+case DINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt32(addresses));
+case LINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+case REAL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readFloat32(addresses));
+case LREAL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readFloat64(addresses));
case CHAR: return addresses.stream().map(address -> dataPointFactory
.create(address, s7PLC.readByte(address)))
.collect(Collectors.toList());
@@ -74,11 +67,11 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
})
.collect(Collectors.toList());
case STRING:
-case WSTRING: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readString).collect(Collectors.toList()));
-case TIME: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTime).collect(Collectors.toList()));
-case LTIME: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
-case DATE: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDate).collect(Collectors.toList()));
-case TOD: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTimeOfDay).collect(Collectors.toList()));
+case WSTRING: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readString).collect(Collectors.toList()));
+case TIME: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTime).collect(Collectors.toList()));
+case LTIME: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+case DATE: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDate).collect(Collectors.toList()));
+case TOD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTimeOfDay).collect(Collectors.toList()));
case LTOD: return addresses.stream()
.map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
.collect(Collectors.toList());
@@ -88,16 +81,19 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
case LDT:return addresses.stream()
.map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
.collect(Collectors.toList());
-case DTL: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDTL).collect(Collectors.toList()));
-case ARRAY: throw new IllegalArgumentException("Arrays not supported");
+case DTL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDTL).collect(Collectors.toList()));
default: {
log.error("Unspported tag-type {} at address {}", type, addresses);
throw new IllegalArgumentException("Unspported tag-type " + type + " at address " + addresses);
}
}
}

-public static List<DataPoint> combine(final @NotNull DataPointFactory dataPointFactory, final @NotNull List<String> addresses, final @NotNull List<?> values) {

+public List<byte[]> readBytes(final List<String> addresses, final int count) {
+return addresses.stream().map(address -> s7PLC.readByte(address, count)).collect(Collectors.toList());
+}
+
+public static List<DataPoint> createDatapointsFromAddressesAndValues(final @NotNull DataPointFactory dataPointFactory, final @NotNull List<String> addresses, final @NotNull List<?> values) {
return IntStream
.range(0, addresses.size())
.mapToObj(i -> dataPointFactory.create(addresses.get(i), values.get(i)))
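The renamed helper pairs each address with the value read at the same index. Below is a minimal, self-contained sketch of that pairing logic; the DataPoint record and the sample WORD payloads are illustrative stand-ins, not part of the commit.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class ZipExample {

    // Illustrative stand-in for com.hivemq.adapter.sdk.api.data.DataPoint.
    record DataPoint(String tagName, Object tagValue) {}

    // Same shape as S7Client#createDatapointsFromAddressesAndValues, with the
    // DataPointFactory reduced to a record constructor. Assumes both lists
    // are index-aligned and equally long, as in the adapter.
    static List<DataPoint> createDatapointsFromAddressesAndValues(
            final List<String> addresses, final List<?> values) {
        return IntStream.range(0, addresses.size())
                .mapToObj(i -> new DataPoint(addresses.get(i), values.get(i)))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Two hypothetical WORD reads, each returned as a raw 2-byte array,
        // matching the new readBytes(addresses, 2) path for WORD.
        List<DataPoint> points = createDatapointsFromAddressesAndValues(
                List.of("DB1.DBW0", "DB1.DBW2"),
                List.of(new byte[]{0x00, 0x2A}, new byte[]{0x00, 0x07}));
        points.forEach(p -> System.out.println(p.tagName() + " -> " + p.tagValue()));
    }
}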
S7DataType.java
@@ -20,12 +20,10 @@
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
-import java.time.LocalTime;
import java.util.List;

import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1200;
import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1500;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_200;
import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_300;
import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_400;

@@ -66,6 +64,7 @@ public enum S7DataType {
LDT(LocalDateTime.class, 64, List.of(S7_1500), "Date and time (year-month-day-hour:minute:second:nanoseconds)", "https://support.industry.siemens.com/cs/mdm/109054417?c=71834521483&lc=en-GE"),
DTL(LocalDateTime.class, 64, List.of(S7_1500), "Date and time (year-month-day-hour:minute:second:nanoseconds)", "https://support.industry.siemens.com/cs/mdm/109054417?c=64682916235&lc=en-GE"),
ARRAY(Byte[].class, -1, List.of(S7_300, S7_400, S7_1200, S7_1500), "Array of type", "https://support.industry.siemens.com/cs/mdm/109054417?c=52352205963&lc=en-GE");

//RAW_BYTE_ARRAY TODO: it's not an actual type but is there in the old implementation

S7DataType(final @NotNull Class<?> javaType, final @NotNull int lengthInBits, final @NotNull List<S7Versions> availableOn, final @NotNull String description, final @NotNull String docs){
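Each S7DataType constant carries its Java mapping, bit width, the PLC models it is available on, a description, and a Siemens documentation link. A short sketch of how that metadata could be queried; the getAvailableOn() accessor is an assumed name, since the diff shows only the constructor.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical helper (not in the commit): list every data type usable on a
// given PLC model. Assumes an accessor getAvailableOn() exposing the
// List<S7Versions> passed to the enum constructor.
static List<S7DataType> typesFor(final S7Versions version) {
    return Arrays.stream(S7DataType.values())
            .filter(type -> type.getAvailableOn().contains(version))
            .collect(Collectors.toList());
}

// e.g. typesFor(S7Versions.S7_1200) would omit LDT and DTL, which the enum
// declares for S7_1500 only.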
S7 adapter config test
@@ -18,6 +18,7 @@
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
+import java.util.List;
import java.util.Map;
import java.util.Optional;

@@ -140,21 +141,9 @@ public void unconvertConfigObject_full_valid() {
}

private @NotNull HiveMQConfigEntity loadConfig(final @NotNull File configFile) {
-final ConfigFileReaderWriter readerWriter = new ConfigFileReaderWriter(new ConfigurationFile(configFile),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock(),
-mock());
+final ConfigFileReaderWriter readerWriter = new ConfigFileReaderWriter(
+new ConfigurationFile(configFile),
+List.of());
return readerWriter.applyConfig();
}

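The test helper now builds the ConfigFileReaderWriter from just the configuration file and an empty list instead of fourteen mock() collaborators. A sketch of how a test might call it; the resource name below is illustrative, not from the commit.

import static org.junit.jupiter.api.Assertions.assertNotNull;

import java.io.File;
import org.junit.jupiter.api.Test;

// Hypothetical JUnit 5 test exercising the simplified loadConfig helper.
// The resource path "/s7-adapter-full-config.xml" is an assumption.
@Test
void loadConfig_returnsEntity() throws Exception {
    final File configFile = new File(getClass().getResource("/s7-adapter-full-config.xml").toURI());
    final HiveMQConfigEntity entity = loadConfig(configFile);
    assertNotNull(entity);
}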