diff --git a/src/hub/controllers/PointsController_Config.ts b/src/hub/controllers/PointsController_Config.ts index 607b3fc4..087d6398 100644 --- a/src/hub/controllers/PointsController_Config.ts +++ b/src/hub/controllers/PointsController_Config.ts @@ -14,7 +14,12 @@ const PointsController_Config: SourceListConfig = { showInTypeName: true, color: "#000000", darkColor: "#ffffff", - sourceTypes: ["Translation2d", "Translation2d[]", "NumberArray"], + sourceTypes: [ + "Translation2d", + "Translation2d[]", + "NumberArray", + "TargetCorner:16f6ac0dedc8eaccb951f4895d9e18b6[]" + ], showDocs: true, options: [ { diff --git a/src/hub/dataSources/nt4/NT4Source.ts b/src/hub/dataSources/nt4/NT4Source.ts index e3ca50a8..f3efc6bf 100644 --- a/src/hub/dataSources/nt4/NT4Source.ts +++ b/src/hub/dataSources/nt4/NT4Source.ts @@ -1,5 +1,5 @@ import Log from "../../../shared/log/Log"; -import { PROTO_PREFIX, STRUCT_PREFIX, getEnabledKey, getURCLKeys } from "../../../shared/log/LogUtil"; +import { PHOTON_PREFIX, PROTO_PREFIX, STRUCT_PREFIX, getEnabledKey, getURCLKeys } from "../../../shared/log/LogUtil"; import LoggableType from "../../../shared/log/LoggableType"; import ProtoDecoder from "../../../shared/log/ProtoDecoder"; import { checkArrayType } from "../../../shared/util"; @@ -196,6 +196,8 @@ export default class NT4Source extends LiveDataSource { } } else if (topic.type.startsWith(PROTO_PREFIX)) { structuredType = ProtoDecoder.getFriendlySchemaType(topic.type.split(PROTO_PREFIX)[1]); + } else if (topic.type.startsWith(PHOTON_PREFIX)) { + structuredType = topic.type.split(PHOTON_PREFIX)[1]; } else if (topic.type === "msgpack") { structuredType = "MessagePack"; } else if (topic.type === "json") { @@ -295,6 +297,9 @@ export default class NT4Source extends LiveDataSource { } else { this.log?.putStruct(key, timestamp, value, schemaType, false); } + } else if (topic.type.startsWith(PHOTON_PREFIX)) { + let schemaType = topic.type.split(PHOTON_PREFIX)[1]; + this.log?.putPhotonStruct(key, 
timestamp, value, schemaType); } else if (topic.type.startsWith(PROTO_PREFIX)) { let schemaType = topic.type.split(PROTO_PREFIX)[1]; this.log?.putProto(key, timestamp, value, schemaType); diff --git a/src/hub/dataSources/schema/CustomSchemas.ts b/src/hub/dataSources/schema/CustomSchemas.ts index b2a70a78..30f8f62d 100644 --- a/src/hub/dataSources/schema/CustomSchemas.ts +++ b/src/hub/dataSources/schema/CustomSchemas.ts @@ -1,5 +1,4 @@ import Log from "../../../shared/log/Log"; -import PhotonSchema from "./PhotonSchema"; import URCLSchema from "./URCLSchema"; import URCLSchemaLegacy from "./URCLSchemaLegacy"; @@ -7,7 +6,6 @@ import URCLSchemaLegacy from "./URCLSchemaLegacy"; const CustomSchemas: Map void> = new Map(); export default CustomSchemas; -CustomSchemas.set("rawBytes", PhotonSchema); // PhotonVision 2023.1.2 CustomSchemas.set("URCL", URCLSchemaLegacy.parseURCLr1); CustomSchemas.set("URCLr2_periodic", URCLSchemaLegacy.parseURCLr2); CustomSchemas.set("URCLr3_periodic", URCLSchema.parseURCLr3); diff --git a/src/hub/dataSources/schema/PhotonSchema.ts b/src/hub/dataSources/schema/PhotonSchema.ts deleted file mode 100644 index 33f3a9df..00000000 --- a/src/hub/dataSources/schema/PhotonSchema.ts +++ /dev/null @@ -1,145 +0,0 @@ -import Log from "../../../shared/log/Log"; - -export default function process(log: Log, key: string, timestamp: number, value: Uint8Array) { - let result = parsePacket(value, timestamp); - saveResult(log, key, timestamp, result); -} - -/** Parses raw data to create a pipeline result. 
*/ -function parsePacket(value: Uint8Array, timestamp: number): PhotonPipelineResult { - let view = new DataView(value.buffer, value.byteOffset, value.byteLength); - let offset = 0; - - let result = new PhotonPipelineResult(); - - result.latency = view.getFloat64(offset); - result.timestamp = timestamp - result.latency; - offset += 8; - - const numTargets = view.getInt8(offset); - offset += 1; - - result.targets = []; - for (let i = 0; i < numTargets; i++) { - let target = new PhotonTrackedTarget(); - target.yaw = view.getFloat64(offset); - offset += 8; - target.pitch = view.getFloat64(offset); - offset += 8; - target.area = view.getFloat64(offset); - offset += 8; - target.skew = view.getFloat64(offset); - offset += 8; - target.fiducialId = view.getInt32(offset); - offset += 4; - - target.bestCameraToTarget = parseTransform3d(view, offset); - offset += 7 * 8; - target.altCameraToTarget = parseTransform3d(view, offset); - offset += 7 * 8; - target.poseAmbiguity = view.getFloat64(offset); - offset += 8; - - target.minAreaRectCorners = []; - for (let j = 0; j < 4; j++) { - let x = view.getFloat64(offset); - offset += 8; - let y = view.getFloat64(offset); - offset += 8; - target.minAreaRectCorners.push({ x: x, y: y }); - } - - target.detectedCorners = []; - const numCorners = view.getInt8(offset); - offset += 1; - for (let j = 0; j < numCorners; j++) { - let x = view.getFloat64(offset); - offset += 8; - let y = view.getFloat64(offset); - offset += 8; - target.detectedCorners.push({ x: x, y: y }); - } - - result.targets.push(target); - } - - return result; -} - -/** Saves a pipeline result to a log file. 
*/ -function saveResult(log: Log, baseKey: string, timestamp: number, result: PhotonPipelineResult) { - log.putNumber(baseKey + "/latency", timestamp, result.latency); - log.putNumber(baseKey + "/timestamp", timestamp, result.timestamp); - - // Loop over every target in the entry - for (const [idx, target] of result.targets.entries()) { - // Loop over every member of the target class - Object.entries(target).forEach(([objectFieldName, objectFieldValue]) => { - // If it's a number, we can log directly - if (typeof objectFieldValue === "number") { - log.putNumber(baseKey + `/target_${idx}/${objectFieldName}`, timestamp, Number(objectFieldValue)); - } - - // If it's an array, it's either a number array or an array of TargetCorner classes - if (Array.isArray(objectFieldValue)) { - // First entry is a number -- log as array - if (typeof objectFieldValue[0] === "number") { - log.putNumberArray(baseKey + `/target_${idx}/${objectFieldName}`, timestamp, objectFieldValue); - } else if (typeof objectFieldValue[0] === "object") { - // we can only ever have TargetCorners, so this parsing code works (for now) - let xArray: number[] = []; - let yArray: number[] = []; - objectFieldValue.forEach((it) => { - xArray.push(it.x); - yArray.push(it.y); - }); - log.putNumberArray(baseKey + `/target_${idx}/${objectFieldName}_x`, timestamp, xArray); - log.putNumberArray(baseKey + `/target_${idx}/${objectFieldName}_y`, timestamp, yArray); - } - } - }); - } -} - -function parseTransform3d(view: DataView, offset: number): number[] { - let tx = view.getFloat64(offset); - offset += 8; - let ty = view.getFloat64(offset); - offset += 8; - let tz = view.getFloat64(offset); - offset += 8; - let qw = view.getFloat64(offset); - offset += 8; - let qx = view.getFloat64(offset); - offset += 8; - let qy = view.getFloat64(offset); - offset += 8; - let qz = view.getFloat64(offset); - offset += 8; - - return [tx, ty, tz, qw, qx, qy, qz]; -} - -class PhotonTargetCorner { - x: number = 0; - y: number = 0; -} - 
-class PhotonTrackedTarget { - yaw: number = 0; - pitch: number = 0; - area: number = 0; - skew: number = 0; - fiducialId: number = 0; - bestCameraToTarget: number[] = []; - altCameraToTarget: number[] = []; - poseAmbiguity: number = 0; - minAreaRectCorners: PhotonTargetCorner[] = []; - detectedCorners: PhotonTargetCorner[] = []; -} - -class PhotonPipelineResult { - latency: number = 0; - timestamp: number = 0; - targets: PhotonTrackedTarget[] = []; -} diff --git a/src/shared/geometry.ts b/src/shared/geometry.ts index 1ff57500..e8931fbd 100644 --- a/src/shared/geometry.ts +++ b/src/shared/geometry.ts @@ -184,6 +184,8 @@ export function grabPosesAuto( return grabRotation2dArray(log, key, timestamp, uuid); case "Rotation3d[]": return grabRotation3dArray(log, key, timestamp, uuid); + case "TargetCorner:16f6ac0dedc8eaccb951f4895d9e18b6[]": + return grabTargetCornerArray(log, key, timestamp, uuid); case "Translation2d": return grabTranslation2d(log, key, timestamp, uuid); case "Translation3d": @@ -354,6 +356,28 @@ export function grabRotation3dArray(log: Log, key: string, timestamp: number, uu ); } +export function grabTargetCornerArray(log: Log, key: string, timestamp: number, uuid?: string): AnnotatedPose3d[] { + return indexArray(getOrDefault(log, key + "/length", LoggableType.Number, timestamp, 0, uuid)).reduce( + (array, index) => array.concat(grabTargetCorner(log, key + "/" + index.toString(), timestamp, uuid)), + [] as AnnotatedPose3d[] + ); +} + +export function grabTargetCorner(log: Log, key: string, timestamp: number, uuid?: string): AnnotatedPose3d[] { + return [ + { + pose: { + translation: translation2dTo3d([ + getOrDefault(log, key + "/x", LoggableType.Number, timestamp, 0, uuid), + getOrDefault(log, key + "/y", LoggableType.Number, timestamp, 0, uuid) + ]), + rotation: Rotation3dZero + }, + annotation: { is2DSource: true } + } + ]; +} + export function grabTranslation2d(log: Log, key: string, timestamp: number, uuid?: string): AnnotatedPose3d[] { return [ {
diff --git a/src/shared/log/Log.ts b/src/shared/log/Log.ts index a0c8d79b..567e2655 100644 --- a/src/shared/log/Log.ts +++ b/src/shared/log/Log.ts @@ -3,7 +3,7 @@ import { Pose2d, Translation2d } from "../geometry"; import { arraysEqual, checkArrayType } from "../util"; import LogField from "./LogField"; import LogFieldTree from "./LogFieldTree"; -import { STRUCT_PREFIX, TYPE_KEY, applyKeyPrefix, getEnabledData, splitLogKey } from "./LogUtil"; +import { PHOTON_PREFIX, STRUCT_PREFIX, TYPE_KEY, applyKeyPrefix, getEnabledData, splitLogKey } from "./LogUtil"; import { LogValueSetAny, LogValueSetBoolean, @@ -15,6 +15,7 @@ import { LogValueSetStringArray } from "./LogValueSets"; import LoggableType from "./LoggableType"; +import PhotonStructDecoder from "./PhotonStructDecoder"; import ProtoDecoder from "./ProtoDecoder"; import StructDecoder from "./StructDecoder"; @@ -24,6 +25,7 @@ export default class Log { private msgpackDecoder = new Decoder(); private structDecoder = new StructDecoder(); private protoDecoder = new ProtoDecoder(); + private photonDecoder = new PhotonStructDecoder(); private fields: { [id: string]: LogField } = {}; private generatedParents: Set = new Set(); // Children of these fields are generated @@ -373,6 +375,11 @@ export default class Log { // Check for struct schema if (key.includes("/.schema/" + STRUCT_PREFIX)) { this.structDecoder.addSchema(key.split(STRUCT_PREFIX)[1], value); + this.photonDecoder.addSchema(key.split(STRUCT_PREFIX)[1], value); + this.attemptQueuedStructures(); + } + if (key.includes("/.schema/" + PHOTON_PREFIX)) { + this.photonDecoder.addSchema(key.split(PHOTON_PREFIX)[1], value); this.attemptQueuedStructures(); } } @@ -586,6 +593,39 @@ export default class Log { } } + /** Writes a photonstruct-encoded raw value to the field. 
+ * + * The schema type should not include "photonstruct:" + */ + putPhotonStruct(key: string, timestamp: number, value: Uint8Array, schemaType: string) { + this.putRaw(key, timestamp, value); + if (this.fields[key].getType() === LoggableType.Raw) { + this.setGeneratedParent(key); + this.setStructuredType(key, schemaType); + let decodedData: { data: unknown; schemaTypes: { [key: string]: string } } | null = null; + try { + decodedData = this.photonDecoder.decode(schemaType, value); + } catch {} + if (decodedData !== null) { + this.putUnknownStruct(key, timestamp, decodedData.data); + Object.entries(decodedData.schemaTypes).forEach(([childKey, schemaType]) => { + // Create the key so it can be dragged even though it doesn't have data + let fullChildKey = key + "/" + childKey; + this.createBlankField(fullChildKey, LoggableType.Empty); + this.processTimestamp(fullChildKey, timestamp); + this.setStructuredType(fullChildKey, schemaType); + }); + } else { + this.queuedStructs.push({ + key: key, + timestamp: timestamp, + value: value, + schemaType: schemaType + }); + } + } + } + /** Writes a struct-encoded raw value to the field. * * The schema type should not include "struct:" or "[]" diff --git a/src/shared/log/LogUtil.ts b/src/shared/log/LogUtil.ts index 1aea5a78..650063c7 100644 --- a/src/shared/log/LogUtil.ts +++ b/src/shared/log/LogUtil.ts @@ -11,6 +11,7 @@ import LoggableType from "./LoggableType"; export const TYPE_KEY = ".type"; export const STRUCT_PREFIX = "struct:"; export const PROTO_PREFIX = "proto:"; +export const PHOTON_PREFIX = "photonstruct:"; export const MAX_SEARCH_RESULTS = 128; export const MERGE_PREFIX = "Log"; export const MERGE_PREFIX_REGEX = new RegExp(/^\/?Log\d+/); diff --git a/src/shared/log/PhotonStructDecoder.ts b/src/shared/log/PhotonStructDecoder.ts new file mode 100644 index 00000000..36452c3f --- /dev/null +++ b/src/shared/log/PhotonStructDecoder.ts @@ -0,0 +1,339 @@ +/** Class to manage decoding Photon structs. 
Like WPIlib structs, but without enums/bitfields for now + * + * Specification: https://github.com/mcm001/photonvision/blob/serde-hashes/photon-serde/README.md#dynamic-decoding + */ +export default class PhotonStructDecoder { + private schemaStrings: { [key: string]: string } = {}; + private schemas: { [key: string]: Schema } = {}; + private static textDecoder = new TextDecoder(); + + getSchemas() { + return this.schemas; + } + + addSchema(name: string, schema: Uint8Array): void { + let schemaStr = PhotonStructDecoder.textDecoder.decode(schema); + if (name in this.schemaStrings) return; + this.schemaStrings[name] = schemaStr; + + // Try to compile any missing schemas + while (true) { + let compileSuccess = false; + Object.keys(this.schemaStrings).forEach((schemaName) => { + if (!(schemaName in this.schemas)) { + let success = this.compileSchema(schemaName, this.schemaStrings[schemaName]); + compileSuccess = compileSuccess || success; + } + }); + if (!compileSuccess) { + // Nothing was compiled (either everything was already + // compiled or a schema dependency is missing) + break; + } + } + } + + private compileSchema(name: string, schema: string): boolean { + let valueSchemaStrs: string[] = schema.split(";").filter((schemaStr) => schemaStr.length > 0); + let valueSchemas: ValueSchema[] = []; + for (let i = 0; i < valueSchemaStrs.length; i++) { + let schemaStr = valueSchemaStrs[i]; + + // check if optional + let isOptional: boolean = false; + const OPTIONAL = "optional "; + if (schemaStr.startsWith(OPTIONAL)) { + isOptional = true; + schemaStr = schemaStr.substring(OPTIONAL.length); + } + + // Remove type from schema string + let schemaStrSplit = schemaStr.split(" ").filter((str) => str.length > 0); + let type = schemaStrSplit.shift() as string; + if (!VALID_TYPE_STRINGS.includes(type) && !(type in this.schemas)) { + // Missing struct, can't finish compiling + return false; + } + let nameStr = schemaStrSplit.join(""); + + // Get name and (bit length or array) + let 
name: string; + let isArray = false; + if (nameStr.includes("[")) { + // Array + let split = nameStr.split("["); + name = split[0]; + const arrayLengthStr = split[1].split("]")[0]; + if (arrayLengthStr === "?") { + isArray = true; // VLA + } else { + throw new Error("Fixed length arrays are unimplemented"); + } + } else { + // Normal value + name = nameStr; + } + + if (isOptional && isArray) { + throw Error("Can't be optional AND array?"); + } + + // Create schema + valueSchemas.push({ + name: name, + type: type, + isVLA: isArray, + isOptional: isOptional + }); + } + + // Save schema + this.schemas[name] = { + valueSchemas: valueSchemas + }; + return true; + } + + /** Converts struct-encoded data with a known schema to an object. */ + decode(name: string, value: Uint8Array | DataView): { data: unknown; schemaTypes: { [key: string]: string } } { + const ret = this.decodeImpl(name, value); + return { + data: ret.data, + schemaTypes: ret.schemaTypes + }; + } + + /** Converts struct-encoded data with a known schema to an object. */ + decodeImpl(name: string, value: Uint8Array | DataView): DecodeResult { + if (!(name in this.schemas)) { + throw new Error("Schema not defined"); + } + + let outputData: { [key: string]: unknown | unknown[] } = {}; + let outputSchemaTypes: { [key: string]: string } = {}; + let schema = this.schemas[name]; + + let dataView = new DataView(value.buffer, value.byteOffset, value.byteLength); + let offset = 0; + + for (let i = 0; i < schema.valueSchemas.length; i++) { + let valueSchema = schema.valueSchemas[i]; + + let isPresent: boolean = true; + let vlaLength: number | null = null; + + if (valueSchema.isOptional) { + isPresent = PhotonStructDecoder.decodeValue(dataView, offset, ValueType.Bool); + offset += VALUE_TYPE_MAX_BITS.get(ValueType.Bool)! 
/ 8; + outputData[valueSchema.name] = null; + } + if (valueSchema.isVLA) { + vlaLength = PhotonStructDecoder.decodeValue(dataView, offset, ValueType.Int8); + offset += VALUE_TYPE_MAX_BITS.get(ValueType.Int8)! / 8; + } + + if (!isPresent) { + continue; + } + + if (VALID_TYPE_STRINGS.includes(valueSchema.type)) { + // base wpilib-defined struct + + let type = valueSchema.type as ValueType; + + if (vlaLength !== null) { + outputSchemaTypes[valueSchema.name] = type + "[]"; + + let inner: unknown[] = []; + for (let i = 0; i < vlaLength; i++) { + inner.push(PhotonStructDecoder.decodeValue(dataView, offset, type)); + offset += VALUE_TYPE_MAX_BITS.get(type)! / 8; + } + outputData[valueSchema.name] = inner; + outputData[valueSchema.name + "/length"] = vlaLength; + } else { + outputSchemaTypes[valueSchema.name] = type; + outputData[valueSchema.name] = PhotonStructDecoder.decodeValue(dataView, offset, type); + offset += VALUE_TYPE_MAX_BITS.get(type)! / 8; + } + } else { + // Child struct + if (vlaLength !== null) { + outputSchemaTypes[valueSchema.name] = valueSchema.type + "[]"; + let inner: unknown[] = []; + for (let i = 0; i < vlaLength; i++) { + let child = this.decodeImpl( + valueSchema.type, + new DataView(dataView.buffer, dataView.byteOffset + offset, dataView.byteLength - offset) + ); + inner.push(child.data); + + offset += child.bytesConsumed; + + Object.keys(child.schemaTypes).forEach((field) => { + outputSchemaTypes[valueSchema.name + "/" + i + "/" + field] = child.schemaTypes[field]; + }); + } + outputData[valueSchema.name] = inner; + outputData[valueSchema.name + "/length"] = vlaLength; + } else { + outputSchemaTypes[valueSchema.name] = valueSchema.type; + let child = this.decodeImpl( + valueSchema.type, + new DataView(dataView.buffer, dataView.byteOffset + offset, dataView.byteLength - offset) + ); + + // Write down how many bytes we consumed + offset += child.bytesConsumed; + outputData[valueSchema.name] = child.data; + + 
Object.keys(child.schemaTypes).forEach((field) => { + outputSchemaTypes[valueSchema.name + "/" + field] = child.schemaTypes[field]; + }); + } + } + } + + // NOTE(review): removed leftover debug console.log(outputSchemaTypes) + return { + data: outputData, + schemaTypes: outputSchemaTypes, + bytesConsumed: offset + }; + } + + /** Decode a uint8 array as a single value based on the known type. */ + private static decodeValue(dataView: DataView, offset: number, type: ValueType): any { + let output: any; + switch (type) { + case ValueType.Bool: + output = dataView.getUint8(offset) > 0; + break; + case ValueType.Char: + output = this.textDecoder.decode( + new DataView(dataView.buffer, dataView.byteOffset + offset, dataView.byteLength - offset) + ); + break; + case ValueType.Int8: + output = dataView.getInt8(offset); + break; + case ValueType.Int16: + output = dataView.getInt16(offset, true); + break; + case ValueType.Int32: + output = dataView.getInt32(offset, true); + break; + case ValueType.Int64: + // JS doesn't support int64, get as close as possible + output = Number(dataView.getBigInt64(offset, true)); + break; + case ValueType.Uint8: + output = dataView.getUint8(offset); + break; + case ValueType.Uint16: + output = dataView.getUint16(offset, true); + break; + case ValueType.Uint32: + output = dataView.getUint32(offset, true); + break; + case ValueType.Uint64: + // JS doesn't support uint64, get as close as possible + output = Number(dataView.getBigUint64(offset, true)); + break; + case ValueType.Float: + case ValueType.Float32: + output = dataView.getFloat32(offset, true); + break; + case ValueType.Double: + case ValueType.Float64: + output = dataView.getFloat64(offset, true); + break; + } + return output; + } + + /** Returns a serialized version of the data from this decoder. 
*/ + toSerialized(): any { + return { + schemaStrings: this.schemaStrings, + schemas: this.schemas + }; + } + + /** Creates a new decoder based on the data from `toSerialized()` */ + static fromSerialized(serializedData: any) { + let decoder = new PhotonStructDecoder(); + decoder.schemaStrings = serializedData.schemaStrings; + decoder.schemas = serializedData.schemas; + return decoder; + } +} + +interface DecodeResult { + data: unknown; + schemaTypes: { [key: string]: string }; + bytesConsumed: number; +} + +interface Schema { + valueSchemas: ValueSchema[]; +} + +interface ValueSchema { + // Field name (eg "yaw") + name: string; + // Type string (eg "bool", or something custom like "TrackedTarget:123456abcd") + type: ValueType | string; + // If this is a VLA type + isVLA: boolean; + // If the type is optional + isOptional: boolean; +} + +enum ValueType { + Bool = "bool", + Char = "char", + Int8 = "int8", + Int16 = "int16", + Int32 = "int32", + Int64 = "int64", + Uint8 = "uint8", + Uint16 = "uint16", + Uint32 = "uint32", + Uint64 = "uint64", + Float = "float", + Float32 = "float32", + Double = "double", + Float64 = "float64" +} + +const VALID_TYPE_STRINGS = Object.values(ValueType) as string[]; + +const BITFIELD_VALID_TYPES = [ + ValueType.Bool, + ValueType.Int8, + ValueType.Int16, + ValueType.Int32, + ValueType.Int64, + ValueType.Uint8, + ValueType.Uint16, + ValueType.Uint32, + ValueType.Uint64 +]; + +const VALUE_TYPE_MAX_BITS = new Map([ + [ValueType.Bool, 8], + [ValueType.Char, 8], + [ValueType.Int8, 8], + [ValueType.Int16, 16], + [ValueType.Int32, 32], + [ValueType.Int64, 64], + [ValueType.Uint8, 8], + [ValueType.Uint16, 16], + [ValueType.Uint32, 32], + [ValueType.Uint64, 64], + [ValueType.Float, 32], + [ValueType.Float32, 32], + [ValueType.Double, 64], + [ValueType.Float64, 64] +]); diff --git a/src/sourceListHelp.ts b/src/sourceListHelp.ts index 423c7a47..cc4c9fd8 100644 --- a/src/sourceListHelp.ts +++ b/src/sourceListHelp.ts @@ -61,7 +61,7 @@ 
window.addEventListener("message", (event) => { if (Object.values(LoggableType).includes(type)) { return type.replaceAll("Array", "[]").toLowerCase(); } else { - return type; + return type.split(":")[0]; } });