This repository has been archived by the owner on May 20, 2024. It is now read-only.

1.1.0 version is ready #3

Merged · 4 commits · Dec 21, 2023
428 changes: 427 additions & 1 deletion README.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@coremarine/nmea-parser",
"version": "1.0.0",
"version": "1.1.0",
"description": "Library to parse NMEA 0183 sentences",
"author": "CoreMarine",
"license": "ISC",
17 changes: 10 additions & 7 deletions src/parser.ts
@@ -2,8 +2,8 @@ import { readdirSync } from 'node:fs'
import Path from 'node:path'
import { END_FLAG, END_FLAG_LENGTH, MAX_CHARACTERS, START_FLAG, START_FLAG_LENGTH } from "./constants";
import { BooleanSchema, NMEALikeSchema, NaturalSchema, ProtocolsInputSchema, StringSchema } from "./schemas";
import { Data, FieldType, NMEAKnownSentence, NMEAParser, NMEAPreParsed, NMEASentence, NMEAUknownSentence, ParserSentences, ProtocolsFile, ProtocolsInput, StoredSentence, StoredSentences } from "./types";
import { getStoreSentences, readProtocolsFile } from './protocols';
import { Data, FieldType, FieldUnknown, NMEAKnownSentence, NMEAParser, NMEAPreParsed, NMEASentence, NMEAUknownSentence, ParserSentences, ProtocolsFile, ProtocolsInput, StoredSentence, StoredSentences } from "./types";
import { getStoreSentences, readProtocolsFile, readProtocolsString } from './protocols';
import { getNMEAUnparsedSentence } from './sentences';


@@ -41,7 +41,7 @@ export class Parser implements NMEAParser {

private readProtocols(input: ProtocolsInput): ProtocolsFile {
if (input.file !== undefined) return readProtocolsFile(input.file)
if (input.content !== undefined) return input.content
if (input.content !== undefined) return readProtocolsString(input.content)
if (input.protocols !== undefined) return { protocols: input.protocols }
throw new Error('Invalid protocols to add')
}
@@ -97,19 +97,22 @@ export class Parser implements NMEAParser {
}

private getUnknowFrame(sentence: NMEAPreParsed): NMEAUknownSentence {
return { ...sentence, protocol: { name: 'UNKNOWN' } }
const fields: FieldUnknown[] = sentence.data.map(value => ({
name: 'unknown', type: 'string', data: value
}))
return { ...sentence, fields, protocol: { name: 'UNKNOWN' } }
}

private getKnownFrame(preparsed: NMEAPreParsed): NMEAKnownSentence | null {
const storedSentence = this._sentences.get(preparsed.sentence) as StoredSentence
// Bad known frame
if (storedSentence.fields.length !== preparsed.fields.length) {
console.debug(`Invalid ${preparsed.sentence} sentence -> it has to have ${storedSentence.fields.length} fields but it contains ${preparsed.fields.length}`)
if (storedSentence.fields.length !== preparsed.data.length) {
console.debug(`Invalid ${preparsed.sentence} sentence -> it has to have ${storedSentence.fields.length} fields but it contains ${preparsed.data.length}`)
return null
}
try {
const knownSentence: NMEAKnownSentence = {...preparsed, ...storedSentence, data: [] } as NMEAKnownSentence
preparsed.fields.forEach((value, index) => {
preparsed.data.forEach((value, index) => {
const type = knownSentence.fields[index].type
const data = this.getField(value, type)
knownSentence.fields[index].data = data
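
With this change, the `content` option of `addProtocols` accepts the raw YAML text of a protocols file (parsed internally by the new `readProtocolsString`) instead of an already-parsed protocols object. A minimal usage sketch, assuming a protocols YAML file on disk (the path is illustrative):

import fs from 'node:fs'
import Parser from '@coremarine/nmea-parser'

const parser = new Parser()
// Read the protocols YAML as plain text; the parser validates it internally.
const content = fs.readFileSync('./protocols/custom.yaml', 'utf-8')
parser.addProtocols({ content })
console.log(parser.getProtocols()) // now includes the protocols declared in the YAML
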
8 changes: 6 additions & 2 deletions src/protocols.ts
@@ -14,11 +14,15 @@ export const createJSONSchema = (input: JSONSchemaInput) => {
fs.writeFileSync(FILE, CONTENT)
}

export const readProtocolsString = (content: string): ProtocolsFile => {
const fileData = yaml.load(content)
return ProtocolsFileSchema.parse(fileData)
}

export const readProtocolsFile = (file: string): ProtocolsFile => {
const filename = StringSchema.parse(file)
const content = fs.readFileSync(filename, 'utf-8')
const fileData = yaml.load(content)
return ProtocolsFileSchema.parse(fileData)
return readProtocolsString(content)
}

const getStoreSentencesFromProtocol = (protocol: Protocol) => {
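
`readProtocolsFile` now delegates to the new `readProtocolsString`, so the file and string inputs share the same yaml.load + ProtocolsFileSchema.parse step. A short sketch of that equivalence (the fixture path mirrors the one used in tests/index.test.ts):

import fs from 'node:fs'
import path from 'node:path'
import { readProtocolsFile, readProtocolsString } from './protocols'

const NORSUB_FILE = path.join(__dirname, 'norsub.yaml')
const fromFile = readProtocolsFile(NORSUB_FILE)
const fromString = readProtocolsString(fs.readFileSync(NORSUB_FILE, 'utf-8'))
// Both are ProtocolsFile objects validated by ProtocolsFileSchema, so they describe the same protocols.
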
13 changes: 10 additions & 3 deletions src/schemas.ts
@@ -51,6 +51,12 @@ export const FieldSchema = z.object({
note: StringSchema.optional()
})

export const FieldUnknownSchema = z.object({
name: z.literal('unknown'),
type: z.literal('string'),
data: StringSchema,
})

export const ProtocolSentenceSchema = z.object({
sentence: StringSchema,
fields: z.array(FieldSchema),
@@ -74,7 +80,7 @@ export const ProtocolsFileSchema = z.object({ protocols: z.array(ProtocolSchema)

export const ProtocolsInputSchema = z.object({
file: StringSchema.optional(),
content: ProtocolsFileSchema.optional(),
content: StringSchema.optional(),
protocols: z.array(ProtocolSchema).optional()
})

@@ -106,7 +112,7 @@ export const NMEAUnparsedSentenceSchema = z.object({
raw: StringSchema,
sentence: StringSchema,
checksum: NaturalSchema,
fields: StringArraySchema
data: StringArraySchema
})

export const NMEAPreParsedSentenceSchema = NMEAUnparsedSentenceSchema.extend({ timestamp: NaturalSchema })
@@ -123,7 +129,8 @@ export const StoredSentenceDataSchema = StoredSentenceSchema.extend({
})

export const NMEAUknownSentenceSchema = NMEAPreParsedSentenceSchema.extend({
protocol: z.object({ name: z.literal('UNKNOWN') })
protocol: z.object({ name: z.literal('UNKNOWN') }),
fields: z.array(FieldUnknownSchema),
})

export const NMEAKnownSentenceSchema = StoredSentenceDataSchema.extend({
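
With `FieldUnknownSchema` in place, an UNKNOWN sentence now carries a `fields` array mirroring its raw `data` values. A sketch of the resulting shape, with illustrative values only:

// Illustrative object only: raw, checksum and timestamp are made up for this example.
const unknownExample = {
  raw: '$XXX,1,2*5B\r\n',
  sentence: 'XXX',
  checksum: 0x5b,
  data: ['1', '2'],
  timestamp: 1703116800000,
  protocol: { name: 'UNKNOWN' },
  fields: [
    { name: 'unknown', type: 'string', data: '1' },
    { name: 'unknown', type: 'string', data: '2' },
  ],
}
// This is the shape NMEAUknownSentenceSchema describes after this change.
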
11 changes: 6 additions & 5 deletions src/sentences.ts
@@ -1,7 +1,7 @@
import { getChecksum, numberChecksumToString, stringChecksumToNumber } from "./checksum";
import { CHECKSUM_LENGTH, DELIMITER, END_FLAG_LENGTH, MINIMAL_LENGTH, SEPARATOR, START_FLAG, START_FLAG_LENGTH } from "./constants";
import { NMEALikeSchema, NMEAUknownSentenceSchema, NMEAUnparsedSentenceSchema } from "./schemas";
import { Data, FieldType, NMEALike, NMEAPreParsed, NMEAUknownSentence, NMEAUnparsedSentence, StoredSentence } from "./types";
import { Data, FieldType, FieldUnknown, NMEALike, NMEAPreParsed, NMEAUknownSentence, NMEAUnparsedSentence, StoredSentence } from "./types";
import { isLowerCharASCII, isNumberCharASCII, isUpperCharASCII } from "./utils";
// GET NMEA SENTENCE
export const isNMEAFrame = (text: string): boolean => {
@@ -46,14 +46,15 @@ export const isNMEAFrame = (text: string): boolean => {
export const getNMEAUnparsedSentence = (text: string): NMEAUnparsedSentence | null => {
if (!isNMEAFrame(text)) return null
const raw = text
const [data, cs] = raw.slice(1, -END_FLAG_LENGTH).split(DELIMITER)
const [info, cs] = raw.slice(1, -END_FLAG_LENGTH).split(DELIMITER)
const checksum = stringChecksumToNumber(cs)
const [sentence, ...fields] = data.split(SEPARATOR)
return NMEAUnparsedSentenceSchema.parse({ raw, sentence, checksum, fields })
const [sentence, ...data] = info.split(SEPARATOR)
return NMEAUnparsedSentenceSchema.parse({ raw, sentence, checksum, data })
}

export const getUnknownSentence = (sentence: NMEAPreParsed): NMEAUknownSentence => {
const unknowFrame = {...sentence, protocol: { name: 'UNKNOWN' } }
const fields: FieldUnknown[] = sentence.data.map(value => ({ name: 'unknown', type: 'string', data: value }))
const unknowFrame = {...sentence, protocol: { name: 'UNKNOWN' }, fields }
const parsed = NMEAUknownSentenceSchema.safeParse(unknowFrame)
if (parsed.success) return parsed.data
throw new Error(parsed.error.message)
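
In `getNMEAUnparsedSentence` the array of raw values is renamed from `fields` to `data`, freeing `fields` for the FieldUnknown descriptors built in `getUnknownSentence`. A sketch of the rename, assuming the example frame passes isNMEAFrame (the checksum shown matches this payload):

import { getNMEAUnparsedSentence } from './sentences'

const frame = '$GPAAM,A,A,0.10,N,WPTNME*32\r\n'
const unparsed = getNMEAUnparsedSentence(frame)
// Before this PR: { raw, sentence: 'GPAAM', checksum: 0x32, fields: ['A', 'A', '0.10', 'N', 'WPTNME'] }
// After this PR:  { raw, sentence: 'GPAAM', checksum: 0x32, data: ['A', 'A', '0.10', 'N', 'WPTNME'] }
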
3 changes: 2 additions & 1 deletion src/types.ts
@@ -5,12 +5,13 @@ import {
VersionSchema, JSONSchemaInputSchema,
StoredSentenceSchema, StoredSentencesSchema,
NMEALikeSchema, NMEAUnparsedSentenceSchema, NMEAPreParsedSentenceSchema,
DataSchema, FieldParsedSchema, NMEASentenceSchema, NMEAUknownSentenceSchema, NMEAKnownSentenceSchema, ProtocolsInputSchema,
DataSchema, FieldParsedSchema, NMEASentenceSchema, NMEAUknownSentenceSchema, NMEAKnownSentenceSchema, ProtocolsInputSchema, FieldUnknownSchema,
} from './schemas'

// PROTOCOLS
export type FieldType = z.infer<typeof FieldTypeSchema>
export type Field = z.infer<typeof FieldSchema>
export type FieldUnknown = z.infer<typeof FieldUnknownSchema>
export type ProtocolSentence = z.infer<typeof ProtocolSentenceSchema>
export type Version = z.infer<typeof VersionSchema>
export type Protocol = z.infer<typeof ProtocolSchema>
211 changes: 211 additions & 0 deletions tests/index.test.ts
@@ -0,0 +1,211 @@
import fs from 'node:fs'
import path from 'node:path'
import { describe, test, expect } from 'vitest'
import Parser from '../src'
import { generateSentence } from '../src/sentences'
import { NMEAKnownSentenceSchema, NMEAUknownSentenceSchema } from '../src/schemas'
import { DELIMITER, END_FLAG, END_FLAG_LENGTH, SEPARATOR, START_FLAG_LENGTH } from '../src/constants'
import { getChecksum, numberChecksumToString } from '../src/checksum'
import { readProtocolsFile } from '../src/protocols'
import { Protocol, ProtocolsFile } from '../src/types'

const NORSUB_FILE = path.join(__dirname, 'norsub.yaml')

describe('Parser', () => {
test('Default constructor', () => {
const parser = new Parser()
const parserProtocols = parser.getProtocols()
expect(parserProtocols.includes('NMEA')).toBeTruthy()

const parserSentences = parser.getSentences()
const expectedSentences = ['AAM', 'GGA']
expectedSentences.forEach(sentence => expect(Object.keys(parserSentences).includes(sentence)).toBeTruthy())
})

test('Add protocols with file', () => {
const file = NORSUB_FILE
const parser = new Parser()
parser.addProtocols({ file })

const parserProtocols = parser.getProtocols()

const expectedProtocols = [
'NMEA',
'GYROCOMPAS1', 'Tokimek PTVG', 'RDI ADCP', 'SMCA', 'SMCC',
'NORSUB', 'NORSUB2', 'NORSUB6', 'NORSUB7', 'NORSUB7b', 'NORSUB8', 'NORSUB PRDID',
]
expectedProtocols.forEach(protocol => {
const result = parserProtocols.includes(protocol)
if (!result) { console.log(`Protocol ${protocol} is not included`) }
expect(result).toBeTruthy()
})

const parserSentences = parser.getSentences()
const expectedSentences = [
'AAM', 'GGA',
'HEHDT', 'PHTRO', 'PHINF',
'PNORSUB', 'PNORSUB2', 'PNORSUB6', 'PNORSUB7', 'PNORSUB7b', 'PNORSUB8', 'PRDID',
'PTVG', 'PRDID', 'PSMCA', 'PSMCC',
]
expectedSentences.forEach(sentence => {
const result = Object.keys(parserSentences).includes(sentence)
if (!result) { console.log(`Sentence ${sentence} is not included`) }
expect(result).toBeTruthy()
})
})

test('Add protocols with content', () => {
const content = fs.readFileSync(NORSUB_FILE, 'utf-8')
const parser = new Parser()
parser.addProtocols({ content })

const parserProtocols = parser.getProtocols()

const expectedProtocols = [
'NMEA',
'GYROCOMPAS1', 'Tokimek PTVG', 'RDI ADCP', 'SMCA', 'SMCC',
'NORSUB', 'NORSUB2', 'NORSUB6', 'NORSUB7', 'NORSUB7b', 'NORSUB8', 'NORSUB PRDID',
]
expectedProtocols.forEach(protocol => {
const result = parserProtocols.includes(protocol)
if (!result) { console.log(`Protocol ${protocol} is not included`) }
expect(result).toBeTruthy()
})

const parserSentences = parser.getSentences()
const expectedSentences = [
'AAM', 'GGA',
'HEHDT', 'PHTRO', 'PHINF',
'PNORSUB', 'PNORSUB2', 'PNORSUB6', 'PNORSUB7', 'PNORSUB7b', 'PNORSUB8', 'PRDID',
'PTVG', 'PRDID', 'PSMCA', 'PSMCC',
]
expectedSentences.forEach(sentence => {
const result = Object.keys(parserSentences).includes(sentence)
if (!result) { console.log(`Sentence ${sentence} is not included`) }
expect(result).toBeTruthy()
})
})

test('Add protocols with protocols', () => {
const { protocols } = readProtocolsFile(NORSUB_FILE)
const parser = new Parser()
parser.addProtocols({ protocols })

const parserProtocols = parser.getProtocols()

const expectedProtocols = [
'NMEA',
'GYROCOMPAS1', 'Tokimek PTVG', 'RDI ADCP', 'SMCA', 'SMCC',
'NORSUB', 'NORSUB2', 'NORSUB6', 'NORSUB7', 'NORSUB7b', 'NORSUB8', 'NORSUB PRDID',
]
expectedProtocols.forEach(protocol => {
const result = parserProtocols.includes(protocol)
if (!result) { console.log(`Protocol ${protocol} is not included`) }
expect(result).toBeTruthy()
})

const parserSentences = parser.getSentences()
const expectedSentences = [
'AAM', 'GGA',
'HEHDT', 'PHTRO', 'PHINF',
'PNORSUB', 'PNORSUB2', 'PNORSUB6', 'PNORSUB7', 'PNORSUB7b', 'PNORSUB8', 'PRDID',
'PTVG', 'PRDID', 'PSMCA', 'PSMCC',
]
expectedSentences.forEach(sentence => {
const result = Object.keys(parserSentences).includes(sentence)
if (!result) { console.log(`Sentence ${sentence} is not included`) }
expect(result).toBeTruthy()
})
})

test('Add protocols error', () => {
const parser = new Parser()
expect(() => parser.addProtocols({})).toThrow()
expect(() => parser.addProtocols({ file: '' })).toThrow()
expect(() => parser.addProtocols({ content: '' })).toThrow()
expect(() => parser.addProtocols({ protocols: {} as Protocol[] })).toThrow()
})

test('Parsing NMEA + NorSub sentences', () => {
const parser = new Parser()
parser.addProtocols({ file: NORSUB_FILE })
const storedSentences = parser.getSentences()
Object.values(storedSentences).forEach(storedSentence => {
const input = generateSentence(storedSentence)
expect(input).toBeTypeOf('string')
const output = parser.parseData(input)[0]
const parsed = NMEAKnownSentenceSchema.safeParse(output)
if (!parsed.success) {
console.error(parsed.error)
}
expect(parsed.success).toBeTruthy()
})
})

test('Uncompleted frames WITHOUT memory', () => {
const parser = new Parser()
const storedSentences = parser.getSentences()
const input1 = generateSentence(storedSentences['AAM'])
const halfInput1 = input1.slice(0, 10)
const halfInput2 = input1.slice(10)
const input2 = generateSentence(storedSentences['GGA']);
[
halfInput1 + input2,
halfInput1 + halfInput1+ input2,
input2 + halfInput2,
input2 + halfInput2 + halfInput2,
'asdfasfaf' + input2 + 'lakjs'
].forEach(input => {
const output = parser.parseData(input)
expect(output).toHaveLength(1)
})
})

test('Uncompleted frames WITH memory', () => {
const parser = new Parser()
parser.memory = true
const storedSentences = parser.getSentences()
const input1 = generateSentence(storedSentences['AAM'])
const halfInput1 = input1.slice(0, 10)
const halfInput2 = input1.slice(10)
const input2 = generateSentence(storedSentences['GGA']);
[
halfInput1 + input2,
halfInput1 + halfInput1+ input2,
input2 + halfInput2,
input2 + halfInput2 + halfInput2,
'asdfasfaf' + input2 + 'lakjs'
].forEach(input => {
const output = parser.parseData(input)
expect(output).toHaveLength(1)
})
parser.parseData(halfInput1)
const mem = parser.parseData(halfInput2)
expect(mem).toHaveLength(1)
})

test('Unknown frames', () => {
const getFakeSentece = (text: string, sentence: string): string => {
const [frame, _cs] = text.slice(START_FLAG_LENGTH, -END_FLAG_LENGTH).split(DELIMITER)
const [_emitter, ...info] = frame.split(SEPARATOR)
const newFrame = [sentence, ...info].join(SEPARATOR)
const checksum = numberChecksumToString(getChecksum(newFrame))
return `$${newFrame}${DELIMITER}${checksum}${END_FLAG}`
}

const parser = new Parser()
const storedSentences = parser.getSentences()
const aam = storedSentences['AAM']
const gga = storedSentences['GGA']
const input1 = getFakeSentece(generateSentence(aam), 'XXX')
const input2 = getFakeSentece(generateSentence(gga), 'YYY');
[input1, input2].forEach(input => {
const output = parser.parseData(input)[0]
const parsed = NMEAUknownSentenceSchema.safeParse(output)
if (!parsed.success) {
console.error(parsed.error)
}
expect(parsed.success).toBeTruthy()
})
})
})