From 62a043cca8d99bbc609c607f66559e64f873513f Mon Sep 17 00:00:00 2001
From: Piet Brauer
Date: Tue, 20 Oct 2015 17:06:43 +0700
Subject: [PATCH] Clarify tests

Replace the inline token arrays in the standard token tests with named
reference arrays compared via XCTAssertEqual, backed by an Equatable
conformance for Token. Also simplify the parser error check in
parserTests and remove commented-out println debugging from Named.

---
 Common/Framework/States/Named.swift         |  3 -
 OysterKit/OysterKitTests/parserTests.swift  |  7 +--
 .../OysterKitTests/standardTokensTest.swift | 62 ++++++++++++++++---
 3 files changed, 55 insertions(+), 17 deletions(-)

diff --git a/Common/Framework/States/Named.swift b/Common/Framework/States/Named.swift
index 6db9edbf..805895b3 100644
--- a/Common/Framework/States/Named.swift
+++ b/Common/Framework/States/Named.swift
@@ -58,9 +58,6 @@ public class Named : TokenizationState {
         //Create a "new" named state with the root set as a clone of our root
         let newState = Named(name:name,root: rootState.clone())
 
-//        println(self.rootState.description)
-//        println(newState.rootState.description)
-
         newState.endState = newState.rootState.lowLeaf()
         newState.cloneTimeEnd = endState
 
diff --git a/OysterKit/OysterKitTests/parserTests.swift b/OysterKit/OysterKitTests/parserTests.swift
index 8e23d279..fcb47ed6 100644
--- a/OysterKit/OysterKitTests/parserTests.swift
+++ b/OysterKit/OysterKitTests/parserTests.swift
@@ -119,12 +119,7 @@ class parserTests: XCTestCase {
         //Create a tokenizer from the generated description
         let generatedTokenizer = parser.parse(tokFileTokDef)
 
-        var parserErrors = ""
-        for error in parser.errors {
-            parserErrors += "\t\(error)\n"
-        }
-
-        XCTAssert(parserErrors.characters.count == 0, "Self parsing generated an error: \(parserErrors) with \(tokFileTokDef)")
+        XCTAssert(parser.errors.count == 0)
 
         //Tokenize original serialized description with the parsed tokenizer built from my own serialized description
         let parserGeneratedTokens = generatedTokenizer.tokenize(tokFileTokDef)
diff --git a/OysterKit/OysterKitTests/standardTokensTest.swift b/OysterKit/OysterKitTests/standardTokensTest.swift
index 819eef6a..ee1e3e33 100644
--- a/OysterKit/OysterKitTests/standardTokensTest.swift
+++ b/OysterKit/OysterKitTests/standardTokensTest.swift
@@ -71,9 +71,14 @@ class standardTokensTest: XCTestCase {
         )
 
         let parsingTest = "Short 10 string"
-
-
-        XCTAssert(tokenizer.tokenize(parsingTest) == [token("word",chars:"Short"), token("blank",chars:" "), token("integer",chars:"10"), token("blank",chars:" "), token("word",chars:"string"), ])
+        let reference = [
+            token("word", chars: "Short"),
+            token("blank", chars: " "),
+            token("integer", chars: "10"),
+            token("blank", chars: " "),
+            token("word", chars: "string")
+        ]
+        XCTAssertEqual(tokenizer.tokenize(parsingTest), reference)
     }
 
     func testWhiteSpaces(){
@@ -87,11 +92,28 @@ class standardTokensTest: XCTestCase {
         )
 
         let parsingTest = "Short\tlittle\nstring that\n tries \tto break \n\tthings up"
-
-        __debugScanning = true
+
         let tokens = tokenizer.tokenize(parsingTest)
-        assertTokenListsEqual(tokens, reference: [token("word",chars:"Short"), token("whitespace",chars:"\t"), token("word",chars:"little"), token("whitespace",chars:"\n"), token("word",chars:"string"), token("whitespace",chars:" "), token("word",chars:"that"), token("whitespace",chars:"\n "), token("word",chars:"tries"), token("whitespace",chars:" \t"), token("word",chars:"to"), token("whitespace",chars:" "), token("word",chars:"break"), token("whitespace",chars:" \n\t"), token("word",chars:"things"), token("whitespace",chars:" "), token("word",chars:"up"), ])
-        __debugScanning = false
+        let reference = [
+            token("word", chars: "Short"),
+            token("whitespace", chars: "\t"),
+            token("word", chars: "little"),
+            token("whitespace", chars: "\n"),
+            token("word", chars: "string"),
chars: "string"), + token("whitespace", chars: " "), + token("word", chars: "that"), + token("whitespace", chars: "\n "), + token("word", chars: "tries"), + token("whitespace", chars: " \t"), + token("word", chars: "to"), + token("whitespace", chars: " "), + token("word", chars: "break"), + token("whitespace", chars: " \n\t"), + token("word", chars: "things"), + token("whitespace", chars: " "), + token("word", chars: "up") + ] + XCTAssertEqual(tokens, reference) } func testQuotedString(){ @@ -105,7 +127,31 @@ class standardTokensTest: XCTestCase { ) let parsingTest = "A great man once said \"It is a far better thing that I do now than I have ever done\". " + let reference = [ + token("word", chars:"A"), + token("blank", chars:" "), + token("word", chars:"great"), + token("blank", chars:" "), + token("word", chars:"man"), + token("blank", chars:" "), + token("word", chars:"once"), + token("blank", chars:" "), + token("word", chars:"said"), + token("blank", chars:" "), + token("double-quote", chars:"\""), + token("quoted-string", chars:"It is a far better thing that I do now than I have ever done"), + token("double-quote", chars:"\""), + token("punct", chars:"."), + token("blank", chars:" ") + ] - assertTokenListsEqual(tokenizer.tokenize(parsingTest), reference: [token("word",chars:"A"), token("blank",chars:" "), token("word",chars:"great"), token("blank",chars:" "), token("word",chars:"man"), token("blank",chars:" "), token("word",chars:"once"), token("blank",chars:" "), token("word",chars:"said"), token("blank",chars:" "), token("double-quote",chars:"\""), token("quoted-string",chars:"It is a far better thing that I do now than I have ever done"), token("double-quote",chars:"\""), token("punct",chars:"."), token("blank",chars:" "), ]) + XCTAssertEqual(tokenizer.tokenize(parsingTest), reference) } } + +extension Token: Equatable {} + +public func ==(lhs: Token, rhs: Token) -> Bool { + return lhs.name == rhs.name && + lhs.characters == rhs.characters +}