
Commit 5276f15

Merge pull request #5 from AndreasVolkmann/master
fix windows line endings not being ignored
2 parents 8215825 + 580acc6 commit 5276f15

2 files changed (+42, -35 lines)

src/main/kotlin/com/apurebase/kgraphql/request/RequestPreProcessing.kt

Lines changed: 17 additions & 18 deletions
@@ -3,21 +3,21 @@ package com.apurebase.kgraphql.request
 import com.apurebase.kgraphql.RequestException
 import com.apurebase.kgraphql.not

-val OPERANDS = "{}():[]"
+internal const val OPERANDS = "{}():[]"

-val IGNORED_CHARACTERS = "\n\t, "
+private const val IGNORED_CHARACTERS = "\n\t\r, "

-val DELIMITERS = OPERANDS + IGNORED_CHARACTERS
+private const val DELIMITERS = OPERANDS + IGNORED_CHARACTERS

-fun tokenizeRequest(input : String) : List<String> {
+internal fun tokenizeRequest(input : String) : List<String> {
     var i = 0
     val tokens : MutableList<String> = mutableListOf()

-    while(i < input.length){
-        when(input[i]){
-            in IGNORED_CHARACTERS -> { i++ }
+    while (i < input.length) {
+        when (input[i]) {
+            in IGNORED_CHARACTERS -> i++
             in OPERANDS -> {
-                if (input.length > i+1 && input.substring(i, i+2) == "]!") {
+                if (input.length > i + 1 && input.substring(i, i + 2) == "]!") {
                     tokens.add("]!")
                     i += 2
                 } else {

@@ -70,7 +70,7 @@ private fun extractValueToken(substring: String): String {
     return tokenBuilder.toString()
 }

-fun createDocumentTokens(tokens : List<String>) : Document {
+internal fun createDocumentTokens(tokens : List<String>) : Document {
     val operations : MutableList<Document.OperationTokens> = mutableListOf()
     val fragments : MutableList<Document.FragmentTokens> = mutableListOf()

@@ -92,13 +92,12 @@ fun createDocumentTokens(tokens : List<String>) : Document {
     return Document(fragments, operations)
 }

-fun createFragmentTokens(tokens : List<String>, startIndex: Int) : Pair<Int, Document.FragmentTokens>{
+private fun createFragmentTokens(tokens : List<String>, startIndex: Int) : Pair<Int, Document.FragmentTokens>{
     var index = startIndex
     var name : String? = null
     var typeCondition : String? = null
     while(index < tokens.size){
-        val token = tokens[index]
-        when(token) {
+        when(val token = tokens[index]) {
             "fragment" -> {
                 name = tokens[index + 1]
                 index++

@@ -120,7 +119,7 @@ fun createFragmentTokens(tokens : List<String>, startIndex: Int) : Pair<Int, Doc
     throw RequestException("Invalid fragment $name declaration without selection set")
 }

-fun createOperationTokens(tokens : List<String>, startIndex: Int) : Pair<Int, Document.OperationTokens>{
+private fun createOperationTokens(tokens : List<String>, startIndex: Int) : Pair<Int, Document.OperationTokens>{
     var index = startIndex
     var name : String? = null
     var type : String? = null

@@ -191,25 +190,25 @@ private fun parseOperationVariables(variablesTokens: List<String>): MutableList<
     return operationVariables
 }

-val TYPE_WRAPPERS = arrayOf('!', '[', ']')
+private val TYPE_WRAPPERS = arrayOf('!', '[', ']')

-fun String.toTypeReference() : TypeReference {
+private fun String.toTypeReference() : TypeReference {
     val isNullable = not(endsWith("!"))
     val isList = startsWith("[") && (endsWith("]") || endsWith("]!"))
     val isElementNullable = isList && not(endsWith("!]") || endsWith("!]!"))
     val name = dropWhile { it in TYPE_WRAPPERS }.dropLastWhile { it in TYPE_WRAPPERS }
     return TypeReference(name, isNullable, isList, isElementNullable)
 }

-fun indexOfClosingBracket(tokens: List<String>, startIndex: Int) : Int {
+private fun indexOfClosingBracket(tokens: List<String>, startIndex: Int) : Int {
     var nestedBrackets = 0
     val subList = tokens.subList(startIndex, tokens.size)
     subList.forEachIndexed { index, token ->
-        when(token){
+        when (token) {
             "{" -> nestedBrackets++
             "}" -> nestedBrackets--
         }
-        if(nestedBrackets == 0) return index + startIndex + 1
+        if (nestedBrackets == 0) return index + startIndex + 1
     }
     val indexOfTokenInString = getIndexOfTokenInString(tokens.subList(0, startIndex))
     throw RequestException("Missing closing bracket for opening bracket at $indexOfTokenInString")

src/test/kotlin/com/apurebase/kgraphql/request/RequestTokenizationTest.kt

Lines changed: 25 additions & 17 deletions
@@ -9,45 +9,45 @@ import org.junit.Test
 */
class RequestTokenizationTest {

-    fun testTokenization(input : String, expected : List<String>) {
+    private fun testTokenization(input : String, expected : List<String>) {
        val tokens = tokenizeRequest(input)
        assertThat(tokens, equalTo(expected))
    }

    @Test
    fun `tokenize mutation with args`(){
        testTokenization(
-                input = "{createHero(name: \"Batman\", appearsIn: \"The Dark Knight\")}",
-                expected = listOf("{", "createHero", "(", "name", ":", "\"Batman\"", "appearsIn", ":", "\"The Dark Knight\"", ")", "}")
+            input = "{createHero(name: \"Batman\", appearsIn: \"The Dark Knight\")}",
+            expected = listOf("{", "createHero", "(", "name", ":", "\"Batman\"", "appearsIn", ":", "\"The Dark Knight\"", ")", "}")
        )
    }

    @Test
    fun `tokenize simple query`(){
        testTokenization(
-                input = "{batman: hero(name: \"Batman\"){ skills : powers }}",
-                expected = listOf("{", "batman", ":", "hero", "(", "name", ":", "\"Batman\"", ")", "{", "skills", ":", "powers", "}", "}")
+            input = "{batman: hero(name: \"Batman\"){ skills : powers }}",
+            expected = listOf("{", "batman", ":", "hero", "(", "name", ":", "\"Batman\"", ")", "{", "skills", ":", "powers", "}", "}")
        )
    }

    @Test
    fun `tokenize query with nested selection set`(){
        testTokenization(
-                input = "{hero{name appearsIn{title{abbr full} year}}\nvillain{name deeds}}",
-                expected = listOf(
-                        "{", "hero", "{", "name", "appearsIn", "{", "title", "{", "abbr", "full", "}", "year", "}", "}",
-                        "villain", "{", "name", "deeds", "}", "}"
-                )
+            input = "{hero{name appearsIn{title{abbr full} year}}\nvillain{name deeds}}",
+            expected = listOf(
+                "{", "hero", "{", "name", "appearsIn", "{", "title", "{", "abbr", "full", "}", "year", "}", "}",
+                "villain", "{", "name", "deeds", "}", "}"
+            )
        )
    }

    @Test
    fun `Tokenize list argument`(){
        testTokenization(
-                input = "{List(value : [23, 3, 23])}",
-                expected = listOf(
-                        "{", "List","(", "value", ":", "[", "23", "3", "23", "]", ")","}"
-                )
+            input = "{List(value : [23, 3, 23])}",
+            expected = listOf(
+                "{", "List","(", "value", ":", "[", "23", "3", "23", "]", ")","}"
+            )
        )
    }

@@ -91,8 +91,16 @@ class RequestTokenizationTest {
    @Test
    fun `tokenize input with quotes`(){
        testTokenization(
-                input = "{hello(name : \"Ted\\\" Mosby\")}",
-                expected = listOf("{", "hello", "(", "name", ":", "\"Ted\\\" Mosby\"", ")", "}")
+            input = "{hello(name : \"Ted\\\" Mosby\")}",
+            expected = listOf("{", "hello", "(", "name", ":", "\"Ted\\\" Mosby\"", ")", "}")
+        )
+    }
+
+    @Test
+    fun `tokenize input with new lines`() {
+        testTokenization(
+            input = "{lists{\r\ntotalCount\r\nnodes{\r\ntitle\r\n }\r\n}\r\n}",
+            expected = listOf("{", "lists", "{", "totalCount", "nodes", "{", "title", "}", "}", "}")
        )
    }
-}
+}
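
The new test exercises the reported case directly: before this change '\r' was neither an ignored character nor a delimiter, so the carriage returns in a CRLF-formatted request presumably leaked into the token stream instead of being treated as whitespace. A hypothetical companion test, not part of this commit, could cover bare carriage-return line endings with the same helper inside RequestTokenizationTest:

    // Hypothetical addition: lone '\r' line endings should be skipped just like "\r\n".
    @Test
    fun `tokenize input with carriage return only line endings`() {
        testTokenization(
            input = "{lists{\rtotalCount\rnodes{\rtitle\r}\r}\r}",
            expected = listOf("{", "lists", "{", "totalCount", "nodes", "{", "title", "}", "}", "}")
        )
    }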
