Implement Dict in Wirespec Language (#212)
Co-authored-by: Jerre van Veluw <[email protected]>
wilmveel and jerrevanveluw authored May 22, 2024
1 parent f1b34b5 commit 99d915f
Showing 18 changed files with 374 additions and 313 deletions.
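
In short, this commit renames the isMap flag to isDictionary throughout the model, parser, and emitters, and teaches the parser to treat a field type wrapped in curly braces as a dictionary. A minimal sketch of the resulting Wirespec syntax, written as a Kotlin raw string in the style of the updated tests below (the Foo/bar names come from ParseTypeTest; the emitted TypeScript is an assumption based on the TypeScriptEmitter change):

// Wirespec source: `bar` is a dictionary whose values are lists of strings.
val source = """
    |type Foo {
    |    bar: {String[]}
    |}
""".trimMargin()
// With the emitter change below, such a field should come out as
// Record<string, string[]> in TypeScript (assuming a String primitive renders as `string`).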
@@ -73,7 +73,7 @@ class TypeScriptEmitter(logger: Logger = noLogger) : DefinitionModelEmitter, Emi

override fun Field.Reference.emit() = emitSymbol()
.let { if (isIterable) "$it[]" else it }
.let { if (isMap) "Record<string, $it>" else it }
.let { if (isDictionary) "Record<string, $it>" else it }

override fun Refined.emit() =
"""export type ${identifier.sanitizeSymbol()} = string;
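
For clarity, a minimal, self-contained Kotlin sketch (not the project's emitter) of how the two chained let calls above compose for a field that is both iterable and a dictionary; the `string` symbol for a String primitive is an assumption:

// Standalone sketch of the reference-emission chain shown above.
fun emitReference(symbol: String, isIterable: Boolean, isDictionary: Boolean): String =
    symbol
        .let { if (isIterable) "$it[]" else it }
        .let { if (isDictionary) "Record<string, $it>" else it }

fun main() {
    // A field declared as `{String[]}` in Wirespec: iterable and a dictionary.
    println(emitReference("string", isIterable = true, isDictionary = true)) // Record<string, string[]>
}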
@@ -22,32 +22,32 @@ data class Type(
data class Field(val identifier: Identifier, val reference: Reference, val isNullable: Boolean) {
sealed interface Reference : Value<String> {
val isIterable: Boolean
val isMap: Boolean
val isDictionary: Boolean

data class Any(
override val isIterable: Boolean,
override val isMap: Boolean = false,
override val isDictionary: Boolean = false,
) : Reference {
override val value = "Any"
}

data class Unit(
override val isIterable: Boolean,
override val isMap: Boolean = false,
override val isDictionary: Boolean = false,
) : Reference {
override val value = "Unit"
}

data class Custom(
override val value: String,
override val isIterable: Boolean,
override val isMap: Boolean = false
override val isDictionary: Boolean = false
) : Reference

data class Primitive(
val type: Type,
override val isIterable: Boolean = false,
override val isMap: Boolean = false
override val isDictionary: Boolean = false
) : Reference {
enum class Type { String, Integer, Number, Boolean }

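
As a usage note, the reference produced for a field declared as `{String[]}` now carries both flags. A hypothetical fragment, assuming the Field model above is in scope (project imports omitted):

// Hypothetical construction mirroring what the parser produces for `bar: {String[]}`.
val dictOfLists = Field.Reference.Primitive(
    type = Field.Reference.Primitive.Type.String,
    isIterable = true,
    isDictionary = true,
)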
@@ -91,48 +91,70 @@ class TypeParser(logger: Logger) : AbstractParser(logger) {
is Colon -> eatToken().bind()
else -> raise(WrongTokenException<Colon>(token).also { eatToken().bind() })
}
val isDict = when (token.type) {
is LeftCurly -> true.also { eatToken().bind() }
else -> false
}
when (val type = token.type) {
is WirespecType -> Field(
identifier = identifier,
reference = parseFieldValue(type, token.value).bind(),
reference = parseFieldValue(type, token.value, isDict).bind(),
isNullable = (token.type is QuestionMark).also { if (it) eatToken().bind() }
)
).also {
if (isDict) {
when (token.type) {
is RightCurly -> eatToken().bind()
else -> raise(WrongTokenException<RightCurly>(token).also { eatToken().bind() })
}
}
}

else -> raise(WrongTokenException<CustomType>(token).also { eatToken().bind() })
}
}

private fun TokenProvider.parseFieldValue(wsType: WirespecType, value: String) = either {
private fun TokenProvider.parseFieldValue(wsType: WirespecType, value: String, isDict: Boolean) = either {
val previousToken = token
eatToken().bind()
token.log()
val isIterable = (token.type is Brackets).also { if (it) eatToken().bind() }
when (wsType) {
is WsString -> Field.Reference.Primitive(
Field.Reference.Primitive.Type.String,
isIterable
type = Field.Reference.Primitive.Type.String,
isIterable = isIterable,
isDictionary = isDict
)

is WsInteger -> Field.Reference.Primitive(
Field.Reference.Primitive.Type.Integer,
isIterable
type = Field.Reference.Primitive.Type.Integer,
isIterable = isIterable,
isDictionary = isDict
)

is WsNumber -> Field.Reference.Primitive(
Field.Reference.Primitive.Type.Number,
isIterable
type = Field.Reference.Primitive.Type.Number,
isIterable = isIterable,
isDictionary = isDict
)

is WsBoolean -> Field.Reference.Primitive(
Field.Reference.Primitive.Type.Boolean,
isIterable
type = Field.Reference.Primitive.Type.Boolean,
isIterable = isIterable,
isDictionary = isDict
)

is WsUnit -> Field.Reference.Unit(isIterable)
is WsUnit -> Field.Reference.Unit(
isIterable = isIterable,
isDictionary = isDict
)

is CustomType -> {
previousToken.shouldBeDefined().bind()
Field.Reference.Custom(value, isIterable)
Field.Reference.Custom(
value = value,
isIterable = isIterable,
isDictionary = isDict
)
}
}
}
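
A minimal, self-contained Kotlin sketch (not the project's token-based parser) of the brace handling added above: a leading `{` marks the field as a dictionary, and a matching `}` must close it after the type and optional `[]`:

// Simplified model: tokens are plain strings; the real parser works on typed tokens
// and raises WrongTokenException instead of throwing.
fun parseFieldType(tokens: ArrayDeque<String>): Pair<String, Boolean> {
    val isDict = tokens.firstOrNull() == "{"
    if (isDict) tokens.removeFirst()
    val type = tokens.removeFirst()                       // e.g. "String"
    val isIterable = tokens.firstOrNull() == "[]"
    if (isIterable) tokens.removeFirst()
    if (isDict) require(tokens.removeFirst() == "}") { "expected '}' to close dictionary" }
    return (if (isIterable) "$type[]" else type) to isDict
}

fun main() {
    println(parseFieldType(ArrayDeque(listOf("{", "String", "[]", "}")))) // (String[], true)
}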
@@ -124,7 +124,7 @@ class ParseEndpointTest {
reference = Primitive(
type = String,
isIterable = false,
isMap = false,
isDictionary = false,
)
)
)
@@ -16,7 +16,7 @@ class ParseTypeTest {
fun testTypeParser() {
val source = """
type Foo {
bar: String
bar: {String[]}
}
""".trimIndent()

@@ -35,8 +35,8 @@ class ParseTypeTest {
identifier.shouldBeInstanceOf<Identifier>().value shouldBe "bar"
reference.shouldBeInstanceOf<Field.Reference.Primitive>().run {
type shouldBe Field.Reference.Primitive.Type.String
isIterable shouldBe false
isMap shouldBe false
isIterable shouldBe true
isDictionary shouldBe true
}
isNullable shouldBe false
}
@@ -78,8 +78,8 @@ class ParseTypeTest {
.also { it.size shouldBe 2 }
.let {
val (first, second) = it.toList()
first shouldBe Field.Reference.Custom(value = "Bar", isIterable = false, isMap = false)
second shouldBe Field.Reference.Custom(value = "Bal", isIterable = false, isMap = false)
first shouldBe Field.Reference.Custom(value = "Bar", isIterable = false, isDictionary = false)
second shouldBe Field.Reference.Custom(value = "Bal", isIterable = false, isDictionary = false)
}
}
}
@@ -0,0 +1,23 @@
package community.flock.wirespec.compiler.core.tokenize

import community.flock.wirespec.compiler.core.WirespecSpec
import community.flock.wirespec.compiler.core.tokenize.types.Invalid
import community.flock.wirespec.compiler.core.tokenize.types.TokenType
import io.kotest.matchers.collections.shouldNotBeEmpty
import io.kotest.matchers.collections.shouldNotContain
import io.kotest.matchers.shouldBe

fun testTokenizer(
source: String,
vararg expected: TokenType,
removeWhiteSpace: Boolean = true,
noInvalid: Boolean = true,
) {
WirespecSpec.tokenize(source)
.run { if (removeWhiteSpace) removeWhiteSpace() else this }
.shouldNotBeEmpty()
.apply { size shouldBe expected.size }
.map { it.type }
.apply { if (noInvalid) shouldNotContain(Invalid) }
.onEachIndexed { index, tokenType -> tokenType shouldBe expected[index] }
}
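
Usage is shown throughout the rewritten TokenizeEndpointTest below: the expected token types are passed as varargs, and noInvalid = false opts out of the Invalid check when invalid tokens are expected. A hypothetical extra case, assuming the same imports as that test file:

@Test
fun exampleStatusCodes() = testTokenizer(
    """200 404""",
    StatusCode, StatusCode, EndOfProgram,
)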
@@ -1,6 +1,5 @@
package community.flock.wirespec.compiler.core.tokenize

import community.flock.wirespec.compiler.core.WirespecSpec
import community.flock.wirespec.compiler.core.tokenize.types.Arrow
import community.flock.wirespec.compiler.core.tokenize.types.Brackets
import community.flock.wirespec.compiler.core.tokenize.types.Colon
@@ -19,116 +18,68 @@ import community.flock.wirespec.compiler.core.tokenize.types.RightCurly
import community.flock.wirespec.compiler.core.tokenize.types.StatusCode
import community.flock.wirespec.compiler.core.tokenize.types.WsEndpointDef
import community.flock.wirespec.compiler.core.tokenize.types.WsString
import io.kotest.matchers.collections.shouldNotBeEmpty
import io.kotest.matchers.collections.shouldNotContain
import io.kotest.matchers.shouldBe
import kotlin.test.Test

class TokenizeEndpointTest {

@Test
fun testStatusCodeTokenize() {
val source = """
000 099 100 199 200 299 300 399 400 499 500 599 600 699
""".trimIndent()

val expected = listOf(
Invalid, Invalid, Invalid, Invalid, Invalid, Invalid,
StatusCode, StatusCode, StatusCode, StatusCode, StatusCode,
StatusCode, StatusCode, StatusCode, StatusCode, StatusCode,
Invalid, Invalid, Invalid, Invalid, Invalid, Invalid,
EndOfProgram
)

WirespecSpec.tokenize(source).removeWhiteSpace()
.shouldNotBeEmpty()
.also { it.size shouldBe expected.size }
.onEachIndexed { index, token -> token.type shouldBe expected[index] }
}
fun testStatusCodeTokenize() = testTokenizer(
"""000 099 100 199 200 299 300 399 400 499 500 599 600 699""",
Invalid, Invalid, Invalid, Invalid, Invalid, Invalid,
StatusCode, StatusCode, StatusCode, StatusCode, StatusCode,
StatusCode, StatusCode, StatusCode, StatusCode, StatusCode,
Invalid, Invalid, Invalid, Invalid, Invalid, Invalid,
EndOfProgram,
noInvalid = false,
)

@Test
fun testEndpointTokenizer() {
val source = """
endpoint GetTodos GET /todos/{id: String} -> {
200 -> Todo[]
404 -> Error
}
""".trimIndent()

val expected = listOf(
WsEndpointDef, CustomType, Method, Path, ForwardSlash, LeftCurly, CustomValue, Colon, WsString,
RightCurly, Arrow, LeftCurly, StatusCode, Arrow, CustomType, Brackets, StatusCode, Arrow, CustomType,
RightCurly, EndOfProgram,
)

WirespecSpec.tokenize(source).removeWhiteSpace()
.shouldNotBeEmpty()
.also { it.size shouldBe expected.size }
.map { it.type }.shouldNotContain(Invalid)
.onEachIndexed { index, tokenType -> tokenType shouldBe expected[index] }
}
fun testEndpointTokenizer() = testTokenizer(
"""
|endpoint GetTodos GET /todos/{id: String} -> {
| 200 -> Todo[]
| 404 -> Error
|}
""".trimMargin(),
WsEndpointDef, CustomType, Method, Path, ForwardSlash, LeftCurly, CustomValue, Colon, WsString,
RightCurly, Arrow, LeftCurly, StatusCode, Arrow, CustomType, Brackets, StatusCode, Arrow, CustomType,
RightCurly, EndOfProgram,
)

@Test
fun testPOSTWithBodyTokenizer() {
val source = """
endpoint PostTodo Todo POST /todos -> {
200 -> Todo
}
""".trimIndent()

val expected = listOf(
WsEndpointDef, CustomType, CustomType, Method, Path, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, RightCurly, EndOfProgram,
)

WirespecSpec.tokenize(source).removeWhiteSpace()
.shouldNotBeEmpty()
.also { it.size shouldBe expected.size }
.map { it.type }.shouldNotContain(Invalid)
.onEachIndexed { index, tokenType -> tokenType shouldBe expected[index] }
}
fun testPOSTWithBodyTokenizer() = testTokenizer(
"""
|endpoint PostTodo Todo POST /todos -> {
| 200 -> Todo
|}
""".trimMargin(),
WsEndpointDef, CustomType, CustomType, Method, Path, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, RightCurly, EndOfProgram,
)

@Test
fun testQueryParamsTokenizer() {
val source = """
endpoint GetTodos GET /todos
?{name: String, date: String} -> {
200 -> Todo[]
}
""".trimIndent()

val expected = listOf(
WsEndpointDef, CustomType, Method, Path, QuestionMark, LeftCurly, CustomValue, Colon,
WsString, Comma, CustomValue, Colon, WsString, RightCurly, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, Brackets, RightCurly, EndOfProgram,
)

WirespecSpec.tokenize(source).removeWhiteSpace()
.shouldNotBeEmpty()
.also { it.size shouldBe expected.size }
.map { it.type }.shouldNotContain(Invalid)
.onEachIndexed { index, tokenType -> tokenType shouldBe expected[index] }
}
fun testQueryParamsTokenizer() = testTokenizer(
"""
|endpoint GetTodos GET /todos
|?{name: String, date: String} -> {
| 200 -> Todo[]
|}
""".trimMargin(),
WsEndpointDef, CustomType, Method, Path, QuestionMark, LeftCurly, CustomValue, Colon,
WsString, Comma, CustomValue, Colon, WsString, RightCurly, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, Brackets, RightCurly, EndOfProgram,
)

@Test
fun testHeadersTokenizer() {
val source = """
endpoint GetTodos GET /todos
#{version: String, accept: String} -> {
200 -> Todo[]
}
""".trimIndent()

val expected = listOf(
WsEndpointDef, CustomType, Method, Path, Hash, LeftCurly, CustomValue, Colon,
WsString, Comma, CustomValue, Colon, WsString, RightCurly, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, Brackets, RightCurly, EndOfProgram,
)

WirespecSpec.tokenize(source).removeWhiteSpace()
.shouldNotBeEmpty()
.also { it.size shouldBe expected.size }
.map { it.type }.shouldNotContain(Invalid)
.onEachIndexed { index, tokenType -> tokenType shouldBe expected[index] }
}
fun testHeadersTokenizer() = testTokenizer(
"""
|endpoint GetTodos GET /todos
|#{version: String, accept: String} -> {
| 200 -> Todo[]
|}
""".trimMargin(),
WsEndpointDef, CustomType, Method, Path, Hash, LeftCurly, CustomValue, Colon,
WsString, Comma, CustomValue, Colon, WsString, RightCurly, Arrow, LeftCurly,
StatusCode, Arrow, CustomType, Brackets, RightCurly, EndOfProgram,
)
}