@@ -658,6 +658,8 @@ public class JsonBuilder internal constructor(json: Json) {
*/
public var useArrayPolymorphism: Boolean = json.configuration.useArrayPolymorphism

/**
 * Specifies whether JSON string values may be coerced into primitive values (numbers
 * and booleans) during decoding, e.g. treating "42" as the integer 42.
 *
 * `true` by default.
 */
public var allowPrimitiveCoercion: Boolean = json.configuration.allowPrimitiveCoercion

/**
* Module with contextual and polymorphic serializers to be used in the resulting [Json] instance.
*
@@ -695,7 +697,8 @@ public class JsonBuilder internal constructor(json: Json) {
allowStructuredMapKeys, prettyPrint, explicitNulls, prettyPrintIndent,
coerceInputValues, useArrayPolymorphism,
classDiscriminator, allowSpecialFloatingPointValues, useAlternativeNames,
namingStrategy, decodeEnumsCaseInsensitive, allowTrailingComma, allowComments, classDiscriminatorMode
namingStrategy, decodeEnumsCaseInsensitive, allowTrailingComma, allowComments, classDiscriminatorMode,
allowPrimitiveCoercion
)
}
}
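For context, a minimal sketch of how the new builder flag might be used once this lands; the `Payload` class and the val names are illustrative, and the strict-mode behavior is assumed from this diff rather than documented anywhere yet:

import kotlinx.serialization.Serializable
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.json.Json

@Serializable
data class Payload(val count: Int)

// With the flag off, quoted primitives are presumably rejected instead of
// being silently converted, while the default keeps today's lenient behavior.
val strict = Json { allowPrimitiveCoercion = false }

fun main() {
    // Default instance: the quoted number "42" is coerced to an Int.
    println(Json.decodeFromString<Payload>("""{"count": "42"}"""))

    // Strict instance: the same input is expected to fail to decode.
    runCatching { strict.decodeFromString<Payload>("""{"count": "42"}""") }
        .onFailure { println("Rejected quoted primitive: ${it.message}") }
}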
@@ -45,6 +45,8 @@ public class JsonConfiguration @OptIn(ExperimentalSerializationApi::class) inter
level = DeprecationLevel.ERROR
)
public var classDiscriminatorMode: ClassDiscriminatorMode = ClassDiscriminatorMode.POLYMORPHIC,
@ExperimentalSerializationApi
public val allowPrimitiveCoercion: Boolean = true,
) {

/** @suppress Dokka **/
@@ -25,6 +25,7 @@ internal open class StreamingJsonDecoder(
descriptor: SerialDescriptor,
discriminatorHolder: DiscriminatorHolder?
) : JsonDecoder, ChunkedDecoder, AbstractDecoder() {
private val coercePrimitives = json.configuration.allowPrimitiveCoercion

// A mutable reference to the discriminator that has to be skipped when in the optimistic phase
// of polymorphic serialization, see `decodeSerializableValue`
@@ -273,47 +274,51 @@
}

/*
* The primitives are allowed to be quoted and unquoted
* to simplify map key parsing and integrations with third-party APIs.
*/
override fun decodeBoolean(): Boolean {
return lexer.consumeBooleanLenient()
return if (coercePrimitives) {
lexer.consumeBooleanLenient()
} else {
lexer.consumeBoolean()
}
}

override fun decodeByte(): Byte {
val value = lexer.consumeNumericLiteral()
val value = lexer.consumeNumericLiteral(coercePrimitives)
// Check for overflow
if (value != value.toByte().toLong()) lexer.fail("Failed to parse byte for input '$value'")
return value.toByte()
}

override fun decodeShort(): Short {
val value = lexer.consumeNumericLiteral()
val value = lexer.consumeNumericLiteral(coercePrimitives)
// Check for overflow
if (value != value.toShort().toLong()) lexer.fail("Failed to parse short for input '$value'")
return value.toShort()
}

override fun decodeInt(): Int {
val value = lexer.consumeNumericLiteral()
val value = lexer.consumeNumericLiteral(coercePrimitives)
// Check for overflow
if (value != value.toInt().toLong()) lexer.fail("Failed to parse int for input '$value'")
return value.toInt()
}

override fun decodeLong(): Long {
return lexer.consumeNumericLiteral()
return lexer.consumeNumericLiteral(coercePrimitives)
}

override fun decodeFloat(): Float {
val result = lexer.parseString("float") { toFloat() }
val result = lexer.parseString("float", coercePrimitives) { toFloat() }
val specialFp = json.configuration.allowSpecialFloatingPointValues
if (specialFp || result.isFinite()) return result
lexer.throwInvalidFloatingPointDecoded(result)
}

override fun decodeDouble(): Double {
val result = lexer.parseString("double") { toDouble() }
val result = lexer.parseString("double", coercePrimitives) { toDouble() }
val specialFp = json.configuration.allowSpecialFloatingPointValues
if (specialFp || result.isFinite()) return result
lexer.throwInvalidFloatingPointDecoded(result)
@@ -374,15 +379,16 @@ internal class JsonDecoderForUnsignedTypes(
) : AbstractDecoder() {
override val serializersModule: SerializersModule = json.serializersModule
override fun decodeElementIndex(descriptor: SerialDescriptor): Int = error("unsupported")
private val coercePrimitives = json.configuration.allowPrimitiveCoercion

override fun decodeInt(): Int = lexer.parseString("UInt") { toUInt().toInt() }
override fun decodeLong(): Long = lexer.parseString("ULong") { toULong().toLong() }
override fun decodeByte(): Byte = lexer.parseString("UByte") { toUByte().toByte() }
override fun decodeShort(): Short = lexer.parseString("UShort") { toUShort().toShort() }
override fun decodeInt(): Int = lexer.parseString("UInt", coercePrimitives) { toUInt().toInt() }
override fun decodeLong(): Long = lexer.parseString("ULong", coercePrimitives) { toULong().toLong() }
override fun decodeByte(): Byte = lexer.parseString("UByte", coercePrimitives) { toUByte().toByte() }
override fun decodeShort(): Short = lexer.parseString("UShort", coercePrimitives) { toUShort().toShort() }
}

private inline fun <T> AbstractJsonLexer.parseString(expectedType: String, block: String.() -> T): T {
val input = consumeStringLenient()
private inline fun <T> AbstractJsonLexer.parseString(expectedType: String, coercePrimitives: Boolean, block: String.() -> T): T {
val input = consumeOther(allowQuoted = coercePrimitives)
try {
return input.block()
} catch (e: IllegalArgumentException) {
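The comment earlier in this file notes that quoted primitives exist largely to simplify map-key parsing, since JSON object keys are always strings. A hedged sketch of what the two modes would mean there, assuming the flag behaves as this diff implies:

import kotlinx.serialization.decodeFromString
import kotlinx.serialization.json.Json

fun main() {
    val input = """{"1": "one", "2": "two"}"""

    // Default (coercion on): the quoted keys "1" and "2" parse as Int map keys.
    println(Json.decodeFromString<Map<Int, String>>(input))

    // Coercion off: the same quoted keys are presumably rejected when decoded as Int.
    val strict = Json { allowPrimitiveCoercion = false }
    runCatching { strict.decodeFromString<Map<Int, String>>(input) }
        .onFailure { println("Strict mode: ${it.message}") }
}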
@@ -85,6 +85,9 @@ private sealed class AbstractJsonTreeDecoder(

private inline fun <T : Any> getPrimitiveValue(tag: String, primitiveName: String, convert: JsonPrimitive.() -> T?): T {
val literal = cast<JsonPrimitive>(currentElement(tag), primitiveName, tag)
if (!json.configuration.allowPrimitiveCoercion && literal.isString) {
unparsedPrimitive(literal, primitiveName, tag)
}
try {
return literal.convert() ?: unparsedPrimitive(literal, primitiveName, tag)
} catch (e: IllegalArgumentException) {
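The tree-decoding path gets the same guard: when coercion is off and the JsonPrimitive is a string, getPrimitiveValue reports it as an unparsed primitive. A hedged sketch of what that would look like from user code, with an illustrative Flag class:

import kotlinx.serialization.Serializable
import kotlinx.serialization.json.*

@Serializable
data class Flag(val enabled: Boolean)

fun main() {
    // A quoted boolean inside an already-parsed JSON tree.
    val element = buildJsonObject { put("enabled", "true") }

    // With allowPrimitiveCoercion = false the string literal is expected to be
    // rejected, because isString is true on the primitive.
    val strict = Json { allowPrimitiveCoercion = false }
    runCatching { strict.decodeFromJsonElement<Flag>(element) }
        .onFailure { println("Rejected: ${it.message}") }
}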
@@ -438,14 +438,18 @@ internal abstract class AbstractJsonLexer {
}

// Allows consuming unquoted string
fun consumeStringLenient(): String {
fun consumeStringLenient(): String =
consumeOther(allowQuoted = true)

fun consumeOther(allowQuoted: Boolean): String {
// TODO this string peeking stuff might break things...
Contributor Author (review comment): Looking at the code around peekedString I'm pretty sure this is not actually a problem, because this branch would only ever be hit when attempting to read a string. Right?

if (peekedString != null) {
return takePeeked()
}
var current = skipWhitespaces()
if (current >= source.length || current == -1) fail("EOF", current)
val token = charToTokenClass(source[current])
if (token == TC_STRING) {
if (allowQuoted && token == TC_STRING) {
return consumeString()
}

@@ -587,15 +591,18 @@
throw JsonDecodingException(position, message + " at path: " + path.getPath() + hintMessage, source)
}

fun consumeNumericLiteral(): Long {
fun consumeNumericLiteral(): Long =
consumeNumericLiteral(coercePrimitives = true)

fun consumeNumericLiteral(coercePrimitives: Boolean): Long {
/*
* This is an optimized (~40% for numbers) version of consumeString().toLong()
* that doesn't allocate and also doesn't support any radix but 10
*/
var current = skipWhitespaces()
current = prefetchOrEof(current)
if (current >= source.length || current == -1) fail("EOF")
val hasQuotation = if (source[current] == STRING) {
val hasQuotation = if (coercePrimitives && source[current] == STRING) {
// Check it again
// not sure if we should call ensureHaveChars() because the threshold is far greater than the char count of MAX_LONG
if (++current == source.length) fail("EOF")