diff --git a/README.md b/README.md index 5116332..9dfc7a2 100644 --- a/README.md +++ b/README.md @@ -216,6 +216,7 @@ feeds: ### Example Configuration File ```yaml +showEmptyFields: true feeds: - name: "application-1-logs" commands: @@ -229,6 +230,7 @@ feeds: - "DEBUG" excludeFields: - "thread_name" + showEmptyFields: false - name: "application-2-logs" commands: - cat log2.txt @@ -288,6 +290,12 @@ json-log-viewer --config-file json-log-viewer.yml json-log-viewer --timestamp-field time ``` +- **--show-empty-fields**: Display fields with empty values (null, blank strings, empty objects, and empty arrays) in the output; by default these fields are hidden. + ```bash + cat log.txt | json-log-viewer --show-empty-fields + ``` + + #### Field Name Options You can override the default field names to work with non-standard log formats: diff --git a/frontend-laminar/src/main/scala/ru/d10xa/jsonlogviewer/ViewElement.scala b/frontend-laminar/src/main/scala/ru/d10xa/jsonlogviewer/ViewElement.scala index adec23b..4df7af6 100644 --- a/frontend-laminar/src/main/scala/ru/d10xa/jsonlogviewer/ViewElement.scala +++ b/frontend-laminar/src/main/scala/ru/d10xa/jsonlogviewer/ViewElement.scala @@ -26,6 +26,7 @@ object ViewElement { def makeConfigYamlForInlineInput(string: String, config: Config): ConfigYaml = ConfigYaml( + showEmptyFields = None, fieldNames = None, feeds = Some( List( @@ -38,7 +39,8 @@ object ViewElement { rawInclude = None, rawExclude = None, excludeFields = None, - fieldNames = None + fieldNames = None, + showEmptyFields = None ) ) ) diff --git a/json-log-viewer/jvm/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYamlLoaderImpl.scala b/json-log-viewer/jvm/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYamlLoaderImpl.scala index 6d265c9..064fbbe 100644 --- a/json-log-viewer/jvm/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYamlLoaderImpl.scala +++ b/json-log-viewer/jvm/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYamlLoaderImpl.scala @@ -164,6 +164,22 @@ class ConfigYamlLoaderImpl extends ConfigYamlLoader { case None => Validated.valid(None) } + private def parseOptionalBoolean( + fields: Map[String, Json], + fieldName: String + ): ValidatedNel[String, Option[Boolean]] = + fields.get(fieldName) match { + case Some(jsonValue) => + jsonValue + .as[Boolean] + .leftMap(_ => + s"Invalid '$fieldName' field format, should be a boolean" + ) + .toValidatedNel + .map(Some(_)) + case None => Validated.valid(None) + } + private def parseFeed(feedJson: Json): ValidatedNel[String, Feed] = feedJson.asObject.map(_.toMap) match { case None => Validated.invalidNel("Feed entry is not a valid JSON object") @@ -177,7 +193,7 @@ class ConfigYamlLoaderImpl extends ConfigYamlLoader { parseOptionalString(feedFields, "inlineInput") val filterValidated = parseOptionalQueryAST(feedFields, "filter") val formatInValidated - : Validated[NonEmptyList[String], Option[FormatIn]] = + : Validated[NonEmptyList[String], Option[FormatIn]] = parseOptionalFormatIn(feedFields, "formatIn") val fieldNamesValidated = parseOptionalFieldNames(feedFields, "fieldNames") @@ -190,6 +206,9 @@ class ConfigYamlLoaderImpl extends ConfigYamlLoader { feedFields, "excludeFields" ) + val showEmptyFieldsValidated = + parseOptionalBoolean(feedFields, "showEmptyFields") + ( nameValidated, commandsValidated, @@ -199,7 +218,8 @@ class ConfigYamlLoaderImpl extends ConfigYamlLoader { fieldNamesValidated, rawIncludeValidated, rawExcludeValidated, - excludeFieldsValidated + excludeFieldsValidated, + showEmptyFieldsValidated ) .mapN(Feed.apply) } @@ -223,7 +243,10 @@ class
ConfigYamlLoaderImpl extends ConfigYamlLoader { parseOptionalFeeds(fields, "feeds") val fieldNamesValidated = parseOptionalFieldNames(fields, "fieldNames") - (fieldNamesValidated, feedsValidated).mapN(ConfigYaml.apply) + val showEmptyFieldsValidated = + parseOptionalBoolean(fields, "showEmptyFields") + + (fieldNamesValidated, feedsValidated, showEmptyFieldsValidated).mapN(ConfigYaml.apply) } } } diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/config/ResolvedConfig.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/config/ResolvedConfig.scala index 5699a57..a63eff0 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/config/ResolvedConfig.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/config/ResolvedConfig.scala @@ -37,7 +37,8 @@ final case class ResolvedConfig( timestampBefore: Option[ZonedDateTime], // Other settings - grep: List[ConfigGrep] + grep: List[ConfigGrep], + showEmptyFields: Boolean ) /** Resolves configuration by merging global and feed-specific settings into a @@ -75,7 +76,9 @@ object ConfigResolver { // For each feed, merge its field names with global field names val feedFieldNames = mergeFieldNames(globalFieldNames, feed.fieldNames) - + val feedShowEmptyFields = feed.showEmptyFields + .orElse(yaml.showEmptyFields) + .getOrElse(config.showEmptyFields) ResolvedConfig( feedName = feed.name, commands = feed.commands, @@ -89,7 +92,8 @@ object ConfigResolver { excludeFields = feed.excludeFields, timestampAfter = config.timestamp.after, timestampBefore = config.timestamp.before, - grep = config.grep + grep = config.grep, + showEmptyFields = feedShowEmptyFields ) } case _ => @@ -108,7 +112,8 @@ object ConfigResolver { excludeFields = None, timestampAfter = config.timestamp.after, timestampBefore = config.timestamp.before, - grep = config.grep + grep = config.grep, + showEmptyFields = config.showEmptyFields ) ) } @@ -128,7 +133,8 @@ object ConfigResolver { excludeFields = None, timestampAfter = config.timestamp.after, timestampBefore = config.timestamp.before, - grep = config.grep + grep = config.grep, + showEmptyFields = config.showEmptyFields ) ) } diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/Config.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/Config.scala index 8f9b500..20b289f 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/Config.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/Config.scala @@ -12,7 +12,8 @@ final case class Config( grep: List[ConfigGrep], filter: Option[QueryAST], formatIn: Option[Config.FormatIn], - formatOut: Option[Config.FormatOut] + formatOut: Option[Config.FormatOut], + showEmptyFields: Boolean ) object Config: diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/DeclineOpts.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/DeclineOpts.scala index 6fe81e1..84d1cf5 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/DeclineOpts.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/DeclineOpts.scala @@ -114,6 +114,10 @@ object DeclineOpts { .map(ConfigFile.apply) .orNone + val showEmptyFields: Opts[Boolean] = Opts + .flag("show-empty-fields", help = "Show fields with empty values in output") + .orFalse + val config: Opts[Config] = ( configFile, @@ -122,17 +126,19 @@ object DeclineOpts { grepConfig, filterConfig, formatIn, - 
formatOut + formatOut, + showEmptyFields ).mapN { case ( - configFile, - fieldNamesConfig, - timestampConfig, - grepConfig, - filterConfig, - formatIn, - formatOut - ) => + configFile, + fieldNamesConfig, + timestampConfig, + grepConfig, + filterConfig, + formatIn, + formatOut, + showEmptyFields + ) => Config( configFile = configFile, fieldNames = fieldNamesConfig, @@ -140,7 +146,8 @@ object DeclineOpts { grep = grepConfig, filter = filterConfig, formatIn = formatIn, - formatOut = formatOut + formatOut = formatOut, + showEmptyFields = showEmptyFields ) } diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYaml.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYaml.scala index 1ecb219..fbb3f00 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYaml.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/ConfigYaml.scala @@ -2,8 +2,9 @@ package ru.d10xa.jsonlogviewer.decline.yaml case class ConfigYaml( fieldNames: Option[FieldNames], - feeds: Option[List[Feed]] + feeds: Option[List[Feed]], + showEmptyFields: Option[Boolean] ) object ConfigYaml: - val empty: ConfigYaml = ConfigYaml(None, None) + val empty: ConfigYaml = ConfigYaml(None, None, None) diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/Feed.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/Feed.scala index 02b19f4..ce42164 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/Feed.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/decline/yaml/Feed.scala @@ -12,5 +12,6 @@ case class Feed( fieldNames: Option[FieldNames], rawInclude: Option[List[String]], rawExclude: Option[List[String]], - excludeFields: Option[List[String]] + excludeFields: Option[List[String]], + showEmptyFields: Option[Boolean] ) diff --git a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatter.scala b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatter.scala index 712214a..23e4c1b 100644 --- a/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatter.scala +++ b/json-log-viewer/shared/src/main/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatter.scala @@ -89,9 +89,13 @@ class ColorLineFormatter( otherAttributes: Map[String, String], needNewLine: Boolean ): Seq[Str] = - val filteredAttributes = otherAttributes.filterNot { case (key, _) => - shouldExcludeField(key) - } + val filteredAttributes = otherAttributes + .filterNot { case (key, _) => + shouldExcludeField(key) + } + .filterNot { case (_, value) => + !config.showEmptyFields && isEmptyValue(value) + } filteredAttributes match case m if m.isEmpty => Nil @@ -110,6 +114,9 @@ class ColorLineFormatter( ) (if (needNewLine) strNewLine else strEmpty) :: s :: Nil + private def isEmptyValue(value: String): Boolean = + value.trim.isEmpty || value == "null" || value == "\"\"" || value == "{}" || value == "[]" + def strPrefix(s: Option[String]): Seq[Str] = if (shouldExcludeField("prefix")) Nil else diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamIntegrationTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamIntegrationTest.scala index 3b1ca4e..f36cf97 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamIntegrationTest.scala +++ 
b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamIntegrationTest.scala @@ -40,13 +40,15 @@ class LogViewerStreamIntegrationTest extends CatsEffectSuite { grep = List.empty, filter = None, formatIn = Some(Config.FormatIn.Json), - formatOut = Some(Config.FormatOut.Raw) + formatOut = Some(Config.FormatOut.Raw), + showEmptyFields = false ) test("config filters should update during live reload") { // Create config with INFO filter val infoFilter = QueryCompiler("level = 'INFO'").toOption val initialConfig = ConfigYaml( + showEmptyFields = None, fieldNames = None, feeds = Some( List( @@ -59,7 +61,8 @@ class LogViewerStreamIntegrationTest extends CatsEffectSuite { fieldNames = None, rawInclude = None, rawExclude = None, - excludeFields = None + excludeFields = None, + showEmptyFields = None ) ) ) @@ -155,6 +158,7 @@ class LogViewerStreamIntegrationTest extends CatsEffectSuite { test("field mappings should update during live reload") { // Initial configuration with standard field names val initialConfig = ConfigYaml( + showEmptyFields = None, fieldNames = None, feeds = Some( List( @@ -167,7 +171,8 @@ class LogViewerStreamIntegrationTest extends CatsEffectSuite { fieldNames = None, rawInclude = None, rawExclude = None, - excludeFields = None + excludeFields = None, + showEmptyFields = None ) ) ) @@ -180,6 +185,7 @@ class LogViewerStreamIntegrationTest extends CatsEffectSuite { // Updated config with custom field names mapping val updatedConfig = ConfigYaml( + showEmptyFields = None, fieldNames = Some( FieldNames( timestamp = Some("ts"), diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamLiveReloadTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamLiveReloadTest.scala index 2fe8ce8..9e298ec 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamLiveReloadTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/LogViewerStreamLiveReloadTest.scala @@ -13,15 +13,17 @@ import ru.d10xa.jsonlogviewer.decline.FieldNamesConfig import ru.d10xa.jsonlogviewer.decline.TimestampConfig import ru.d10xa.jsonlogviewer.query.QueryCompiler -/** - * Tests that verify LogViewerStream's live config reload functionality - */ +/** Tests that verify LogViewerStream's live config reload functionality + */ class LogViewerStreamLiveReloadTest extends CatsEffectSuite { // Test logs - val infoLog = """{"@timestamp":"2023-01-01T10:00:00Z","level":"INFO","message":"Test message","logger_name":"TestLogger","thread_name":"main"}""" - val errorLog = """{"@timestamp":"2023-01-01T12:00:00Z","level":"ERROR","message":"Error message","logger_name":"TestLogger","thread_name":"main"}""" - val customFormatLog = """{"ts":"2023-01-01T12:00:00Z","severity":"ERROR","msg":"Test message","logger_name":"TestLogger","thread_name":"main"}""" + val infoLog = + """{"@timestamp":"2023-01-01T10:00:00Z","level":"INFO","message":"Test message","logger_name":"TestLogger","thread_name":"main"}""" + val errorLog = + """{"@timestamp":"2023-01-01T12:00:00Z","level":"ERROR","message":"Error message","logger_name":"TestLogger","thread_name":"main"}""" + val customFormatLog = + """{"ts":"2023-01-01T12:00:00Z","severity":"ERROR","msg":"Test message","logger_name":"TestLogger","thread_name":"main"}""" val baseConfig = Config( configFile = None, @@ -37,7 +39,8 @@ class LogViewerStreamLiveReloadTest extends CatsEffectSuite { grep = List.empty, filter = None, formatIn = Some(Config.FormatIn.Json), - formatOut = 
Some(Config.FormatOut.Raw) + formatOut = Some(Config.FormatOut.Raw), + showEmptyFields = false ) test("live reload should update filters during execution") { @@ -45,17 +48,23 @@ class LogViewerStreamLiveReloadTest extends CatsEffectSuite { val infoFilter = QueryCompiler("level = 'INFO'").toOption val initialConfig = ConfigYaml( fieldNames = None, - feeds = Some(List(Feed( - name = Some("test-feed"), - commands = List.empty, - inlineInput = None, - filter = infoFilter, - formatIn = None, - fieldNames = None, - rawInclude = None, - rawExclude = None, - excludeFields = None - ))) + showEmptyFields = None, + feeds = Some( + List( + Feed( + name = Some("test-feed"), + commands = List.empty, + inlineInput = None, + filter = infoFilter, + formatIn = None, + fieldNames = None, + rawInclude = None, + rawExclude = None, + excludeFields = None, + showEmptyFields = None + ) + ) + ) ) // Updated config with ERROR filter @@ -69,53 +78,83 @@ class LogViewerStreamLiveReloadTest extends CatsEffectSuite { configRef <- Ref.of[IO, Option[ConfigYaml]](Some(initialConfig)) // Process logs with initial config (INFO filter) - infoResults1 <- LogViewerStream.processLineWithRef(infoLog, configRef, baseConfig).compile.toList - errorResults1 <- LogViewerStream.processLineWithRef(errorLog, configRef, baseConfig).compile.toList + infoResults1 <- LogViewerStream + .processLineWithRef(infoLog, configRef, baseConfig) + .compile + .toList + errorResults1 <- LogViewerStream + .processLineWithRef(errorLog, configRef, baseConfig) + .compile + .toList // Update config to use ERROR filter _ <- configRef.set(Some(updatedConfig)) // Process logs with updated config (ERROR filter) - infoResults2 <- LogViewerStream.processLineWithRef(infoLog, configRef, baseConfig).compile.toList - errorResults2 <- LogViewerStream.processLineWithRef(errorLog, configRef, baseConfig).compile.toList + infoResults2 <- LogViewerStream + .processLineWithRef(infoLog, configRef, baseConfig) + .compile + .toList + errorResults2 <- LogViewerStream + .processLineWithRef(errorLog, configRef, baseConfig) + .compile + .toList } yield { // With initial config (INFO filter): assert(infoResults1.nonEmpty, "INFO log should pass initial INFO filter") - assert(errorResults1.isEmpty, "ERROR log should not pass initial INFO filter") + assert( + errorResults1.isEmpty, + "ERROR log should not pass initial INFO filter" + ) // With updated config (ERROR filter): - assert(infoResults2.isEmpty, "INFO log should not pass updated ERROR filter") - assert(errorResults2.nonEmpty, "ERROR log should pass updated ERROR filter") + assert( + infoResults2.isEmpty, + "INFO log should not pass updated ERROR filter" + ) + assert( + errorResults2.nonEmpty, + "ERROR log should pass updated ERROR filter" + ) } } test("live reload should update field names mapping") { // Initial config with standard field names val initialConfig = ConfigYaml( + showEmptyFields = None, fieldNames = None, - feeds = Some(List(Feed( - name = Some("test-feed"), - commands = List.empty, - inlineInput = None, - filter = None, - formatIn = None, - fieldNames = None, - rawInclude = None, - rawExclude = None, - excludeFields = None - ))) + feeds = Some( + List( + Feed( + name = Some("test-feed"), + commands = List.empty, + inlineInput = None, + filter = None, + formatIn = None, + fieldNames = None, + rawInclude = None, + rawExclude = None, + excludeFields = None, + showEmptyFields = None + ) + ) + ) ) // Updated config with custom field names val customFieldConfig = ConfigYaml( - fieldNames = Some(FieldNames( - 
timestamp = Some("ts"), - level = Some("severity"), - message = Some("msg"), - stackTrace = None, - loggerName = None, - threadName = None - )), + showEmptyFields = None, + fieldNames = Some( + FieldNames( + timestamp = Some("ts"), + level = Some("severity"), + message = Some("msg"), + stackTrace = None, + loggerName = None, + threadName = None + ) + ), feeds = initialConfig.feeds ) @@ -131,28 +170,63 @@ class LogViewerStreamLiveReloadTest extends CatsEffectSuite { // With initial config: // Standard log with level=ERROR should pass level filter - standardLevelResults <- LogViewerStream.processLineWithRef(errorLog, configRef, configWithLevelFilter).compile.toList + standardLevelResults <- LogViewerStream + .processLineWithRef(errorLog, configRef, configWithLevelFilter) + .compile + .toList // Custom log with severity=ERROR should not pass level filter (field not recognized) - customLevelResults <- LogViewerStream.processLineWithRef(customFormatLog, configRef, configWithLevelFilter).compile.toList + customLevelResults <- LogViewerStream + .processLineWithRef(customFormatLog, configRef, configWithLevelFilter) + .compile + .toList // Custom log with severity=ERROR should not pass severity filter (field not mapped) - customSeverityResults1 <- LogViewerStream.processLineWithRef(customFormatLog, configRef, configWithSeverityFilter).compile.toList + customSeverityResults1 <- LogViewerStream + .processLineWithRef( + customFormatLog, + configRef, + configWithSeverityFilter + ) + .compile + .toList // Update config to map custom field names _ <- configRef.set(Some(customFieldConfig)) // With updated config: // Standard log with level=ERROR should still pass level filter - standardLevelResults2 <- LogViewerStream.processLineWithRef(errorLog, configRef, configWithLevelFilter).compile.toList + standardLevelResults2 <- LogViewerStream + .processLineWithRef(errorLog, configRef, configWithLevelFilter) + .compile + .toList // Custom log should now pass severity filter (field properly mapped) - customSeverityResults2 <- LogViewerStream.processLineWithRef(customFormatLog, configRef, configWithSeverityFilter).compile.toList + customSeverityResults2 <- LogViewerStream + .processLineWithRef( + customFormatLog, + configRef, + configWithSeverityFilter + ) + .compile + .toList } yield { // Before field mapping update: - assert(standardLevelResults.nonEmpty, "Standard log should pass level filter with initial config") - assert(customLevelResults.isEmpty, "Custom log should not pass level filter with initial config") + assert( + standardLevelResults.nonEmpty, + "Standard log should pass level filter with initial config" + ) + assert( + customLevelResults.isEmpty, + "Custom log should not pass level filter with initial config" + ) // After field mapping update: - assert(standardLevelResults2.nonEmpty, "Standard log should still pass level filter after update") - assert(customSeverityResults2.nonEmpty, "Custom log should pass severity filter after field mapping update") + assert( + standardLevelResults2.nonEmpty, + "Standard log should still pass level filter after update" + ) + assert( + customSeverityResults2.nonEmpty, + "Custom log should pass severity filter after field mapping update" + ) } } -} \ No newline at end of file +} diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysGetByKeyTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysGetByKeyTest.scala index e6c320d..bf924d1 100644 --- 
a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysGetByKeyTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysGetByKeyTest.scala @@ -26,7 +26,8 @@ class ParseResultKeysGetByKeyTest extends FunSuite { excludeFields = None, timestampAfter = None, timestampBefore = None, - grep = List.empty + grep = List.empty, + showEmptyFields = false, ) private val parseResult = ParseResult( diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysTest.scala index b93f691..f4f8744 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/ParseResultKeysTest.scala @@ -29,7 +29,8 @@ class ParseResultKeysTest extends FunSuite { excludeFields = None, timestampAfter = None, timestampBefore = None, - grep = List.empty + grep = List.empty, + showEmptyFields = false ) val customConfig = ResolvedConfig( @@ -52,7 +53,8 @@ class ParseResultKeysTest extends FunSuite { excludeFields = None, timestampAfter = None, timestampBefore = None, - grep = List.empty + grep = List.empty, + showEmptyFields = false ) val parseResult = ParseResult( diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/config/ConfigResolverTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/config/ConfigResolverTest.scala index deeba91..e466a93 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/config/ConfigResolverTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/config/ConfigResolverTest.scala @@ -24,7 +24,8 @@ class ConfigResolverTest extends FunSuite { grep = List.empty, filter = None, formatIn = None, - formatOut = None + formatOut = None, + showEmptyFields = false ) test("resolve returns single config when no ConfigYaml provided") { @@ -43,6 +44,7 @@ class ConfigResolverTest extends FunSuite { test("resolve merges global fieldNames from ConfigYaml") { val configYaml = ConfigYaml( + showEmptyFields = None, fieldNames = Some( FieldNames( timestamp = Some("ts"), @@ -72,6 +74,7 @@ class ConfigResolverTest extends FunSuite { test("resolve correctly handles multiple feeds") { val configYaml = ConfigYaml( + showEmptyFields = None, fieldNames = Some( FieldNames( timestamp = Some("ts"), @@ -102,7 +105,8 @@ class ConfigResolverTest extends FunSuite { ), rawInclude = None, rawExclude = None, - excludeFields = None + excludeFields = None, + showEmptyFields = None ), Feed( name = Some("feed2"), @@ -113,7 +117,8 @@ class ConfigResolverTest extends FunSuite { fieldNames = None, rawInclude = None, rawExclude = None, - excludeFields = None + excludeFields = None, + showEmptyFields = None ) ) ) diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvLogLineParserTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvLogLineParserTest.scala index 6a7c6e7..42e3ea9 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvLogLineParserTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvLogLineParserTest.scala @@ -26,7 +26,8 @@ class CsvLogLineParserTest extends FunSuite { excludeFields = None, timestampAfter = None, timestampBefore = None, - grep = List.empty + grep = List.empty, + showEmptyFields = false ) test("parse CSV log line with standard headers") { diff --git 
a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvProcessingTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvProcessingTest.scala index 553721f..955a21d 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvProcessingTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/csv/CsvProcessingTest.scala @@ -33,7 +33,8 @@ class CsvProcessingTest extends CatsEffectSuite { grep = List.empty, filter = None, formatIn = Some(Config.FormatIn.Csv), - formatOut = Some(Config.FormatOut.Raw) + formatOut = Some(Config.FormatOut.Raw), + showEmptyFields = false ) for { @@ -84,7 +85,8 @@ class CsvProcessingTest extends CatsEffectSuite { grep = List.empty, filter = None, formatIn = Some(Config.FormatIn.Csv), - formatOut = Some(Config.FormatOut.Raw) + formatOut = Some(Config.FormatOut.Raw), + showEmptyFields = false ) for { diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatterTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatterTest.scala new file mode 100644 index 0000000..b9f4563 --- /dev/null +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/formatout/ColorLineFormatterTest.scala @@ -0,0 +1,122 @@ +package ru.d10xa.jsonlogviewer.formatout + +import munit.FunSuite +import ru.d10xa.jsonlogviewer.config.ResolvedConfig +import ru.d10xa.jsonlogviewer.decline.FieldNamesConfig +import ru.d10xa.jsonlogviewer.ParseResult +import ru.d10xa.jsonlogviewer.ParsedLine + +class ColorLineFormatterTest extends FunSuite { + + val standardConfig: ResolvedConfig = ResolvedConfig( + feedName = None, + commands = List.empty, + inlineInput = None, + filter = None, + formatIn = None, + formatOut = None, + fieldNames = FieldNamesConfig( + timestampFieldName = "@timestamp", + levelFieldName = "level", + messageFieldName = "message", + stackTraceFieldName = "stack_trace", + loggerNameFieldName = "logger_name", + threadNameFieldName = "thread_name" + ), + rawInclude = None, + rawExclude = None, + excludeFields = None, + timestampAfter = None, + timestampBefore = None, + grep = List.empty, + showEmptyFields = false + ) + + val configWithEmptyFields: ResolvedConfig = standardConfig.copy( + showEmptyFields = true + ) + + test("shouldHideEmptyFields when showEmptyFields=false") { + val formatter = new ColorLineFormatter(standardConfig, None, None) + + val parseResult = ParseResult( + raw = "raw log line", + parsed = Some( + ParsedLine( + timestamp = Some("2023-01-01T12:00:00Z"), + level = Some("INFO"), + message = Some("Test message"), + stackTrace = None, + loggerName = Some("TestLogger"), + threadName = Some("main"), + otherAttributes = Map( + "empty_string" -> "", + "null_value" -> "null", + "normal_value" -> "test", + "empty_object" -> "{}", + "empty_array" -> "[]" + ) + ) + ), + middle = "middle part", + prefix = None, + postfix = None + ) + + val formatted = formatter.formatLine(parseResult).plainText + + assert(formatted.contains("normal_value"), "Normal value should be present") + assert( + !formatted.contains("empty_string"), + "Empty string should not be present" + ) + assert( + !formatted.contains("null_value"), + "Null value should not be present" + ) + assert( + !formatted.contains("empty_object"), + "Empty object should not be present" + ) + assert( + !formatted.contains("empty_array"), + "Empty array should not be present" + ) + } + + test("shouldShowEmptyFields when showEmptyFields=true") { + val formatter = new 
ColorLineFormatter(configWithEmptyFields, None, None) + + val parseResult = ParseResult( + raw = "raw log line", + parsed = Some( + ParsedLine( + timestamp = Some("2023-01-01T12:00:00Z"), + level = Some("INFO"), + message = Some("Test message"), + stackTrace = None, + loggerName = Some("TestLogger"), + threadName = Some("main"), + otherAttributes = Map( + "empty_string" -> "", + "null_value" -> "null", + "normal_value" -> "test", + "empty_object" -> "{}", + "empty_array" -> "[]" + ) + ) + ), + middle = "middle part", + prefix = None, + postfix = None + ) + + val formatted = formatter.formatLine(parseResult).plainText + + assert(formatted.contains("normal_value"), "Normal value should be present") + assert(formatted.contains("empty_string"), "Empty string should be present") + assert(formatted.contains("null_value"), "Null value should be present") + assert(formatted.contains("empty_object"), "Empty object should be present") + assert(formatted.contains("empty_array"), "Empty array should be present") + } +} diff --git a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/query/LogLineQueryPredicateImplTest.scala b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/query/LogLineQueryPredicateImplTest.scala index a724de4..638eaa2 100644 --- a/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/query/LogLineQueryPredicateImplTest.scala +++ b/json-log-viewer/shared/src/test/scala/ru/d10xa/jsonlogviewer/query/LogLineQueryPredicateImplTest.scala @@ -83,6 +83,7 @@ class LogLineQueryPredicateImplTest extends munit.FunSuite { ) private val resolvedConfig: ResolvedConfig = ResolvedConfig( + showEmptyFields = false, feedName = None, commands = List.empty, inlineInput = None,
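
Reviewer note (not part of the patch): the two behavioural rules added here are spread across files and easy to miss. ConfigResolver picks the effective flag as the feed-level `showEmptyFields`, falling back to the global YAML `showEmptyFields` and then to the CLI `--show-empty-fields` flag (which defaults to false), and ColorLineFormatter drops attributes whose values its `isEmptyValue` check considers empty when the flag is off. Below is a minimal, self-contained Scala sketch of those two rules; `ShowEmptyFieldsSketch` and `resolveShowEmptyFields` are illustrative names, not identifiers from this change.

```scala
// Illustrative sketch only; mirrors the logic in ConfigResolver and ColorLineFormatter.
object ShowEmptyFieldsSketch {

  // Effective value: feed-level setting > global YAML setting > CLI flag (defaults to false).
  def resolveShowEmptyFields(
      feedSetting: Option[Boolean],
      yamlSetting: Option[Boolean],
      cliFlag: Boolean
  ): Boolean =
    feedSetting.orElse(yamlSetting).getOrElse(cliFlag)

  // Values treated as "empty" when showEmptyFields is off: blank strings and the
  // string encodings of null, "", {} and [].
  def isEmptyValue(value: String): Boolean =
    value.trim.isEmpty || value == "null" || value == "\"\"" || value == "{}" || value == "[]"

  def main(args: Array[String]): Unit = {
    println(resolveShowEmptyFields(Some(false), Some(true), cliFlag = true)) // false: feed-level wins
    println(resolveShowEmptyFields(None, Some(true), cliFlag = false))       // true: global YAML wins
    println(resolveShowEmptyFields(None, None, cliFlag = false))             // false: CLI default
    println(isEmptyValue("[]"))   // true  -> hidden unless showEmptyFields is enabled
    println(isEmptyValue("test")) // false -> always shown
  }
}
```

The precedence mirrors how `fieldNames` are already resolved (feed-level values override global ones), which keeps per-feed overrides consistent across settings.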