Best Kotest code snippet using io.kotest.runner.junit.platform.JUnitTestEngineListener.addChild
Source: JUnitTestEngineListener.kt
...
 *
 * Top level descriptors must have a source attached or the execution will fail with a parent attached exception.
 * Type.CONTAINER_TEST doesn't seem to work as a top level descriptor, it will hang.
 * Leaf tests do not need to be completed but they will be marked as incomplete in intellij.
 * Dynamic test can be called after or before addChild.
 * A Type.TEST can be a child of a Type.TEST.
 * Intermediate Type.CONTAINERs seem to be ignored in output.
 * Intermediate containers can have the same class source as the parent.
 * Type.TEST as top level seems to hang.
 * A TEST doesn't seem to be able to have the same source as a parent, or it will hang.
 * A TEST seems to hang if it has a ClassSource.
 * MethodSource seems to be ok with a TEST.
 * Container test names seem to be taken from a Source.
 * Nested tests are outputted as siblings.
 * Can complete executions out of order.
 * Child failures will fail the parent CONTAINER.
 * Sibling containers can start and finish in parallel.
 *
 * Intellij runner observations:
 *
 * Intermediate Type.CONTAINERs are shown.
 * Intermediate Type.TESTs are shown.
 * A Type.TEST can be a child of a Type.TEST.
 * MethodSource seems to be ok with a TEST.
 * Container test names seem to be taken from the name property.
 * Nested tests are outputted as nested.
 * Child failures will not fail a containing TEST.
 * Child failures will fail a containing CONTAINER.
 * Call addChild _before_ registering the test, otherwise it will appear in the display out of order.
 * Must start tests after their parent or they can go missing.
 * Sibling containers can start and finish in parallel.
 */
class JUnitTestEngineListener(
   private val listener: EngineExecutionListener,
   val root: EngineDescriptor,
) : AbstractTestEngineListener() {

   private val logger = Logger(JUnitTestEngineListener::class)

   private var formatter: DisplayNameFormatter = DefaultDisplayNameFormatter(ProjectConfiguration())

   // contains a mapping of junit TestDescriptors, so we can find previously registered tests
   private val descriptors = mutableMapOf<Descriptor, TestDescriptor>()

   private var started = false

   private val startedTests = mutableSetOf<Descriptor.TestDescriptor>()

   // the root tests are our entry point when outputting results
   private val rootTests = mutableListOf<TestCase>()

   private var failOnIgnoredTests = false

   private val children = mutableMapOf<Descriptor, MutableList<TestCase>>()

   private val results = mutableMapOf<Descriptor, TestResult>()

   private val dummies = hashSetOf<String>()

   override suspend fun engineStarted() {
      logger.log { Pair(null, "Engine started") }
      listener.executionStarted(root)
   }

   override suspend fun engineInitialized(context: EngineContext) {
      failOnIgnoredTests = context.configuration.failOnIgnoredTests
      formatter = getDisplayNameFormatter(context.configuration.registry, context.configuration)
   }

   override suspend fun engineFinished(t: List<Throwable>) {
      logger.log { Pair(null, "Engine finished; throwables=[${t}]") }
      registerExceptionPlaceholders(t)
      val result = if (failOnIgnoredTests && results.values.any { it.isIgnored }) {
         TestExecutionResult.failed(RuntimeException("Build contained ignored test"))
      } else {
         TestExecutionResult.successful()
      }
      logger.log { Pair(null, "Notifying junit that engine completed $root") }
      listener.executionFinished(root, result)
   }

   override suspend fun specStarted(kclass: KClass<*>) {
      markSpecStarted(kclass)
   }

   override suspend fun specFinished(kclass: KClass<*>, result: TestResult) {
      val t = result.errorOrNull
      when {
         // if we have a spec error before we even started the spec, we will start the spec, add a placeholder
         // to hold the error, mark that test as failed, and then fail the spec as well
         t != null && !started -> {
            val descriptor = markSpecStarted(kclass)
            addPlaceholderTest(descriptor, t, kclass)
            logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
            listener.executionFinished(descriptor, TestExecutionResult.failed(t))
         }
         // if we had an error in the spec, and we had no tests, we'll add the dummy and return
         t != null && rootTests.isEmpty() -> {
            val descriptor = getSpecDescriptor(kclass)
            addPlaceholderTest(descriptor, t, kclass)
            logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
            listener.executionFinished(descriptor, TestExecutionResult.failed(t))
         }
         else -> {
            val descriptor = getSpecDescriptor(kclass)
            val result = when (t) {
               null -> TestExecutionResult.successful()
               else -> {
                  addPlaceholderTest(descriptor, t, kclass)
                  TestExecutionResult.successful()
               }
            }
            logger.log { Pair(kclass.bestName(), "executionFinished: $descriptor") }
            listener.executionFinished(descriptor, result)
         }
      }
      reset()
   }

   override suspend fun specIgnored(kclass: KClass<*>, reason: String?) {
      logger.log { Pair(kclass.bestName(), "Spec is being flagged as ignored") }
      listener.executionSkipped(getSpecDescriptor(kclass), reason)
   }

   private fun markSpecStarted(kclass: KClass<*>): TestDescriptor {
      return try {
         val descriptor = getSpecDescriptor(root, kclass.toDescriptor(), formatter.format(kclass))
         logger.log { Pair(kclass.bestName(), "Registering dynamic spec $descriptor") }
         listener.dynamicTestRegistered(descriptor)
         logger.log { Pair(kclass.bestName(), "Spec executionStarted $descriptor") }
         listener.executionStarted(descriptor)
         started = true
         descriptor
      } catch (t: Throwable) {
         logger.log { Pair(kclass.bestName(), "Error in JUnit Platform listener $t") }
         throw t
      }
   }

   private fun reset() {
      rootTests.clear()
      children.clear()
      results.clear()
      started = false
      descriptors.clear()
      startedTests.clear()
   }

   private fun addPlaceholderTest(parent: TestDescriptor, t: Throwable, kclass: KClass<*>) {
      val (name, cause) = ExtensionExceptionExtractor.resolve(t)
      val descriptor = createTestDescriptor(
         parent.uniqueId.append(Segment.Test.value, name),
         name,
         TestDescriptor.Type.TEST,
         ClassSource.from(kclass.java),
         false
      )
      parent.addChild(descriptor)
      listener.dynamicTestRegistered(descriptor)
      listener.executionStarted(descriptor)
      listener.executionFinished(descriptor, TestResult.Error(Duration.ZERO, cause).toTestExecutionResult())
   }

   override suspend fun testStarted(testCase: TestCase) {
      // depending on the test type, we may want to wait to notify junit, this is because gradle doesn't work
      // properly with the junit test types. Ideally, we'd just set everything to CONTAINER_AND_TEST, which is
      // supposed to mean a test can contain other tests as well as being a test itself, which is exactly how
      // Kotest views tests, but unfortunately it doesn't work properly.
      //
      // Another approach is to wait until the spec finishes to see which tests contain children and which
      // don't and set the test type appropriately, but junit doesn't give us a way to specify test duration
      // (instead it just calculates it itself from the time between marking a test as started and marking
      // it as finished), so this approach works but ends up having all tests as 0ms
      //
      // So the approach we will take is use the TestType from the test definition, unless it is dynamic,
      // then for dynamic we will calculate it later, and accept the 0ms drawback
      logger.log { Pair(testCase.name.testName, "test started") }
      if (testCase.parent == null) rootTests.add(testCase)
      addChild(testCase)
      when (testCase.type) {
         TestType.Container -> startTestIfNotStarted(testCase, TestDescriptor.Type.CONTAINER)
         TestType.Test -> startTestIfNotStarted(testCase, TestDescriptor.Type.TEST)
         TestType.Dynamic -> Unit
      }
   }

   // this test can be output now it has completed as we have all we need to know to complete it
   override suspend fun testFinished(testCase: TestCase, result: TestResult) {
      logger.log { Pair(testCase.name.testName, "test finished $result") }
      results[testCase.descriptor] = result
      val descriptor = getOrCreateTestDescriptor(testCase, null)
      // we need to ensure all parents have been started first
      startParents(testCase)
      startTestIfNotStarted(testCase, null)
      logger.log { Pair(testCase.name.testName, "executionFinished: $descriptor") }
      listener.executionFinished(descriptor, result.toTestExecutionResult())
   }

   override suspend fun testIgnored(testCase: TestCase, reason: String?) {
      logger.log { Pair(testCase.name.testName, "test ignored $reason") }
      if (testCase.parent == null) rootTests.add(testCase)
      addChild(testCase)
      results[testCase.descriptor] = TestResult.Ignored(reason)
      // we need to ensure all parents have been started first
      startParents(testCase)
      val descriptor = getOrCreateTestDescriptor(testCase, TestDescriptor.Type.TEST)
      logger.log { Pair(testCase.name.testName, "Registering dynamic test: $descriptor") }
      listener.dynamicTestRegistered(descriptor)
      logger.log { Pair(testCase.name.testName, "executionSkipped: $descriptor") }
      listener.executionSkipped(descriptor, reason)
   }

   private fun addChild(testCase: TestCase) {
      children.getOrPut(testCase.descriptor.parent) { mutableListOf() }.add(testCase)
   }

   private fun startParents(testCase: TestCase) {
      val parent = testCase.parent
      if (parent != null) {
         startParents(parent)
         startTestIfNotStarted(parent, null)
      }
   }

   private fun startTestIfNotStarted(testCase: TestCase, type: TestDescriptor.Type?) {
      if (!startedTests.contains(testCase.descriptor)) {
         val descriptor = getOrCreateTestDescriptor(testCase, type)
         logger.log { Pair(testCase.name.testName, "Registering dynamic test: $descriptor") }
         listener.dynamicTestRegistered(descriptor)
         logger.log { Pair(testCase.name.testName, "executionStarted: $descriptor") }
         listener.executionStarted(descriptor)
         startedTests.add(testCase.descriptor)
      }
   }

   private fun getOrCreateTestDescriptor(testCase: TestCase, type: TestDescriptor.Type?): TestDescriptor {
      val existing = descriptors[testCase.descriptor]
      if (existing != null) return existing

      val parent = when (val p = testCase.parent) {
         null -> getSpecDescriptor(testCase.spec::class)
         else -> getOrCreateTestDescriptor(p, null)
      }

      val id = parent.uniqueId.append(testCase.descriptor)

      // we dynamically work out the type if null by looking to see if this test had any children
      val c = children[testCase.descriptor]
      val t = when {
         type != null -> type
         c == null || c.isEmpty() -> TestDescriptor.Type.TEST
         else -> TestDescriptor.Type.CONTAINER
      }

      return createTestDescriptor(
         id,
         formatter.format(testCase),
         t,
         ClassSource.from(testCase.spec::class.java, null), // gradle-junit-platform hides tests if we don't send this
         type == TestDescriptor.Type.CONTAINER
      ).apply {
         parent.addChild(this)
         descriptors[testCase.descriptor] = this
      }
   }

   private fun getSpecDescriptor(kclass: KClass<*>): TestDescriptor {
      return getSpecDescriptor(root, kclass.toDescriptor(), formatter.format(kclass))
   }

   private fun createAndRegisterDummySpec(name: String): TestDescriptor {
      val unique = UniqueNames.unique(name, dummies) { s, k -> "${s}_$k" } ?: name
      dummies.add(unique)
      val descriptor = getSpecDescriptor(root, Descriptor.SpecDescriptor(DescriptorId(unique), this::class), unique)
      listener.dynamicTestRegistered(descriptor)
      return descriptor
   }

   private fun registerExceptionPlaceholders(ts: List<Throwable>) {...
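The observations in the KDoc at the top of this listing boil down to an ordering contract: attach a new descriptor to its parent with addChild, register it with dynamicTestRegistered, and only start it once its parent has been started. The sketch below shows that sequence against the raw JUnit Platform listener API; it is not Kotest code, and DynamicDescriptor and reportSpecWithOneTest are hypothetical names introduced only for illustration.

import org.junit.platform.engine.EngineExecutionListener
import org.junit.platform.engine.TestDescriptor
import org.junit.platform.engine.TestExecutionResult
import org.junit.platform.engine.UniqueId
import org.junit.platform.engine.support.descriptor.AbstractTestDescriptor
import org.junit.platform.engine.support.descriptor.EngineDescriptor

// Hypothetical descriptor type used only for this sketch.
private class DynamicDescriptor(
   id: UniqueId,
   name: String,
   private val type: TestDescriptor.Type,
) : AbstractTestDescriptor(id, name) {
   override fun getType(): TestDescriptor.Type = type
}

// root is assumed to have been started already (the engine does this in engineStarted).
fun reportSpecWithOneTest(junit: EngineExecutionListener, root: EngineDescriptor) {
   val spec = DynamicDescriptor(root.uniqueId.append("spec", "MySpec"), "MySpec", TestDescriptor.Type.CONTAINER)
   root.addChild(spec)                 // attach before registering, or IntelliJ shows it out of order
   junit.dynamicTestRegistered(spec)
   junit.executionStarted(spec)        // parents must be started before their children

   val test = DynamicDescriptor(spec.uniqueId.append("test", "my test"), "my test", TestDescriptor.Type.TEST)
   spec.addChild(test)
   junit.dynamicTestRegistered(test)
   junit.executionStarted(test)
   junit.executionFinished(test, TestExecutionResult.successful())
   junit.executionFinished(spec, TestExecutionResult.successful())
}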
addChild
Using AI Code Generation
// A minimal sketch of constructing the listener and driving the engine-level lifecycle.
// It assumes the JUnit EngineExecutionListener is supplied by the caller; in a real run
// the Kotest engine invokes these callbacks itself.
import io.kotest.runner.junit.platform.JUnitTestEngineListener
import kotlinx.coroutines.runBlocking
import org.junit.platform.engine.EngineExecutionListener
import org.junit.platform.engine.UniqueId
import org.junit.platform.engine.support.descriptor.EngineDescriptor

fun reportEngineLifecycle(junitListener: EngineExecutionListener) = runBlocking {
   val root = EngineDescriptor(UniqueId.forEngine("kotest"), "Kotest")
   val listener = JUnitTestEngineListener(junitListener, root)
   listener.engineStarted()             // notifies junit that the root descriptor has started
   // ... specs and tests would be reported here via specStarted/testStarted/testFinished ...
   listener.engineFinished(emptyList()) // no unhandled engine-level throwables
}
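Note that the listener's callbacks (engineStarted, specStarted, testStarted, testFinished and so on) are suspend functions and take Kotest's own TestCase and TestResult types, so in practice they are driven by the Kotest engine during a run rather than called directly; the snippet above only illustrates construction and the engine-level lifecycle.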
addChild
Using AI Code Generation
// A hedged sketch, not the actual Kotest engine code: a TestEngine.execute implementation
// could wire the request's EngineExecutionListener into a JUnitTestEngineListener like this.
// The cast to EngineDescriptor and the runKotest call are assumptions for illustration.
private fun addChild(parent: TestDescriptor, child: TestDescriptor) {
   // TestDescriptor.addChild attaches the child and sets its parent reference
   parent.addChild(child)
}

override fun execute(request: ExecutionRequest) {
   val root = request.rootTestDescriptor as EngineDescriptor
   val engineListener = JUnitTestEngineListener(request.engineExecutionListener, root)
   runBlocking {
      engineListener.engineStarted()
      // runKotest(engineListener)   // hypothetical: execute the discovered specs, reporting through the listener
      engineListener.engineFinished(emptyList())
   }
}
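This mirrors the design of the real listener shown above: getOrCreateTestDescriptor lazily creates one JUnit TestDescriptor per Kotest Descriptor, attaches it to its parent with addChild, and only then registers and starts it, which is exactly the ordering the IntelliJ observations in the KDoc call for.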