/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.griffin.measure.execution.impl
import org.apache.commons.lang3.StringUtils
import org.apache.griffin.measure.configuration.dqdefinition.MeasureParam
import org.apache.griffin.measure.execution.Measure._
/**
 * Unit tests for [[SchemaConformanceMeasure]]: expression-config validation,
 * writer capability flags, and end-to-end execution against the shared
 * `source` fixture provided by [[MeasureTest]].
 */
class SchemaConformanceMeasureTest extends MeasureTest {
  var param: MeasureParam = _
  final val SourceColStr: String = "source.col"
  final val DataTypeStr: String = "type"

  override def beforeAll(): Unit = {
    super.beforeAll()
    // Baseline valid param: a single expression mapping column "id" to type "int".
    param = MeasureParam(
      "param",
      "SchemaConformance",
      "source",
      Map(Expression -> Seq(Map(SourceColStr -> "id", DataTypeStr -> "int"))))
  }

  /** Asserts that constructing the measure with the given config fails validation. */
  private def expectInvalidConfig(config: Map[String, Any]): Unit =
    assertThrows[AssertionError] {
      SchemaConformanceMeasure(spark, param.copy(config = config))
    }

  "SchemaConformanceMeasure" should "validate expression config" in {
    // Expression key missing entirely
    expectInvalidConfig(Map.empty[String, String])
    // Wrong type and empty value
    expectInvalidConfig(Map(Expression -> StringUtils.EMPTY))
    // Null value
    expectInvalidConfig(Map(Expression -> null))
    // Wrong type (plain string instead of a sequence of maps)
    expectInvalidConfig(Map(Expression -> "gender"))
    // Correct type but no entries
    expectInvalidConfig(Map(Expression -> Seq.empty[Map[String, String]]))
    // Entry with neither source column nor data type
    expectInvalidConfig(Map(Expression -> Seq(Map("a" -> "b"))))
    // Entry missing the data type
    expectInvalidConfig(Map(Expression -> Seq(Map(SourceColStr -> "b"))))
    // Entry missing the source column
    expectInvalidConfig(Map(Expression -> Seq(Map(DataTypeStr -> "b"))))
  }

  it should "support metric writing" in {
    assert(SchemaConformanceMeasure(spark, param).supportsMetricWrite)
  }

  it should "support record writing" in {
    assert(SchemaConformanceMeasure(spark, param).supportsRecordWrite)
  }

  it should "execute defined measure expr" in {
    val measure = SchemaConformanceMeasure(spark, param)
    val (recordsDf, metricsDf) = measure.execute(None)

    // Outputs must use the standard record/metric schemas from the fixture.
    assertResult(recordDfSchema)(recordsDf.schema)
    assertResult(metricDfSchema)(metricsDf.schema)

    // One record row per source row; exactly one metrics row.
    assertResult(source.count())(recordsDf.count())
    assertResult(1L)(metricsDf.count())

    // Flatten the metrics column into a name -> value lookup.
    val metricEntries = metricsDf.head().getAs[Seq[Map[String, String]]](Metrics)
    val metrics = metricEntries.map(m => (m(MetricName), m(MetricValue))).toMap

    // All 5 rows conform to the declared schema, so none are incomplete.
    assertResult(metrics(Total))("5")
    assertResult(metrics(measure.Complete))("5")
    assertResult(metrics(measure.InComplete))("0")
  }
}