@@ -29,6 +29,8 @@ import TestSQLContext._
 
 class CsvSuite extends FunSuite {
   val carsFile = "src/test/resources/cars.csv"
+  val carsTypedColumnsFile = "src/test/resources/cars-typed.csv"
+  val carsTypedColumnsFailFile = "src/test/resources/cars-typed-fail.csv"
   val carsFile8859 = "src/test/resources/cars_iso-8859-1.csv"
   val carsTsvFile = "src/test/resources/cars.tsv"
   val carsAltFile = "src/test/resources/cars-alternative.csv"
@@ -159,6 +161,37 @@ class CsvSuite extends FunSuite {
     assert(results.size === numCars)
   }
 
+  test("DSL test typed columns using sparkContext.csvFile") {
+    val typedColumnsMap = Map("price" -> DoubleType, "new" -> BooleanType)
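+    // columnsTypeMap overrides the default StringType for the named columns;
+    // every other column in the schema stays a plain string (see below).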
+
+    val expectedColumnNamesAndTheirTypes =
+      Array("year" -> StringType.toString,
+        "make" -> StringType.toString,
+        "model" -> StringType.toString,
+        "comment" -> StringType.toString,
+        "price" -> DoubleType.toString,
+        "new" -> BooleanType.toString,
+        "blank" -> StringType.toString)
+
+    val results = TestSQLContext.csvFile(carsTypedColumnsFile, columnsTypeMap = typedColumnsMap)
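+    // dtypes exposes the schema as (columnName, typeName) pairs in column
+    // order, so the two typed columns should surface as Double/Boolean here.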
+    assert(results.dtypes containsSlice expectedColumnNamesAndTheirTypes)
+  }
+
+  test("DSL test typed values using sparkContext.csvFile") {
+    val typedColumnsMap = Map("price" -> DoubleType, "new" -> BooleanType)
+
+    val results = TestSQLContext.csvFile(carsTypedColumnsFile, columnsTypeMap = typedColumnsMap)
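+    // price sits at column index 4 and new at index 5 (cf. the schema checked
+    // above), so the parsed values should come back as native Double/Boolean.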
+    assert(results.collect().map(_.getDouble(4)) === Seq(90000.00d, 23000d, 40000.6767d))
+    assert(results.collect().map(_.getBoolean(5)) === Seq(false, true, false))
+  }
+
+  test("Expect parsing error with wrong type for FailFast mode using sparkContext.csvFile") {
+    val typedColumnsMap = Map("price" -> DoubleType, "new" -> BooleanType)
+
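+    // In FAILFAST mode the first value that cannot be cast to its declared
+    // type should abort the job with a SparkException (the -fail fixture
+    // presumably contains such a malformed row).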
+    intercept[SparkException] {
+      TestSQLContext.csvFile(carsTypedColumnsFailFile, columnsTypeMap = typedColumnsMap, mode = "FAILFAST").collect()
+    }
+  }
 
   test("Expect parsing error with wrong delimiter setting using sparkContext.csvFile") {
     intercept[org.apache.spark.sql.AnalysisException] {