@@ -555,13 +555,16 @@ by `SQLContext`.

For example:
{% highlight java %}
- // Import factory methods provided by DataType.
- import org.apache.spark.sql.types.DataType;
+ import org.apache.spark.api.java.function.Function;
+ // Import factory methods provided by DataTypes.
+ import org.apache.spark.sql.types.DataTypes;
// Import StructType and StructField
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.StructField;
// Import Row.
import org.apache.spark.sql.Row;
+ // Import RowFactory.
+ import org.apache.spark.sql.RowFactory;

// sc is an existing JavaSparkContext.
SQLContext sqlContext = new org.apache.spark.sql.SQLContext(sc);
@@ -575,16 +578,16 @@ String schemaString = "name age";
// Generate the schema based on the string of schema
List<StructField> fields = new ArrayList<StructField>();
for (String fieldName: schemaString.split(" ")) {
-   fields.add(DataType.createStructField(fieldName, DataType.StringType, true));
+   fields.add(DataTypes.createStructField(fieldName, DataTypes.StringType, true));
}
- StructType schema = DataType.createStructType(fields);
+ StructType schema = DataTypes.createStructType(fields);

// Convert records of the RDD (people) to Rows.
JavaRDD<Row> rowRDD = people.map(
  new Function<String, Row>() {
    public Row call(String record) throws Exception {
      String[] fields = record.split(",");
-       return Row.create(fields[0], fields[1].trim());
+       return RowFactory.create(fields[0], fields[1].trim());
    }
  });

0 commit comments