org.apache.spark.sql.Row.getInt()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(10.0k)|赞(0)|评价(0)|浏览(247)

本文整理了Java中org.apache.spark.sql.Row.getInt方法的一些代码示例,展示了Row.getInt的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Row.getInt方法的具体详情如下:
包路径:org.apache.spark.sql.Row
类名称:Row
方法名:getInt

Row.getInt介绍

暂无

代码示例

代码示例来源:origin: databricks/learning-spark

public Integer call(Row row) throws Exception {
   // Square the integer held in the row's first column.
   int value = row.getInt(0);
   return value * value;
 }
}

代码示例来源:origin: apache/phoenix

@Override
public int getInt(int columnIndex) throws SQLException {
  // JDBC columns are 1-based; the underlying Row is 0-based.
  int rowIndex = columnIndex - 1;
  wasNull = getCurrentRow().isNullAt(rowIndex);
  if (wasNull) {
    // JDBC contract: getInt on SQL NULL returns 0 and sets the wasNull flag.
    return 0;
  }
  return getCurrentRow().getInt(rowIndex);
}

代码示例来源:origin: org.apache.spark/spark-sql

@SuppressWarnings("unchecked")
@Test
public void udf2Test() {
 // Two-argument UDF returning the combined length of both strings.
 spark.udf().register("stringLengthTest",
   (String str1, String str2) -> str1.length() + str2.length(), DataTypes.IntegerType);
 // 'test' (4) + 'test2' (5) = 9.
 Row firstRow = spark.sql("SELECT stringLengthTest('test', 'test2')").head();
 Assert.assertEquals(9, firstRow.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@SuppressWarnings("unchecked")
@Test
public void udf2Test() {
 // Register a lambda UDF that adds the lengths of its two string arguments.
 spark.udf().register("stringLengthTest",
   (String str1, String str2) -> str1.length() + str2.length(), DataTypes.IntegerType);
 String query = "SELECT stringLengthTest('test', 'test2')";
 Row head = spark.sql(query).head();
 // len("test") + len("test2") == 9
 Assert.assertEquals(9, head.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.10

@SuppressWarnings("unchecked")
@Test
public void udf2Test() {
 // UDF under test: sum of the two argument lengths, declared as IntegerType.
 spark.udf().register("stringLengthTest",
   (String str1, String str2) -> str1.length() + str2.length(), DataTypes.IntegerType);
 Row resultRow = spark.sql("SELECT stringLengthTest('test', 'test2')").head();
 int combinedLength = resultRow.getInt(0);
 Assert.assertEquals(9, combinedLength);
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@SuppressWarnings("unchecked")
@Test
public void udf3Test() {
 // Register a Java-class UDF with an explicit IntegerType return type.
 spark.udf().registerJava("stringLengthTest", StringLengthTest.class.getName(),
   DataTypes.IntegerType);
 Row result = spark.sql("SELECT stringLengthTest('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
 // returnType is not provided: it should be inferred from the UDF class.
 spark.udf().registerJava("stringLengthTest2", StringLengthTest.class.getName(), null);
 // Fix: query the newly registered 'stringLengthTest2' — the original queried
 // 'stringLengthTest' again, so the inferred-return-type path was never exercised.
 result = spark.sql("SELECT stringLengthTest2('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.10

@SuppressWarnings("unchecked")
@Test
public void udf3Test() {
 // Class-based UDF registration, first with an explicit return type...
 spark.udf().registerJava("stringLengthTest", StringLengthTest.class.getName(),
   DataTypes.IntegerType);
 Row result = spark.sql("SELECT stringLengthTest('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
 // returnType is not provided — Spark must infer it from the UDF class.
 spark.udf().registerJava("stringLengthTest2", StringLengthTest.class.getName(), null);
 // Fix: the second query must use 'stringLengthTest2'; re-querying the first
 // registration (as the original did) never tested the null-returnType case.
 result = spark.sql("SELECT stringLengthTest2('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql

@SuppressWarnings("unchecked")
@Test
public void udf3Test() {
 // Register the UDF by class name with an explicit return type and verify it.
 spark.udf().registerJava("stringLengthTest", StringLengthTest.class.getName(),
   DataTypes.IntegerType);
 Row result = spark.sql("SELECT stringLengthTest('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
 // returnType is not provided (null) — exercises return-type inference.
 spark.udf().registerJava("stringLengthTest2", StringLengthTest.class.getName(), null);
 // Fix: call 'stringLengthTest2' here; the original re-ran 'stringLengthTest',
 // leaving the inference-based registration untested.
 result = spark.sql("SELECT stringLengthTest2('test', 'test2')").head();
 Assert.assertEquals(9, result.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Test
public void pivot() {
 // Pivot course sales by course name, summing earnings per year.
 Dataset<Row> courseSales = spark.table("courseSales");
 List<Row> pivoted = courseSales.groupBy("year")
  .pivot("course", Arrays.asList("dotNET", "Java"))
  .agg(sum("earnings"))
  .orderBy("year")
  .collectAsList();
 // Row layout after the pivot: (year, dotNET earnings, Java earnings).
 Row year2012 = pivoted.get(0);
 Assert.assertEquals(2012, year2012.getInt(0));
 Assert.assertEquals(15000.0, year2012.getDouble(1), 0.01);
 Assert.assertEquals(20000.0, year2012.getDouble(2), 0.01);
 Row year2013 = pivoted.get(1);
 Assert.assertEquals(2013, year2013.getInt(0));
 Assert.assertEquals(48000.0, year2013.getDouble(1), 0.01);
 Assert.assertEquals(30000.0, year2013.getDouble(2), 0.01);
}

代码示例来源:origin: org.apache.spark/spark-sql

@Test
public void pivot() {
 // Group by year, then pivot on the two known course names and sum earnings.
 Dataset<Row> table = spark.table("courseSales");
 List<Row> rows = table.groupBy("year")
  .pivot("course", Arrays.asList("dotNET", "Java"))
  .agg(sum("earnings")).orderBy("year").collectAsList();
 // First row is 2012: column 1 = dotNET total, column 2 = Java total.
 Assert.assertEquals(2012, rows.get(0).getInt(0));
 Assert.assertEquals(15000.0, rows.get(0).getDouble(1), 0.01);
 Assert.assertEquals(20000.0, rows.get(0).getDouble(2), 0.01);
 // Second row is 2013 with the same column layout.
 Assert.assertEquals(2013, rows.get(1).getInt(0));
 Assert.assertEquals(48000.0, rows.get(1).getDouble(1), 0.01);
 Assert.assertEquals(30000.0, rows.get(1).getDouble(2), 0.01);
}

代码示例来源:origin: org.apache.spark/spark-sql_2.10

@Test
public void pivot() {
 // Verify pivot() with an explicit value list produces one earnings
 // column per course, ordered by year.
 Dataset<Row> sales = spark.table("courseSales");
 List<Row> result = sales.groupBy("year")
  .pivot("course", Arrays.asList("dotNET", "Java"))
  .agg(sum("earnings"))
  .orderBy("year")
  .collectAsList();
 Row first = result.get(0);
 Row second = result.get(1);
 Assert.assertEquals(2012, first.getInt(0));
 Assert.assertEquals(15000.0, first.getDouble(1), 0.01);
 Assert.assertEquals(20000.0, first.getDouble(2), 0.01);
 Assert.assertEquals(2013, second.getInt(0));
 Assert.assertEquals(48000.0, second.getDouble(1), 0.01);
 Assert.assertEquals(30000.0, second.getDouble(2), 0.01);
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@SuppressWarnings("unchecked")
@Test
public void udf1Test() {
 // Single-argument UDF returning the argument's length.
 spark.udf().register("stringLengthTest", (String str) -> str.length(), DataTypes.IntegerType);
 Row firstRow = spark.sql("SELECT stringLengthTest('test')").head();
 // len("test") == 4
 Assert.assertEquals(4, firstRow.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql

@SuppressWarnings("unchecked")
@Test
public void udf1Test() {
 // Register a one-arg length UDF declared as IntegerType.
 spark.udf().register("stringLengthTest", (String str) -> str.length(), DataTypes.IntegerType);
 String query = "SELECT stringLengthTest('test')";
 Row head = spark.sql(query).head();
 Assert.assertEquals(4, head.getInt(0));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.10

@SuppressWarnings("unchecked")
@Test
public void udf1Test() {
 // UDF under test: String -> its length, registered as IntegerType.
 spark.udf().register("stringLengthTest", (String str) -> str.length(), DataTypes.IntegerType);
 Row resultRow = spark.sql("SELECT stringLengthTest('test')").head();
 int length = resultRow.getInt(0);
 Assert.assertEquals(4, length);
}

代码示例来源:origin: org.apache.spark/spark-sql

@Test
public void isInCollectionWorksCorrectlyOnJava() {
 // Schema first: (a: int not null, b: string not null).
 StructType schema = createStructType(Arrays.asList(
  createStructField("a", IntegerType, false),
  createStructField("b", StringType, false)));
 List<Row> data = Arrays.asList(
  RowFactory.create(1, "x"),
  RowFactory.create(2, "y"),
  RowFactory.create(3, "z"));
 Dataset<Row> frame = spark.createDataFrame(data, schema);
 // isInCollection must behave like an explicit predicate on column 'a',
 // regardless of the concrete Collection implementation passed in.
 FilterFunction<Row> oneOrTwo = r -> r.getInt(0) == 1 || r.getInt(0) == 2;
 Assert.assertTrue(Arrays.equals(
  (Row[]) frame.filter(frame.col("a").isInCollection(Arrays.asList(1, 2))).collect(),
  (Row[]) frame.filter(oneOrTwo).collect()
 ));
 Assert.assertTrue(Arrays.equals(
  (Row[]) frame.filter(frame.col("a").isInCollection(new HashSet<>(Arrays.asList(1, 2)))).collect(),
  (Row[]) frame.filter(oneOrTwo).collect()
 ));
 FilterFunction<Row> threeOrOne = r -> r.getInt(0) == 3 || r.getInt(0) == 1;
 Assert.assertTrue(Arrays.equals(
  (Row[]) frame.filter(frame.col("a").isInCollection(new ArrayList<>(Arrays.asList(3, 1)))).collect(),
  (Row[]) frame.filter(threeOrOne).collect()
 ));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Test
public void isInCollectionWorksCorrectlyOnJava() {
 // Build a small two-column frame to filter against.
 List<Row> sourceRows = Arrays.asList(
  RowFactory.create(1, "x"),
  RowFactory.create(2, "y"),
  RowFactory.create(3, "z"));
 StructType twoColSchema = createStructType(Arrays.asList(
  createStructField("a", IntegerType, false),
  createStructField("b", StringType, false)));
 Dataset<Row> dataset = spark.createDataFrame(sourceRows, twoColSchema);
 // List-backed collection.
 Row[] viaIsIn = (Row[]) dataset.filter(
  dataset.col("a").isInCollection(Arrays.asList(1, 2))).collect();
 Row[] viaPredicate = (Row[]) dataset.filter(
  (FilterFunction<Row>) r -> r.getInt(0) == 1 || r.getInt(0) == 2).collect();
 Assert.assertTrue(Arrays.equals(viaIsIn, viaPredicate));
 // Set-backed collection.
 viaIsIn = (Row[]) dataset.filter(
  dataset.col("a").isInCollection(new HashSet<>(Arrays.asList(1, 2)))).collect();
 viaPredicate = (Row[]) dataset.filter(
  (FilterFunction<Row>) r -> r.getInt(0) == 1 || r.getInt(0) == 2).collect();
 Assert.assertTrue(Arrays.equals(viaIsIn, viaPredicate));
 // ArrayList-backed collection with a different value set.
 viaIsIn = (Row[]) dataset.filter(
  dataset.col("a").isInCollection(new ArrayList<>(Arrays.asList(3, 1)))).collect();
 viaPredicate = (Row[]) dataset.filter(
  (FilterFunction<Row>) r -> r.getInt(0) == 3 || r.getInt(0) == 1).collect();
 Assert.assertTrue(Arrays.equals(viaIsIn, viaPredicate));
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@SuppressWarnings("unchecked")
 @Test
 public void udf6Test() {
  // Zero-argument UDF that always yields the constant 1.
  spark.udf().register("returnOne", () -> 1, DataTypes.IntegerType);
  Row firstRow = spark.sql("SELECT returnOne()").head();
  Assert.assertEquals(1, firstRow.getInt(0));
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@SuppressWarnings("unchecked")
 @Test
 public void udf6Test() {
  // Register a no-arg UDF returning a fixed IntegerType value.
  spark.udf().register("returnOne", () -> 1, DataTypes.IntegerType);
  Row head = spark.sql("SELECT returnOne()").head();
  int value = head.getInt(0);
  Assert.assertEquals(1, value);
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.10

Assert.assertEquals(shortValue, simpleRow.getShort(3));
Assert.assertEquals(shortValue, simpleRow.get(3));
Assert.assertEquals(intValue, simpleRow.getInt(4));
Assert.assertEquals(intValue, simpleRow.get(4));
Assert.assertEquals(intValue, simpleRow.getInt(5));
Assert.assertEquals(intValue, simpleRow.get(5));
Assert.assertEquals(longValue, simpleRow.getLong(6));

代码示例来源:origin: org.apache.spark/spark-sql_2.11

Assert.assertEquals(shortValue, simpleRow.getShort(3));
Assert.assertEquals(shortValue, simpleRow.get(3));
Assert.assertEquals(intValue, simpleRow.getInt(4));
Assert.assertEquals(intValue, simpleRow.get(4));
Assert.assertEquals(intValue, simpleRow.getInt(5));
Assert.assertEquals(intValue, simpleRow.get(5));
Assert.assertEquals(longValue, simpleRow.getLong(6));

相关文章