org.apache.spark.sql.Row.isNullAt()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(7.6k)|赞(0)|评价(0)|浏览(450)

本文整理了Java中org.apache.spark.sql.Row.isNullAt方法的一些代码示例,展示了Row.isNullAt的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Row.isNullAt方法的具体详情如下:
包路径:org.apache.spark.sql.Row
类名称:Row
方法名:isNullAt

Row.isNullAt介绍

暂无

代码示例

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public Object evaluate(Row buffer) {
  // A null slot 0 means no rows were ever aggregated, so the result is null;
  // otherwise the intermediate sum at slot 0 is already the final answer.
  return buffer.isNullAt(0) ? null : buffer.getDouble(0);
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public Object evaluate(Row buffer) {
  // No input rows seen yet: the sum slot is still null, so return null.
  if (buffer.isNullAt(0)) {
   return null;
  }
  // The running sum doubles as the final result.
  return buffer.getDouble(0);
 }
}

代码示例来源:origin: apache/phoenix

@Override
public Date getDate(int columnIndex) throws SQLException {
  // Record SQL NULL state first so a subsequent wasNull() call is accurate.
  // JDBC column indexes are 1-based; Spark Row positions are 0-based.
  wasNull = getCurrentRow().isNullAt(columnIndex-1);
  // Guard the fetch with wasNull, mirroring the sibling getters
  // (getString/getInt/...): return null for SQL NULL instead of reading
  // a null slot from the underlying Row.
  return wasNull ? null : getCurrentRow().getDate(columnIndex-1);
}

代码示例来源:origin: apache/phoenix

@Override
public int getInt(int columnIndex) throws SQLException {
  // JDBC indexes are 1-based; the backing Spark Row is 0-based.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    // JDBC mandates 0 for SQL NULL on primitive getters.
    return 0;
  }
  return getCurrentRow().getInt(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public long getLong(int columnIndex) throws SQLException {
  // Track NULL state for wasNull(); Row positions are 0-based.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    return 0;  // JDBC convention: primitive getters yield 0 for SQL NULL
  }
  return getCurrentRow().getLong(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public String getString(int columnIndex) throws SQLException {
  // Capture NULL state up front so wasNull() reflects this column.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    return null;
  }
  return getCurrentRow().getString(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public boolean getBoolean(int columnIndex) throws SQLException {
  // SQL NULL maps to false for the primitive boolean getter;
  // short-circuit avoids touching the null slot in the Row.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  return !wasNull && getCurrentRow().getBoolean(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public double getDouble(int columnIndex) throws SQLException {
  // Record NULL state first; Spark Row is 0-based vs JDBC's 1-based index.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    return 0.0;  // JDBC convention for SQL NULL on primitive getters
  }
  return getCurrentRow().getDouble(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public byte getByte(int columnIndex) throws SQLException {
  // NULL check precedes the fetch so wasNull() stays accurate.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    return 0;  // SQL NULL -> 0 per the JDBC primitive-getter contract
  }
  return getCurrentRow().getByte(columnIndex - 1);
}

代码示例来源:origin: apache/phoenix

@Override
public float getFloat(int columnIndex) throws SQLException {
  // Translate the 1-based JDBC index to the Row's 0-based position.
  wasNull = getCurrentRow().isNullAt(columnIndex - 1);
  if (wasNull) {
    return 0f;  // JDBC convention for SQL NULL
  }
  return getCurrentRow().getFloat(columnIndex - 1);
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public Object evaluate(Row buffer) {
  // A null sum slot means this aggregate never received an input row,
  // so the function result is null. Otherwise compute the shifted average:
  // bufferSum / bufferCount + 100.0.
  return buffer.isNullAt(0)
      ? null
      : buffer.getDouble(0) / buffer.getLong(1) + 100.0;
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public Object evaluate(Row buffer) {
  // No rows aggregated yet: bufferSum is still null, so return null.
  if (buffer.isNullAt(0)) {
   return null;
  }
  // Special average: sum / count, offset by 100.
  double sum = buffer.getDouble(0);
  long count = buffer.getLong(1);
  return sum / count + 100.0;
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public void update(MutableAggregationBuffer buffer, Row input) {
 // The input Row carries a single Double column; null inputs never
 // modify the buffer, so bail out early.
 if (input.isNullAt(0)) {
  return;
 }
 double incoming = input.getDouble(0);
 if (buffer.isNullAt(0)) {
  // First non-null value seen: seed the intermediate sum with it.
  buffer.update(0, incoming);
 } else {
  // Fold the incoming value into the running sum.
  buffer.update(0, incoming + buffer.getDouble(0));
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public void update(MutableAggregationBuffer buffer, Row input) {
 // Single Double input column; null inputs are ignored entirely.
 if (!input.isNullAt(0)) {
  double value = input.getDouble(0);
  // Seed the sum on the first non-null input, accumulate afterwards.
  buffer.update(0, buffer.isNullAt(0) ? value : value + buffer.getDouble(0));
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public void merge(MutableAggregationBuffer buffer1, Row buffer2) {
 // buffer1 (mutable) and buffer2 (incoming partial result) share the same
 // single-column structure. buffer1 is only touched when buffer2's value
 // is not null, so a null partial result cannot clobber accumulated state.
 if (!buffer2.isNullAt(0)) {
  if (buffer1.isNullAt(0)) {
   // The mutable buffer's sum is still null (no values merged yet):
   // adopt the incoming buffer's value as-is.
   buffer1.update(0, buffer2.getDouble(0));
  } else {
   // Otherwise, add the incoming buffer's value (buffer2) to the mutable
   // buffer's value (buffer1) and store the combined sum back into buffer1.
   Double newValue = buffer2.getDouble(0) + buffer1.getDouble(0);
   buffer1.update(0, newValue);
  }
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public void merge(MutableAggregationBuffer buffer1, Row buffer2) {
 // buffer1 is the mutable target; buffer2 is an incoming partial result
 // with the same structure. Null partial results are skipped so they
 // never overwrite state already accumulated in buffer1.
 if (!buffer2.isNullAt(0)) {
  if (buffer1.isNullAt(0)) {
   // buffer1's intermediate sum is still null: initialize it from
   // the incoming buffer's value.
   buffer1.update(0, buffer2.getDouble(0));
  } else {
   // Otherwise, add the incoming value (buffer2) to the mutable
   // buffer's value (buffer1) and write the combined sum to buffer1.
   Double newValue = buffer2.getDouble(0) + buffer1.getDouble(0);
   buffer1.update(0, newValue);
  }
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public void update(MutableAggregationBuffer buffer, Row input) {
 // The input Row holds one Double column; null inputs affect neither
 // the running sum (slot 0) nor the count (slot 1).
 if (input.isNullAt(0)) {
  return;
 }
 double value = input.getDouble(0);
 if (buffer.isNullAt(0)) {
  // First non-null input: seed bufferSum and start bufferCount at 1.
  buffer.update(0, value);
  buffer.update(1, 1L);
 } else {
  // Accumulate into bufferSum and bump bufferCount.
  buffer.update(0, value + buffer.getDouble(0));
  buffer.update(1, buffer.getLong(1) + 1L);
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public void update(MutableAggregationBuffer buffer, Row input) {
 // Single Double input column; skip null inputs entirely.
 if (!input.isNullAt(0)) {
  boolean firstValue = buffer.isNullAt(0);
  double incoming = input.getDouble(0);
  // Seed on the first non-null value, otherwise fold into the running sum;
  // the count starts at 1 and is incremented thereafter.
  buffer.update(0, firstValue ? incoming : incoming + buffer.getDouble(0));
  buffer.update(1, firstValue ? 1L : buffer.getLong(1) + 1L);
 }
}

代码示例来源:origin: org.apache.spark/spark-sql_2.11

@Override public void merge(MutableAggregationBuffer buffer1, Row buffer2) {
 // buffer1 (mutable target) and buffer2 (incoming partial result) share
 // the same (sum, count) structure. buffer1 is only updated when the
 // incoming sum is non-null, so empty partials leave it untouched.
 if (!buffer2.isNullAt(0)) {
  if (buffer1.isNullAt(0)) {
   // buffer1's intermediate sum is still null: adopt the incoming
   // buffer's sum and count wholesale.
   buffer1.update(0, buffer2.getDouble(0));
   buffer1.update(1, buffer2.getLong(1));
  } else {
   // Otherwise combine: sums are added and counts are added.
   Double newValue = buffer2.getDouble(0) + buffer1.getDouble(0);
   buffer1.update(0, newValue);
   buffer1.update(1, buffer1.getLong(1) + buffer2.getLong(1));
  }
 }
}

代码示例来源:origin: org.apache.spark/spark-sql

@Override public void merge(MutableAggregationBuffer buffer1, Row buffer2) {
 // buffer1 is the mutable (sum, count) buffer; buffer2 is an incoming
 // partial result with the same layout. A null incoming sum means that
 // partial saw no rows, so it is skipped.
 if (!buffer2.isNullAt(0)) {
  if (buffer1.isNullAt(0)) {
   // buffer1 has accumulated nothing yet: initialize its sum and
   // count from the incoming buffer.
   buffer1.update(0, buffer2.getDouble(0));
   buffer1.update(1, buffer2.getLong(1));
  } else {
   // Otherwise merge by adding the two sums and the two counts.
   Double newValue = buffer2.getDouble(0) + buffer1.getDouble(0);
   buffer1.update(0, newValue);
   buffer1.update(1, buffer1.getLong(1) + buffer2.getLong(1));
  }
 }
}

相关文章