Usage of the org.elasticsearch.hadoop.serialization.Generator Class, with Code Examples


This article collects Java code examples for org.elasticsearch.hadoop.serialization.Generator and shows how the class is used in practice. The snippets are taken from selected open-source projects found on GitHub, Stack Overflow, Maven and similar sources, and are intended as a practical reference. Details of the Generator class:
Package: org.elasticsearch.hadoop.serialization
Class name: Generator

About Generator

No official class description is available.
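
Judging from the examples below, Generator is a low-level, fluent JSON-writing abstraction: you obtain a concrete implementation (the snippets use JacksonJsonGenerator over a FastByteArrayOutputStream), emit tokens with writeBeginObject/writeFieldName/writeString/writeNumber/... calls, close the generator, and read the result back from the stream. A minimal sketch modeled on those examples (the field names and values are hypothetical):

// Minimal usage sketch, modeled on the snippets below; field/value literals are hypothetical.
FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
Generator generator = new JacksonJsonGenerator(out);
generator.writeBeginObject();
generator.writeFieldName("message").writeString("hello");
generator.writeFieldName("count").writeNumber(42);
generator.writeEndObject();
generator.close();
String json = out.toString();   // {"message":"hello","count":42}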

Code Examples

Code example source: elastic/elasticsearch-hadoop

// Excerpt from a JDK value writer; the extraction collapsed the type-dispatch
// branches, so the if/else-if structure implied by the casts is restored below.
protected Result doWrite(Object value, Generator generator, String parentField) {
    if (value == null) {
        generator.writeNull();
    }
    else if (value instanceof String)  { generator.writeString((String) value); }
    else if (value instanceof Integer) { generator.writeNumber(((Integer) value).intValue()); }
    else if (value instanceof Long)    { generator.writeNumber(((Long) value).longValue()); }
    else if (value instanceof Float)   { generator.writeNumber(((Float) value).floatValue()); }
    else if (value instanceof Double)  { generator.writeNumber(((Double) value).doubleValue()); }
    else if (value instanceof Short)   { generator.writeNumber(((Short) value).shortValue()); }
    else if (value instanceof Byte)    { generator.writeNumber(((Byte) value).byteValue()); }
    else if (value instanceof Number) {
        // any other Number: integral values go out as longs, the rest as doubles
        Number n = (Number) value;
        double d = n.doubleValue();
        if (Math.floor(d) == d) {
            generator.writeNumber(n.longValue());
        }
        else {
            generator.writeNumber(d);
        }
    }
    else if (value instanceof Boolean) { generator.writeBoolean(((Boolean) value).booleanValue()); }
    else if (value instanceof byte[])  { generator.writeBinary((byte[]) value); }
    else if (value.getClass().isArray()) {
        generator.writeBeginArray();
        for (Object o : ObjectUtils.toObjectArray(value)) {
            // ... remainder truncated in the source snippet

Code example source: elastic/elasticsearch-hadoop

// Truncated excerpt: nested tuples are written as an array of objects, keeping only
// the fields that pass the include/exclude filter (the surrounding loop and error
// handling were dropped by the extraction).
generator.writeBeginArray();
generator.writeBeginObject();
// ... nested fields written here ...
generator.writeEndObject();
generator.writeEndArray();

generator.writeBeginArray();
generator.writeBeginObject();
if (shouldKeep(generator.getParentPath(), actualName)) {
    generator.writeFieldName(actualName);
    Result res = write(tuples.get(i), nestedFields[i], generator);
    if (!res.isSuccesful()) {
        // ... failure handling truncated in the source snippet
    }
}
generator.writeEndObject();
generator.writeEndArray();

Code example source: elastic/elasticsearch-hadoop

public static String printQueryBuilder(QueryBuilder builder, boolean addQuery) {
    FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
    Generator generator = new JacksonJsonGenerator(out);
    if (addQuery) {
      generator.writeBeginObject().writeFieldName("query");
    }
    generator.writeBeginObject();
    builder.toJson(generator);
    generator.writeEndObject();
    if (addQuery) {
      generator.writeEndObject();
    }
    generator.close();
    return out.toString();
  }
}

Code example source: elastic/elasticsearch-hadoop

private void writeNullable(Generator generator, String key, String value) {
    if (value != null) {
      generator.writeFieldName(key).writeString(value);
    }
  }
}

Code example source: elastic/elasticsearch-hadoop

@Override
  public void toJson(Generator out) {
    out.writeFieldName("match_all")
        .writeBeginObject().writeEndObject();
  }
}
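
For reference, a minimal sketch (modeled on the printQueryBuilder example above) of driving this toJson output by hand; the enclosing object is supplied by the caller:

// Sketch only: reproduces what the match_all toJson above emits.
FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
Generator generator = new JacksonJsonGenerator(out);
generator.writeBeginObject();
generator.writeFieldName("match_all").writeBeginObject().writeEndObject();
generator.writeEndObject();
generator.close();
// out.toString() -> {"match_all":{}}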

Code example source: elastic/elasticsearch-hadoop

@Override
public void toJson(Generator out) {
  if (field == null) {
    throw new IllegalArgumentException("inner clause [field] cannot be null");
  }
  if (term == null) {
    throw new IllegalArgumentException("inner clause [term] cannot be null");
  }
  out.writeFieldName("term")
      .writeBeginObject()
        .writeFieldName(field)
        .writeString(term)
      .writeEndObject();
}

Code example source: elastic/elasticsearch-hadoop

// Excerpt from the parser-to-generator copy loop; the enclosing switch and the
// remaining cases were dropped by the extraction and are partly restored below.
switch (parser.currentToken()) {
case FIELD_NAME:
    generator.writeFieldName(parser.currentName());
    parser.nextToken();
    traverse(parser, generator);
    break;
case VALUE_STRING:
    generator.writeString(parser.text());
    parser.nextToken();
    break;
case VALUE_BOOLEAN:
    generator.writeBoolean(parser.booleanValue());
    parser.nextToken();
    break;
case VALUE_NULL:
    generator.writeNull();
    parser.nextToken();
    break;
case VALUE_NUMBER:
    switch (parser.numberType()) {
    case INT:
        generator.writeNumber(parser.intValue());
        break;
    case LONG:
        generator.writeNumber(parser.longValue());
        break;
    case DOUBLE:
        generator.writeNumber(parser.doubleValue());
        break;
    // ... remaining number types and the object/array cases are truncated in the snippet

Code example source: elastic/elasticsearch-hadoop

@Override
  public void toJson(Generator out) {
    if (filter == null) {
      throw new IllegalArgumentException("inner clause [filter] cannot be null.");
    }
    out.writeFieldName("constant_score");
    out.writeBeginObject();
    out.writeFieldName("filter");
    out.writeBeginObject();
    filter.toJson(out);
    out.writeEndObject();
    out.writeFieldName("boost");
    out.writeNumber(boost);
    out.writeEndObject();
  }
}

Code example source: elastic/elasticsearch-hadoop

generator.writeBeginObject();
assertEquals("", generator.getParentPath());
generator.writeFieldName("test");
assertEquals("", generator.getParentPath());
generator.writeBeginObject();
assertEquals("test", generator.getParentPath());
generator.writeFieldName("subfield");
assertEquals("test", generator.getParentPath());
generator.writeBeginObject();
assertEquals("test.subfield", generator.getParentPath());
generator.writeFieldName("subsubfield");
assertEquals("test.subfield", generator.getParentPath());
generator.writeString("value");
assertEquals("test.subfield", generator.getParentPath());
generator.writeEndObject();
assertEquals("test", generator.getParentPath());
generator.writeEndObject();
assertEquals("", generator.getParentPath());
generator.writeEndObject();
assertEquals("", generator.getParentPath());

Code example source: elastic/elasticsearch-hadoop

// Heavily truncated excerpt from an ECS-style log event serializer: each field is
// written as writeFieldName(<FieldNames constant>) followed by its value; the null
// checks around optional fields and most matching writeEndObject()/writeEndArray()
// calls were dropped by the extraction, so only the obvious closes are restored.
generator.writeFieldName(FieldNames.FIELD_TIMESTAMP).writeString(ts);
generator.writeFieldName(FieldNames.FIELD_TAGS).writeBeginArray();
for (String tag : tags) {
    generator.writeString(tag);
}
generator.writeEndArray();
generator.writeFieldName(FieldNames.FIELD_LABELS).writeBeginObject();
for (Map.Entry<String, String> label : labels.entrySet()) {
    generator.writeFieldName(label.getKey()).writeString(label.getValue());
}
generator.writeEndObject();
generator.writeFieldName(FieldNames.FIELD_MESSAGE).writeString(message);
generator.writeFieldName(FieldNames.FIELD_HOST).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_HOST_OS).writeBeginObject();
// ... OS fields truncated ...
generator.writeFieldName(FieldNames.FIELD_HOST_TIMEZONE).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_HOST_TIMEZONE_OFFSET).writeBeginObject()
         // ... nested offset field name truncated in the snippet ...
         .writeNumber(host.getTimezoneOffsetSec());
// ... the host/error/event objects are closed further down in the original source ...
generator.writeFieldName(FieldNames.FIELD_ERROR).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_ERROR_CODE).writeString(exceptionType);
generator.writeFieldName(FieldNames.FIELD_ERROR_MESSAGE).writeString(exceptionMessage);
generator.writeFieldName(FieldNames.FIELD_EVENT).writeBeginObject();

Code example source: elastic/elasticsearch-hadoop

@SuppressWarnings("unchecked")
@Override
public Result write(SinkCall<Object[], ?> sinkCall, Generator generator) {
  Tuple tuple = CascadingUtils.coerceToString(sinkCall);
  // consider names (in case of aliases these are already applied)
  List<String> names = (List<String>) sinkCall.getContext()[SINK_CTX_ALIASES];
  generator.writeBeginObject();
  for (int i = 0; i < tuple.size(); i++) {
    String name = (i < names.size() ? names.get(i) : "tuple" + i);
    // filter out fields
    if (shouldKeep(generator.getParentPath(), name)) {
      generator.writeFieldName(name);
      Object object = tuple.getObject(i);
      Result result = jdkWriter.write(object, generator);
      if (!result.isSuccesful()) {
        if (object instanceof Writable) {
          return writableWriter.write((Writable) object, generator);
        }
        return Result.FAILED(object);
      }
    }
  }
  generator.writeEndObject();
  return Result.SUCCESFUL();
}

Code example source: elastic/elasticsearch-hadoop

private static void traverseMap(Parser parser, Generator generator) {
  generator.writeBeginObject();
  parser.nextToken();
  for (; parser.currentToken() != Token.END_OBJECT;) {
    traverse(parser, generator);
  }
  generator.writeEndObject();
  parser.nextToken();
}

Code example source: elastic/elasticsearch-hadoop

static QueryBuilder randomRawQueryStringQuery(Random rand, int level) {
  QueryBuilder query = randomQuery(rand);
  FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
  Generator generator = new JacksonJsonGenerator(out);
  generator.writeBeginObject();
  query.toJson(generator);
  generator.writeEndObject();
  generator.close();
  try {
    return new RawQueryBuilder(out.toString().trim(), false);
  } catch (IOException e) {
    throw new IllegalArgumentException("failed to parse raw query", e);
  }
}

Code example source: elastic/elasticsearch-hadoop

private static void traverseArray(Parser parser, Generator generator) {
  generator.writeBeginArray();
  parser.nextToken();
  for (; parser.currentToken() != Token.END_ARRAY;) {
    traverse(parser, generator);
  }
  generator.writeEndArray();
  parser.nextToken();
}
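
Taken together, traverseMap, traverseArray and the token switch shown earlier copy a document token by token from a Parser onto a Generator. A hypothetical driver is sketched below; the JacksonJsonParser byte[] constructor is an assumption and may differ from the actual API, and traverse() stands for the dispatching method these helpers call recursively.

// Hypothetical driver for the traversal helpers above (a sketch, not the project's API).
// ASSUMPTION: JacksonJsonParser accepts a byte[]; adjust if the real constructor differs.
String json = "{\"tags\":[\"a\",\"b\"],\"count\":2}";   // sample input, hypothetical
FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
Generator generator = new JacksonJsonGenerator(out);
Parser parser = new JacksonJsonParser(json.getBytes());
parser.nextToken();              // position on the first token
traverse(parser, generator);     // dispatches into traverseMap / traverseArray
generator.close();
// out.toString() now holds a copy of the input document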

Code example source: elastic/elasticsearch-hadoop

// Excerpt from a Writable value writer; the extraction collapsed the if/else-if
// type dispatch, so the guards evident from the casts are restored and the rest
// are kept as commented-out branches.
@Override
public Result write(Writable writable, Generator generator) {
    if (writable instanceof ByteWritable) {
        generator.writeNumber(((ByteWritable) writable).get());
    }
    else if (writable instanceof DoubleWritable) {
        generator.writeNumber(((DoubleWritable) writable).get());
    }
    else if (writable instanceof ShortWritable) {
        generator.writeNumber(((ShortWritable) writable).get());
    }
    // further branches (guards truncated in the snippet):
    //   generator.writeString(writable.toString());
    //   Calendar cal = Calendar.getInstance();
    //   cal.setTimeInMillis(ts);
    //   generator.writeString(DatatypeConverter.printDateTime(cal));
    //   generator.writeString(DateWritableWriter.toES(writable));
    //   generator.writeString(writable.toString());
    //   generator.writeString(StringUtils.trim(writable.toString()));

Code example source: elastic/elasticsearch-hadoop

protected Result handleUnknown(Object value, Generator generator) {
    generator.writeString(value.toString());
    return Result.SUCCESFUL();
  }
}

Code example source: elastic/elasticsearch-hadoop

public void close() {
    generator.close();
  }
}

Code example source: elastic/elasticsearch-hadoop

// The extraction dropped the closing braces and the guards around the should/filter
// clauses; they are restored below following the pattern of the must clause.
@Override
public void toJson(Generator out) {
    out.writeFieldName("bool");
    out.writeBeginObject();
    if (mustClauses.size() > 0) {
        out.writeFieldName("must");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : mustClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    if (shouldClauses.size() > 0) {
        out.writeFieldName("should");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : shouldClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    if (filterClauses.size() > 0) {
        out.writeFieldName("filter");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : filterClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    // ... remainder truncated in the source snippet; the bool object is closed there
Code example source: elastic/elasticsearch-hadoop

// Excerpt from a query_string builder's toJson; the null check on [query] and the
// guards around the optional settings were collapsed by the extraction.
if (query == null) {
    throw new IllegalArgumentException("inner clause [query] cannot be null");
}
out.writeFieldName("query_string");
out.writeBeginObject();
out.writeFieldName("query");
out.writeString(query);
if (defaultField != null) {
    out.writeFieldName("default_field");
    out.writeString(defaultField);
}
// similar null checks (truncated in the snippet) surround the remaining settings:
out.writeFieldName("analyzer");
out.writeString(analyzer);
out.writeFieldName("lowercaseExpandedTerms");
out.writeBoolean(lowercaseExpandedTerms);
out.writeFieldName("analyzeWildcard");
out.writeBoolean(analyzeWildcard);
out.writeFieldName("defaultOperator");
out.writeString(defaultOperator);
out.writeFieldName("lenient");
out.writeBoolean(lenient);
out.writeEndObject();

Code example source: elastic/elasticsearch-hadoop

@Override
public void toJson(Generator out) {
  if (query == null) {
    throw new IllegalArgumentException("inner clause [query] cannot be null.");
  }
  out.writeFieldName("filtered");
  out.writeBeginObject();
  out.writeFieldName("query");
  out.writeBeginObject();
  query.toJson(out);
  out.writeEndObject();
  if (filters.isEmpty() == false) {
    out.writeFieldName("filter");
    out.writeBeginObject();
    if (filters.size() == 1) {
      filters.get(0).toJson(out);
    } else {
      BoolQueryBuilder inner = new BoolQueryBuilder();
      for (QueryBuilder filter : filters) {
        inner.must(filter);
      }
      inner.toJson(out);
    }
    out.writeEndObject();
  }
  out.writeEndObject();
}
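
For reference, with a single match_all query and one term filter this method emits JSON of the form "filtered":{"query":{"match_all":{}},"filter":{"term":{...}}} inside whatever object the caller has already opened; when several filters are present they are first folded into a single bool query via inner.must(filter), as shown above.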
