This article collects code examples for the Java class org.apache.hadoop.fs.XAttrCodec and shows how the XAttrCodec class is used in practice. The examples were extracted from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they are fairly representative and should serve as a useful reference. Details of the XAttrCodec class:
Package path: org.apache.hadoop.fs.XAttrCodec
Class name: XAttrCodec
The value of an XAttr is a byte[]; this class converts the byte[] to some kind of string representation and back. The string representation is convenient for display and input, for example displaying a value on screen as a shell or JSON response, or accepting it as an HTTP or shell parameter.
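
Before the collected snippets, here is a minimal sketch of the two static helpers they all rely on, XAttrCodec.encodeValue(byte[], XAttrCodec) and XAttrCodec.decodeValue(String), both of which declare IOException. The class name XAttrCodecDemo and the sample value are made up for illustration.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.fs.XAttrCodec;

public class XAttrCodecDemo {
  public static void main(String[] args) throws IOException {
    byte[] raw = "some value".getBytes(StandardCharsets.UTF_8);

    // Encode the same bytes in the three supported representations.
    String asText   = XAttrCodec.encodeValue(raw, XAttrCodec.TEXT);    // quoted string form
    String asHex    = XAttrCodec.encodeValue(raw, XAttrCodec.HEX);     // 0x-prefixed hex digits
    String asBase64 = XAttrCodec.encodeValue(raw, XAttrCodec.BASE64);  // 0s-prefixed base64

    // decodeValue picks the representation from the input itself (quotes, 0x or 0s prefix).
    byte[] roundTripped = XAttrCodec.decodeValue(asHex);

    System.out.println(asText + " | " + asHex + " | " + asBase64
        + " | " + roundTripped.length + " bytes");
  }
}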
Code example from: org.apache.hadoop/hadoop-common

@Override
protected void processOptions(LinkedList<String> args) throws IOException {
  name = StringUtils.popOptionWithArgument("-n", args);
  String v = StringUtils.popOptionWithArgument("-v", args);
  if (v != null) {
    value = XAttrCodec.decodeValue(v);
  }
  xname = StringUtils.popOptionWithArgument("-x", args);
  if (name != null && xname != null) {
    throw new HadoopIllegalArgumentException(
        "Can not specify both '-n name' and '-x name' option.");
  }
  if (name == null && xname == null) {
    throw new HadoopIllegalArgumentException(
        "Must specify '-n name' or '-x name' option.");
  }
  if (args.isEmpty()) {
    throw new HadoopIllegalArgumentException("<path> is missing.");
  }
  if (args.size() > 1) {
    throw new HadoopIllegalArgumentException("Too many arguments.");
  }
}
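
For context on the -v value parsed above: decodeValue accepts any of the three input forms documented for the setfattr shell command, namely a double-quoted string, a 0x-prefixed hex string, or a 0s-prefixed base64 string. A small sketch under that assumption; the class name DecodeValueFormats is made up:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.fs.XAttrCodec;

public class DecodeValueFormats {
  public static void main(String[] args) throws IOException {
    // All three inputs are expected to decode to the same bytes: 'A', 'B', 'C'.
    byte[] fromText   = XAttrCodec.decodeValue("\"ABC\"");   // quoted text form
    byte[] fromHex    = XAttrCodec.decodeValue("0x414243");  // hex form
    byte[] fromBase64 = XAttrCodec.decodeValue("0sQUJD");    // base64 form

    System.out.println(new String(fromText, StandardCharsets.UTF_8));    // ABC
    System.out.println(new String(fromHex, StandardCharsets.UTF_8));     // ABC
    System.out.println(new String(fromBase64, StandardCharsets.UTF_8));  // ABC
  }
}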
Code example from: org.apache.hadoop/hadoop-common

private void printXAttr(String name, byte[] value) throws IOException {
  if (value != null) {
    if (value.length != 0) {
      out.println(name + "=" + XAttrCodec.encodeValue(value, encoding));
    } else {
      out.println(name);
    }
  }
}
Code example from: org.apache.hadoop/hadoop-common

@Override
protected void processOptions(LinkedList<String> args) throws IOException {
  name = StringUtils.popOptionWithArgument("-n", args);
  String en = StringUtils.popOptionWithArgument("-e", args);
  if (en != null) {
    try {
      encoding = XAttrCodec.valueOf(StringUtils.toUpperCase(en));
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException(
          "Invalid/unsupported encoding option specified: " + en);
    }
    Preconditions.checkArgument(encoding != null,
        "Invalid/unsupported encoding option specified: " + en);
  }
  boolean r = StringUtils.popOption("-R", args);
  setRecursive(r);
  dump = StringUtils.popOption("-d", args);
  if (!dump && name == null) {
    throw new HadoopIllegalArgumentException(
        "Must specify '-n name' or '-d' option.");
  }
  if (args.isEmpty()) {
    throw new HadoopIllegalArgumentException("<path> is missing.");
  }
  if (args.size() > 1) {
    throw new HadoopIllegalArgumentException("Too many arguments.");
  }
}
Code example from: org.apache.hadoop/hadoop-hdfs-client

@Override
public String getValueString() {
  return value.toString();
}
Code example from: ch.cern.hadoop/hadoop-hdfs

@Override
public String getValueString() {
  return value.toString();
}
Code example from: org.apache.hadoop/hadoop-hdfs

private static List<XAttr> readXAttrsFromXml(Stanza st)
    throws InvalidXmlException {
  if (!st.hasChildren("XATTR")) {
    return null;
  }
  List<Stanza> stanzas = st.getChildren("XATTR");
  List<XAttr> xattrs = Lists.newArrayListWithCapacity(stanzas.size());
  for (Stanza a: stanzas) {
    XAttr.Builder builder = new XAttr.Builder();
    builder.setNameSpace(XAttr.NameSpace.valueOf(a.getValue("NAMESPACE"))).
        setName(a.getValue("NAME"));
    String v = a.getValueOrNull("VALUE");
    if (v != null) {
      try {
        builder.setValue(XAttrCodec.decodeValue(v));
      } catch (IOException e) {
        throw new InvalidXmlException(e.toString());
      }
    }
    xattrs.add(builder.build());
  }
  return xattrs;
}
Code example from: org.apache.hadoop/hadoop-hdfs

private static Map<String, Object> toJsonMap(final XAttr xAttr,
    final XAttrCodec encoding) throws IOException {
  if (xAttr == null) {
    return null;
  }
  final Map<String, Object> m = new TreeMap<String, Object>();
  m.put("name", XAttrHelper.getPrefixedName(xAttr));
  m.put("value", xAttr.getValue() != null ?
      XAttrCodec.encodeValue(xAttr.getValue(), encoding) : null);
  return m;
}
Code example from: io.prestosql.hadoop/hadoop-apache

@Override
public String getValueString() {
  return value.toString();
}
Code example from: org.apache.hadoop/hadoop-hdfs-client

static byte[] decodeXAttrValue(String value) throws IOException {
  if (value != null) {
    return XAttrCodec.decodeValue(value);
  } else {
    return new byte[0];
  }
}
Code example from: org.apache.hadoop/hadoop-hdfs

private static void appendXAttrsToXml(ContentHandler contentHandler,
    List<XAttr> xAttrs) throws SAXException {
  for (XAttr xAttr: xAttrs) {
    contentHandler.startElement("", "", "XATTR", new AttributesImpl());
    XMLUtils.addSaxString(contentHandler, "NAMESPACE",
        xAttr.getNameSpace().toString());
    XMLUtils.addSaxString(contentHandler, "NAME", xAttr.getName());
    if (xAttr.getValue() != null) {
      try {
        XMLUtils.addSaxString(contentHandler, "VALUE",
            XAttrCodec.encodeValue(xAttr.getValue(), XAttrCodec.HEX));
      } catch (IOException e) {
        throw new SAXException(e);
      }
    }
    contentHandler.endElement("", "", "XATTR");
  }
}
Code example from: ch.cern.hadoop/hadoop-hdfs

private static byte[] decodeXAttrValue(String value) throws IOException {
  if (value != null) {
    return XAttrCodec.decodeValue(value);
  } else {
    return new byte[0];
  }
}
Code example from: ch.cern.hadoop/hadoop-common

private void printXAttr(String name, byte[] value) throws IOException {
  if (value != null) {
    if (value.length != 0) {
      out.println(name + "=" + XAttrCodec.encodeValue(value, encoding));
    } else {
      out.println(name);
    }
  }
}
Code example from: io.prestosql.hadoop/hadoop-apache

private static byte[] decodeXAttrValue(String value) throws IOException {
  if (value != null) {
    return XAttrCodec.decodeValue(value);
  } else {
    return new byte[0];
  }
}
Code example from: io.prestosql.hadoop/hadoop-apache

private void printXAttr(String name, byte[] value) throws IOException {
  if (value != null) {
    if (value.length != 0) {
      out.println(name + "=" + XAttrCodec.encodeValue(value, encoding));
    } else {
      out.println(name);
    }
  }
}
Code example from: org.apache.hadoop/hadoop-hdfs-httpfs

public FSSetXAttr(String path, String name, String encodedValue,
    EnumSet<XAttrSetFlag> flag) throws IOException {
  this.path = new Path(path);
  this.name = name;
  this.value = XAttrCodec.decodeValue(encodedValue);
  this.flag = flag;
}
Code example from: io.hops/hadoop-common

private void printXAttr(String name, byte[] value) throws IOException {
  if (value != null) {
    if (value.length != 0) {
      out.println(name + "=" + XAttrCodec.encodeValue(value, encoding));
    } else {
      out.println(name);
    }
  }
}
Code example from: org.apache.hadoop/hadoop-hdfs-httpfs

/** Convert xAttrs json to xAttrs map */
private Map<String, byte[]> createXAttrMap(JSONArray jsonArray)
    throws IOException {
  Map<String, byte[]> xAttrs = Maps.newHashMap();
  for (Object obj : jsonArray) {
    JSONObject jsonObj = (JSONObject) obj;
    final String name = (String)jsonObj.get(XATTR_NAME_JSON);
    final byte[] value = XAttrCodec.decodeValue(
        (String)jsonObj.get(XATTR_VALUE_JSON));
    xAttrs.put(name, value);
  }
  return xAttrs;
}
Code example from: com.github.jiayuhan-it/hadoop-common

private void printXAttr(String name, byte[] value) throws IOException {
  if (value != null) {
    if (value.length != 0) {
      out.println(name + "=" + XAttrCodec.encodeValue(value, encoding));
    } else {
      out.println(name);
    }
  }
}
Code example from: io.prestosql.hadoop/hadoop-apache

public byte[] getXAttrValue() throws IOException {
  final String v = getValue();
  return XAttrCodec.decodeValue(v);
}
Code example from: ch.cern.hadoop/hadoop-hdfs

private static Map<String, Object> toJsonMap(final XAttr xAttr,
    final XAttrCodec encoding) throws IOException {
  if (xAttr == null) {
    return null;
  }
  final Map<String, Object> m = new TreeMap<String, Object>();
  m.put("name", XAttrHelper.getPrefixName(xAttr));
  m.put("value", xAttr.getValue() != null ?
      XAttrCodec.encodeValue(xAttr.getValue(), encoding) : null);
  return m;
}
The content is collected from the Internet; if there is any infringement, please contact the author to have it removed.