public List<BaseItem> getHdfsstatus() {
    // Fall back to a hard-coded NameNode address if no Configuration was injected.
    if (conf == null) {
        conf = new Configuration();
        conf.set("fs.default.name", "hdfs://CRNOPHQDW2:54310");
    }
    FileSystem fs = null;
    try {
        fs = FileSystem.get(conf);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    List<BaseItem> ret = new ArrayList<BaseItem>();
    DistributedFileSystem dfs = (DistributedFileSystem) fs;
    DistributedFileSystem.DiskStatus ds = null;
    try {
        ds = dfs.getDiskStatus();
    } catch (IOException e) {
        e.printStackTrace();
        return null; // without this, ds stays null and the reads below would NPE
    }
    long capacity = ds.getCapacity();
    long used = ds.getDfsUsed();
    long remaining = ds.getRemaining();
    long presentCapacity = used + remaining;
    String name = "GrobleStatus";
    String value = "GrobleValue";
    BaseItem bitem = new BaseItem(name, value);
    bitem.setMap_value("Configured Capacity" + getunittype(StringUtils.byteDesc(capacity)),
            getunitvalue(StringUtils.byteDesc(capacity)));
    bitem.setMap_value("Present Capacity" + getunittype(StringUtils.byteDesc(presentCapacity)),
            getunitvalue(StringUtils.byteDesc(presentCapacity)));
    bitem.setMap_value("DFS Remaining" + getunittype(StringUtils.byteDesc(remaining)),
            getunitvalue(StringUtils.byteDesc(remaining)));
    bitem.setMap_value("DFS Used" + getunittype(StringUtils.byteDesc(used)),
            getunitvalue(StringUtils.byteDesc(used)));
    // Multiply by 100 so the value matches the "%" in the label
    // (this mirrors how DFSAdmin's report computes DFS Used%).
    bitem.setMap_value("DFS Used%",
            StringUtils.limitDecimalTo2(100.0 * used / presentCapacity));
    ret.add(bitem);
    DatanodeInfo[] live = null;
    try {
        live = dfs.getClient().datanodeReport(
                org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType.LIVE);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    DatanodeInfo[] dead = null;
    try {
        dead = dfs.getClient().datanodeReport(
                org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType.DEAD);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    for (DatanodeInfo dn : live) {
        ret.add(getbItem(dn));
    }
    for (DatanodeInfo dn : dead) {
        ret.add(getbItem(dn));
    }
    return ret;
}
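
A minimal sketch of how this method might be called. The class name HdfsMonitor is an assumption, since the enclosing class (which holds conf and the helpers getbItem, getunittype, getunitvalue) is not shown in the snippet above:

// Hypothetical usage; HdfsMonitor is a placeholder for the real enclosing class.
HdfsMonitor monitor = new HdfsMonitor();
List<BaseItem> items = monitor.getHdfsstatus();
if (items != null) {
    // First item is the cluster-wide summary, the rest are per-datanode entries.
    for (BaseItem item : items) {
        System.out.println(item);
    }
}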
I've written something similar before; you can take a look.
FileSystem doesn't seem to have that method; when you have time, it'd be worth downloading a copy of the source code and reading through it...
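
On the FileSystem question: newer Hadoop releases do expose the same totals directly on FileSystem via getStatus(), which avoids both the cast to DistributedFileSystem and the deprecated DiskStatus. A minimal standalone sketch, assuming a Hadoop version that ships org.apache.hadoop.fs.FsStatus:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.util.StringUtils;

public class FsStatusDemo {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Same NameNode address as in the method above.
        conf.set("fs.default.name", "hdfs://CRNOPHQDW2:54310");
        FileSystem fs = FileSystem.get(conf);
        FsStatus status = fs.getStatus(); // capacity / used / remaining, in bytes
        System.out.println("Capacity:  " + StringUtils.byteDesc(status.getCapacity()));
        System.out.println("Used:      " + StringUtils.byteDesc(status.getUsed()));
        System.out.println("Remaining: " + StringUtils.byteDesc(status.getRemaining()));
    }
}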