Running a Hadoop program with Java reports the error: org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem

Date: 2023-11-23 15:13:14

Running a Hadoop example program with Java reports the error: org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem. The code is as follows:

package com.pcitc.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Get the host names of all DataNodes in the HDFS cluster.
 * @author lenovo
 */
public class GetList {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("dfs.default.name", "hdfs://hadoopmaster:9000");
        FileSystem fs = FileSystem.get(conf);
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
        String[] names = new String[dataNodeStats.length];
        for (int i = 0; i < dataNodeStats.length; i++) {
            names[i] = dataNodeStats[i].getHostName();
            System.out.println("node" + i + "name" + names[i]);
        }
    }
}

Running it produces the following error:

Exception in thread "main" java.lang.ClassCastException: org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem
at org.apache.hadoop.examples.FindFileOnHDFS.getHDFSNodes(FindFileOnHDFS.java:43)
at org.apache.hadoop.examples.FindFileOnHDFS.main(FindFileOnHDFS.java:16)
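
A quick way to see which FileSystem implementation FileSystem.get(conf) actually resolved to is to print its class and URI before the cast. A minimal diagnostic sketch (the CheckFs class name is just for illustration):

package com.pcitc.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

/**
 * Minimal check: print which FileSystem implementation
 * FileSystem.get(conf) resolves to under this configuration.
 */
public class CheckFs {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // The same (ineffective) setting as in the code above.
        conf.set("dfs.default.name", "hdfs://hadoopmaster:9000");
        FileSystem fs = FileSystem.get(conf);
        // With no valid default filesystem configured, this should print
        // org.apache.hadoop.fs.LocalFileSystem and file:///
        System.out.println(fs.getClass().getName());
        System.out.println(fs.getUri());
    }
}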

The cause: DistributedFileSystem and LocalFileSystem are both subclasses of FileSystem, and here FileSystem.get(conf) returns a LocalFileSystem instance, which is the default when no valid default filesystem is configured. To get a DistributedFileSystem, the conf object must be configured to point at HDFS. By my reading the code above does configure conf, yet the error persisted; the likely culprit is the property key itself: "dfs.default.name" is not a key Hadoop recognizes (the old key is "fs.default.name", superseded by "fs.defaultFS"), so the setting was silently ignored. Following a suggestion found online, passing the HDFS URI directly to FileSystem.get(URI.create(uri), conf) fixes it. The corrected code is below (note the changed lines):

package com.pcitc.hadoop;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Get the host names of all DataNodes in the HDFS cluster.
 *
 * @author lenovo
 */
public class GetList {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // conf.set("dfs.default.name", "hdfs://hadoopmaster:9000");
        String uri = "hdfs://hadoopmaster:9000";
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
        String[] names = new String[dataNodeStats.length];
        for (int i = 0; i < dataNodeStats.length; i++) {
            names[i] = dataNodeStats[i].getHostName();
            System.out.println("node:" + i + ",name:" + names[i]);
        }
    }
}
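
For reference, an alternative fix is to keep using FileSystem.get(conf) but set the property key that Hadoop actually reads: "fs.default.name" on old versions, "fs.defaultFS" on Hadoop 2.x and later. A sketch under that assumption (the GetListByConf class name is just for illustration), with an instanceof guard so a misconfiguration fails with a clear message instead of a ClassCastException:

package com.pcitc.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class GetListByConf {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // "fs.defaultFS" is the current key; on older Hadoop versions
        // use the deprecated equivalent "fs.default.name".
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000");
        FileSystem fs = FileSystem.get(conf);
        // Guard the cast so a wrong configuration fails with a clear
        // message instead of a ClassCastException.
        if (!(fs instanceof DistributedFileSystem)) {
            System.err.println("Not connected to HDFS, got: " + fs.getClass().getName());
            return;
        }
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
        for (int i = 0; i < dataNodeStats.length; i++) {
            System.out.println("node:" + i + ",name:" + dataNodeStats[i].getHostName());
        }
    }
}

Either way, the point is the same: the cast only succeeds when the configuration (or the explicit URI) actually makes HDFS the filesystem that FileSystem.get returns.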