/**
 * Connects to a local HDFS instance and creates a test directory
 * under the client's HDFS working directory.
 *
 * @param args unused command-line arguments
 * @throws IOException        if the filesystem cannot be reached or mkdirs fails
 * @throws URISyntaxException declared for callers; not thrown by this body
 */
public static void main(String[] args) throws IOException, URISyntaxException {
  Configuration config = new Configuration();
  // BUG FIX: the filesystem URI must be the NameNode RPC endpoint, not the
  // HTTP web UI (port 50070) and never a JSP page path like /dfshealth.jsp.
  // Default RPC ports are 8020 or 9000 — adjust to match core-site.xml.
  config.set("fs.default.name", "hdfs://127.0.0.1:9000");

  FileSystem dfs = FileSystem.get(config);
  String dirName = "TestDirectory";

  // Resolve the new directory relative to the HDFS working directory
  // (typically /user/<username>).
  Path src = new Path(dfs.getWorkingDirectory() + "/" + dirName);

  dfs.mkdirs(src);
}
}
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<init>(DefaultMetricsSystem.java:37)
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<clinit>(DefaultMetricsSystem.java:34)
    at org.apache.hadoop.security.UgiInstrumentation.create(UgiInstrumentation.java:51)
    at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:217)
    at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:185)
    at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:237)
    at org.apache.hadoop.security.KerberosName.<clinit>(KerberosName.java:79)
    at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:210)
    at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:185)
    at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:237)
    at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:482)
    at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:468)
    at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:1519)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1420)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:254)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:123)
    at com.TestConnection.main(TestConnection.java:21)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.configuration.Configuration
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    ... 17 more
运行时出现了上面的异常,但我的配置看起来是正确的。
问题出在哪里?
请帮帮忙!

最佳答案

对于您的问题,您必须把 commons-configuration-1.6.jar 添加到类路径中。

我在下面列出了必要的 jar

{
   // Build a Hadoop configuration and load the cluster's site files explicitly,
   // so the client uses the real cluster settings instead of compiled-in defaults.
   Configuration config = new Configuration();
   config.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
   config.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));

   // NOTE(review): pinning the FileSystem implementation classes explicitly —
   // presumably to avoid scheme-resolution problems when the hdfs jar's
   // service metadata is missing from the classpath; confirm it is needed here.
   config.set("fs.hdfs.impl",
            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
        );
       config.set("fs.file.impl",
            org.apache.hadoop.fs.LocalFileSystem.class.getName()
        );
  // Obtain the filesystem described by the configuration loaded above.
  FileSystem dfs = FileSystem.get(config);
  String dirName = "TestDirectory";
  System.out.println(dfs.getWorkingDirectory() +" this is from /n/n");
  // Create TestDirectory under the HDFS working directory (e.g. /user/<name>).
  Path src = new Path(dfs.getWorkingDirectory()+"/"+dirName);

   dfs.mkdirs(src);

} }

您必须在构建路径中添加以下jar列表。

commons-cli-1.2.jar

commons-collections-3.2.1.jar

commons-configuration-1.6.jar

commons-lang-2.5.jar

commons-logging-1.1.1.jar

guava-11.0.2.jar

hadoop-auth.jar

hadoop-common.jar

protobuf-java-2.4.0a.jar

slf4j-api-1.6.1.jar

log4j-1.2.17.jar

hadoop-hdfs.jar

如果是 Cloudera 发行版,这些 jar 都位于 hadoop/lib 文件夹中。

08-28 15:19