Hadoop Installation and Configuration

Problem encountered and background

I am trying to install Hadoop, but start-all.cmd keeps throwing errors when I run it. Any help would be appreciated.

```
2022-11-08 22:55:31,416 ERROR nodemanager.NodeManager: Error starting NodeManager
java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:640)
        at org.apache.hadoop.fs.FileUtil.canRead(FileUtil.java:1201)
        at org.apache.hadoop.util.DiskChecker.checkAccessByFileMethods(DiskChecker.java:118)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:81)
        at org.apache.hadoop.util.BasicDiskValidator.checkStatus(BasicDiskValidator.java:32)
        at org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.testDirs(DirectoryCollection.java:473)
        at org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.checkDirs(DirectoryCollection.java:392)
        at org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService.checkDirs(LocalDirsHandlerService.java:490)
        at org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService.serviceInit(LocalDirsHandlerService.java:222)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:108)
        at org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService.serviceInit(NodeHealthCheckerService.java:59)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:108)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.serviceInit(NodeManager.java:472)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.initAndStartNodeManager(NodeManager.java:918)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.main(NodeManager.java:979)

2022-11-08 22:55:32,088 FATAL resourcemanager.ResourceManager: Error starting ResourceManager
java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:756)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        at java.lang.Class.getDeclaredMethods0(Native Method)
        at java.lang.Class.privateGetDeclaredMethods(Class.java:2701)
        at java.lang.Class.getDeclaredMethods(Class.java:1975)
        at com.google.inject.spi.InjectionPoint.getInjectionPoints(InjectionPoint.java:688)
        at com.google.inject.spi.InjectionPoint.forInstanceMethodsAndFields(InjectionPoint.java:380)
        at com.google.inject.spi.InjectionPoint.forInstanceMethodsAndFields(InjectionPoint.java:399)
        at com.google.inject.internal.BindingBuilder.toInstance(BindingBuilder.java:84)
        at org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebApp.setup(RMWebApp.java:60)
        at org.apache.hadoop.yarn.webapp.WebApp.configureServlets(WebApp.java:160)
        at com.google.inject.servlet.ServletModule.configure(ServletModule.java:55)
        at com.google.inject.AbstractModule.configure(AbstractModule.java:62)
        at com.google.inject.spi.Elements$RecordingBinder.install(Elements.java:340)
        at com.google.inject.spi.Elements.getElements(Elements.java:110)
        at com.google.inject.internal.InjectorShell$Builder.build(InjectorShell.java:138)
        at com.google.inject.internal.InternalInjectorCreator.build(InternalInjectorCreator.java:104)
        at com.google.inject.Guice.createInjector(Guice.java:96)
        at com.google.inject.Guice.createInjector(Guice.java:73)
        at com.google.inject.Guice.createInjector(Guice.java:62)
        at org.apache.hadoop.yarn.webapp.WebApps$Builder.build(WebApps.java:379)
        at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:424)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.startWepApp(ResourceManager.java:1161)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.serviceStart(ResourceManager.java:1271)
        at org.apache.hadoop.service.AbstractService.start(AbstractService.java:194)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.main(ResourceManager.java:1467)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        ... 36 more
2022-11-08 22:55:30,005 WARN checker.StorageLocationChecker: Exception checking StorageLocation [DISK]file:/E:/hadoop-3.1.0/data/dfs/datanode
java.lang.RuntimeException: Error while running command to get file permissions : ExitCodeException exitCode=-1073741515:
        at org.apache.hadoop.util.Shell.runCommand(Shell.java:1009)
        at org.apache.hadoop.util.Shell.run(Shell.java:902)
        at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:1227)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:1321)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:1303)
        at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1321)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfoByNonNativeIO(RawLocalFileSystem.java:726)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:717)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:678)
        at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:191)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:98)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:239)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:52)
        at org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker$1.call(ThrottledAsyncChecker.java:142)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfoByNonNativeIO(RawLocalFileSystem.java:766)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:717)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:678)
        at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:191)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:98)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:239)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:52)
        at org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker$1.call(ThrottledAsyncChecker.java:142)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
2022-11-08 22:55:30,005 ERROR datanode.DataNode: Exception in secureMain
org.apache.hadoop.util.DiskChecker$DiskErrorException: Too many failed volumes - current valid volumes: 0, volumes configured: 1, volumes failed: 1, volume failures tolerated: 0
        at org.apache.hadoop.hdfs.server.datanode.checker.StorageLocationChecker.check(StorageLocationChecker.java:220)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2762)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2677)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.createDataNode(DataNode.java:2719)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.secureMain(DataNode.java:2863)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.main(DataNode.java:2887)

2022-11-08 22:55:31,904 ERROR namenode.NameNode: Failed to start namenode.
java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:640)
        at org.apache.hadoop.fs.FileUtil.canWrite(FileUtil.java:1220)
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:667)
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:620)
        at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:371)
        at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:227)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:1086)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:714)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:669)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:731)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:968)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:947)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1674)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1741)
```

Run results and error output

(Screenshots of the error output were attached here.)

My approach and what I have tried

I have tried reconfiguring everything, but it still does not work.

This is usually caused by a mistake somewhere in the Hadoop configuration files; go through them again carefully.
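For reference, below is a minimal sketch of what the HDFS storage-directory entries in hdfs-site.xml typically look like for a single-node Windows setup. The dfs.datanode.data.dir path is taken from the E:/hadoop-3.1.0/data/dfs/datanode location in your log; the dfs.namenode.name.dir path is an assumed sibling directory, so adjust both to your actual layout. A malformed property element or a path that does not exist is enough to produce the DiskChecker errors shown above.

```xml
<!-- hdfs-site.xml: minimal sketch for a single-node Windows install.
     Paths follow the E:/hadoop-3.1.0 layout from the log; the namenode
     directory is an assumption and may differ on your machine. -->
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///E:/hadoop-3.1.0/data/dfs/namenode</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///E:/hadoop-3.1.0/data/dfs/datanode</value>
  </property>
</configuration>
```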

The screenshots show that reading file permissions failed, so check the permissions on the directories involved. Alternatively, you could try building and deploying the cluster on Linux.
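To rule out directory permissions on Windows, here is a minimal sketch using the built-in icacls tool, assuming the E:\hadoop-3.1.0 install path from your log; run it in an elevated (Administrator) command prompt and adjust the path to your data directory.

```bat
:: Minimal sketch: inspect and then grant full control on the HDFS data directories.
:: Assumes the E:\hadoop-3.1.0\data layout seen in the log; adjust as needed.
icacls "E:\hadoop-3.1.0\data"
icacls "E:\hadoop-3.1.0\data" /grant "%USERNAME%":(OI)(CI)F /T
```

If the permissions look fine but the UnsatisfiedLinkError on NativeIO$Windows.access0 persists, it is more likely that Hadoop's native Windows libraries are not being loaded than a pure ACL problem.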

1. Check the permissions on the configuration files and data directories.
2. Prefer deploying on Linux; also check the format of the configuration files, since Windows setups tend to run into these problems.
3. Make sure the clocks are aligned via NTP (see the sketch below).
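For point 3, clock alignment mainly matters once you run a multi-node cluster, but it is cheap to verify. A minimal sketch using the Windows time service client (run in an elevated command prompt):

```bat
:: Minimal sketch: check the current sync status, then force a resync against the configured time source.
w32tm /query /status
w32tm /resync
```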

If you need further help, please reply~