Connecting from IDEA to Hive on Linux fails with an org.apache.spark.sql.hive.HiveSessionStateBuilder error

Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder':
Run output and error log

    at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1053)
    at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
    at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
    at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:938)
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:938)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:130)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:130)
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236)
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
    at scala.collection.mutable.HashMap.foreach(HashMap.scala:130)
    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:938)
    at cn.kgc.spark.sparksql.HiveDemo$.main(HiveDemo.scala:24)
    at cn.kgc.spark.sparksql.HiveDemo.main(HiveDemo.scala)
Caused by: org.apache.spark.sql.AnalysisException: java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Ljava/lang/String;I)V;
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
    at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
    at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
    ... 15 more
Caused by: java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Ljava/lang/String;I)V
    at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Native Method)
    at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode(NativeIO.java:524)
    at org.apache.hadoop.fs.RawLocalFileSystem.mkOneDirWithMode(RawLocalFileSystem.java:465)
    at org.apache.hadoop.fs.RawLocalFileSystem.mkdirsWithOptionalPermission(RawLocalFileSystem.java:518)
    at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:496)
    at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:316)
    at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3679)
    at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:597)
    at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
    ... 24 more
22/11/23 22:12:30 INFO spark.SparkContext: Invoking stop() from shutdown hook
22/11/23 22:12:30 INFO server.AbstractConnector: Stopped Spark@50eca7c6{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
22/11/23 22:12:30 INFO ui.SparkUI: Stopped Spark web UI at http://192.168.2.1:4040
22/11/23 22:12:30 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
22/11/23 22:12:30 INFO memory.MemoryStore: MemoryStore cleared
22/11/23 22:12:30 INFO storage.BlockManager: BlockManager stopped
22/11/23 22:12:30 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
22/11/23 22:12:30 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
22/11/23 22:12:30 INFO spark.SparkContext: Successfully stopped SparkContext
22/11/23 22:12:30 INFO util.ShutdownHookManager: Shutdown hook called
22/11/23 22:12:30 INFO util.ShutdownHookManager: Deleting directory C:\Users\20366\AppData\Local\Temp\spark-74246203-b726-4aa7-94a3-598c98aaa4a0
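For context, HiveDemo.scala:24 in the trace is the getOrCreate call. A minimal sketch of what that driver looks like (the app name, master, and metastore URI are assumptions, not taken from the trace):

    package cn.kgc.spark.sparksql

    import org.apache.spark.sql.SparkSession

    object HiveDemo {
      def main(args: Array[String]): Unit = {
        // getOrCreate (line 24 in the trace) instantiates HiveSessionStateBuilder,
        // which starts a Hive SessionState; per the trace, that tries to create the
        // Hive scratch dir on the local Windows filesystem, ending in the native
        // call NativeIO$Windows.createDirectoryWithMode0.
        val spark = SparkSession.builder()
          .appName("HiveDemo")
          .master("local[*]")
          // assumed: metastore of the Hive instance on the Linux host
          .config("hive.metastore.uris", "thrift://192.168.2.xxx:9083")
          .enableHiveSupport()
          .getOrCreate()

        spark.sql("show databases").show()
        spark.stop()
      }
    }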

My approach and what I have tried

I have tried basically every method found online: the environment variables were configured long ago, and setting the dir to empty did not work either.
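For reference, the suggestions found online mostly boil down to making a version-matched winutils/hadoop.dll visible to the JVM: an UnsatisfiedLinkError that names a specific method signature usually means a hadoop.dll was found and loaded but predates the Hadoop jars on the classpath, so createDirectoryWithMode0 is missing from it. A sketch of that setup (the local path is a placeholder, not my actual config):

    // Before the SparkSession is built, point Hadoop at a local winutils
    // distribution whose version matches the hadoop-* jars on the classpath.
    // D:\hadoop-2.7.7 is a placeholder; it must contain bin\winutils.exe and
    // bin\hadoop.dll from the same Hadoop version.
    System.setProperty("hadoop.home.dir", "D:\\hadoop-2.7.7")
    // hadoop.dll must also be loadable by the JVM: copy it into
    // C:\Windows\System32 or add the bin directory to PATH, then restart IDEA.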