一、Handling org.apache.hadoop.security.AccessControlException
This exception is caused by a mismatch between the local system username and the HDFS user, which leaves the client without the required permissions.
Workaround: add the following property to hdfs-site.xml on every machine in the cluster (if the property already exists, change its value from true to false), then restart the cluster.

<property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
</property>
Note: this is only acceptable during testing; in production the value should stay true.
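A safer alternative to disabling permission checking is to connect to HDFS as a user that already has the necessary rights. A minimal sketch, assuming the HDFS user is hadoop and the NameNode address from the core-site.xml shown later (substitute your cluster's values):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AsHdfsUser {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Run the client as the "hadoop" user instead of the local OS account
        // ("hadoop" and the URI are assumptions -- use your own values).
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.122.139:9000"), conf, "hadoop");
        System.out.println(fs.exists(new Path("/")));
        fs.close();
    }
}

This is the same mechanism as the commented-out FileSystem.get(new URI(...), config, "hadoop") line in the test code further below.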
二、Java code and basic configuration
1. hdfs-site.xml: this configuration must match the hdfs-site.xml on your cluster.

<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>192.168.122.137:50090</value>
    </property>
    <property>
        <name>dfs.namenode.secondary.https-address</name>
        <value>192.168.122.137:50091</value>
    </property>
</configuration>
2. core-site.xml: the parameters must match the cluster's core-site.xml.

<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://192.168.122.139:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/opt/hadoop-2.7.5/temp</value>
    </property>
</configuration>
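Because these two files sit on the classpath (for example under src/main/resources), new Configuration() picks them up automatically. A quick sketch to check which values the client actually resolves (the class name is illustrative):

import org.apache.hadoop.conf.Configuration;

public class ShowConfig {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Prints hdfs://192.168.122.139:9000 when core-site.xml is on the
        // classpath; otherwise the built-in default file:/// is returned.
        System.out.println(conf.get("fs.defaultFS"));
        // Values set in code have the highest precedence and override both
        // the classpath files and the server-side defaults.
        conf.set("fs.defaultFS", "hdfs://192.168.122.139:9000");
    }
}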
3. Test code:

package com.hdfs.hdfs;

import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class HDFSTest {

    /**
     * Build a Configuration object carrying the URI of the HDFS (Hadoop
     * distributed file system) cluster we want to access, so that
     * FileSystem.get() knows to construct an HDFS client and which address
     * to connect to.
     *
     * new Configuration() first loads hdfs-default.xml from the jar, then
     * hdfs-site.xml from the classpath.
     *
     * Parameter precedence: 1. values set in client code, 2. user-defined
     * configuration files on the classpath, 3. the server-side defaults.
     */
    public Configuration config = new Configuration();
    public FileSystem fs;

    @Before
    public void setup() throws IOException, InterruptedException, URISyntaxException {
        fs = FileSystem.get(config);
        // fs = FileSystem.get(new URI("hdfs://node2:9000/"), config, "hadoop");
    }

    @After
    public void end() throws Exception {
        if (fs != null) {
            fs.close();
        }
    }

    // Create a directory
    @Test
    public void mkdir() throws Exception {
        Path path = new Path("/springboot");
        fs.mkdirs(path);
        // fs.delete(new Path("/test/hadoop"), true);
        // fs.rename(new Path("/test/hadoop"), new Path("/test/hadoop2"));
    }

    /**
     * Upload a file
     * @throws Exception
     */
    @Test
    public void upload() throws Exception {
        Path srcFile = new Path("F:/SpringBoot/SpringBoot.zip");
        // Target path on HDFS
        Path dstFile = new Path("/springboot/");
        fs.copyFromLocalFile(srcFile, dstFile);
    }

    /**
     * Copy a file from HDFS to the local file system
     */
    @Test
    public void testDownLoadFileToLocal() {
        try {
            fs.copyToLocalFile(new Path("/springboot/SpringBoot.zip"), new Path("g:/"));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // List files under the root directory recursively
    @Test
    public void testListFiles() {
        try {
            RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();
                System.out.println(fileStatus.getPath().getName());
                // System.out.println(fileStatus.getBlockSize());
                System.out.println(fileStatus.getPermission());
                System.out.println(fileStatus.getLen());
                @SuppressWarnings("unused")
                BlockLocation[] blockLocations = fileStatus.getBlockLocations();
                // for (BlockLocation b1 : blockLocations) {
                //     System.out.println("block-length:" + b1.getLength() + "--" + "block-offset:" + b1.getOffset());
                //     String[] hosts = b1.getHosts();
                //     for (String host : hosts) {
                //         System.out.println(host);
                //     }
                // }
                System.out.println("-----------------------------------");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
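The class above moves whole files with copyFromLocalFile/copyToLocalFile; HDFS can also be written and read directly through streams. A minimal sketch under the same configuration (the path and file contents are made up for illustration):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StreamExample {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path file = new Path("/springboot/hello.txt"); // example path
        // Write a line to HDFS through an output stream (true = overwrite).
        try (FSDataOutputStream out = fs.create(file, true)) {
            out.write("hello hdfs\n".getBytes(StandardCharsets.UTF_8));
        }
        // Read the same file back through an input stream.
        try (FSDataInputStream in = fs.open(file);
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine());
        }
        fs.close();
    }
}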
4. pom.xml: the build section pins the compiler to Java 1.8, which prevents Maven's compiler plugin from falling back to its default of Java 1.5.

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.zx</groupId>
<artifactId>changdahdfs</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>changdahdfs</name>
<url>http://maven.apache.org</url>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.5</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.7.5</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.5</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
</dependencies>
</project>
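With a reasonably recent maven-compiler-plugin, the same Java level can also be expressed through the standard compiler properties instead of explicit plugin configuration; a hedged equivalent (not what the original project used):

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
</properties>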
三、Test results
The following operations were verified: creating a directory, uploading a local file, downloading a file, and listing file information. (The original result screenshots are not reproduced here.)
Notes:
- The analysis above reflects the author's own understanding; corrections are welcome.
- For the full code, see my GitHub: https://github.com/Zxnaruto