龙芯3A4000下编译HADOOP3.2.1
1.首先下载最高版本hadoop-3.2.1-src.tar.gz源码包,下载地址:https://hadoop.apache.org/releases.html或直达地址↓
https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-3.2.1/hadoop-3.2.1-src.tar.gz
2.在LOONGNIX系统启动samba服务,方便传递数据资料:
# systemctl enable sshd.service # 开机启动ssh
①#vi /etc/samba/smb.conf
[slave2]
comment = Public slave2 Home
path = /home
public = yes
writable = yes
browseable = yes
guest ok = yes
valid users = slave2
# smbpasswd -a slave2
samba配置 以及没有权限访问的 解决方法
vi /etc/samba/smb.conf
workgroup = WORKGROUP //修改工作组名
[test] //test为打开samba时你想显示的名字 (共享文件夹名)
comment=This is my samba server (这是注释行,可以不写东西)
path=/**** (指定要共享文件的位置)
writable = yes
browseable =yes
read only = yes
create mode=0664 (这是文件权限)
directory mode=0777 (这是目录权限)
service smb start
[root @linux samba]# useradd samba (建一个名叫samba的用户)
[root @linux samba]# passwd samba (给samba用户添加密码)
[root @linux samba]# smbpasswd -a samba
[root @linux samba]# service smb restart
在Windows客户机的地址栏中输入samba服务器ip(如“\\192.168.138.110”)
原文链接:https://blog.csdn.net/u012207077/article/details/11929989
# systemctl enable smb.service # 开机启动samba
# systemctl start smb.service # 立即启动samba
# systemctl restart smb.service # 立即重启samba
# firewall-cmd --permanent --zone=public --add-service=samba # 配置防火墙
# setsebool -P samba_enable_home_dirs on # 命令解除selinux对samba的限制
# systemctl stop firewalld.service # 关闭 防火墙
# systemctl disable firewalld.service
3.开始安装配置hadoop
①检查java版本,并进行升级:
# java -version
openjdk version "1.8.0_242"
OpenJDK Runtime Environment (Loongson 8.1.4-loongson3a-Fedora) (build 1.8.0_242-b08)
OpenJDK 64-Bit Server VM (build 25.242-b08, mixed mode)
# yum update java
②安装maven
# yum install maven
查看版本:
# mvn -version
Apache Maven 3.2.2 (NON-CANONICAL_2015-07-07T11:23:04_root; 2015-07-07T11:23:04+08:00)
Maven home: /usr/share/maven
Java version: 1.8.0_242, vendor: Oracle Corporation
Java home: /usr/lib/jvm/java-1.8.0-openjdk-1.8.0.242-1.b08.8.1.4.fc21.loongson.mips64el/jre
Default locale: en_AU, platform encoding: UTF-8
OS name: "linux", version: "3.10.84-21.fc21.loongson.18.mips64el", arch: "mips64el", family: "unix"
③安装Protocol Buffer 2.5.0,必须是这个版本
# yum install protobuf-devel
查看版本:
# protoc -version
libprotoc 2.5.0
④安装Findbugs(可选)
# yum install findbugs
⑤http://mirrors.hust.edu.cn/apache/hadoop/common/
下载 hadoop3.2.1 的源码包,或者windows下载了放到samba文件夹内。
# tar zxvf hadoop-3.2.1-src.tar.gz
# cd hadoop-3.2.1-src
⑥安装其他依赖包
# yum install -y openssl openssl-devel svn ncurses-devel zlib-devel libtool
# yum install -y snappy snappy-devel bzip2 bzip2-devel lzo lzo-devel lzop autoconf automake
⑦修改maven为阿里云镜像
# vi /etc/maven/settings.xml
<mirrors>
<!-- mirror
| Specifies a repository mirror site to use instead of a given repository. The repository that
| this mirror serves has an ID that matches the mirrorOf element of this mirror. IDs are used
| for inheritance and direct lookup purposes, and must be unique across the set of mirrors.
|
<mirror>
<id>mirrorId</id>
<mirrorOf>repositoryId</mirrorOf>
<name>Human Readable Name for this Mirror.</name>
<url>http://my.repository.com/repo/path</url>
</mirror>
-->
<!-- Aliyun mirror of Maven Central: speeds up artifact downloads from within China. -->
<mirror>
<id>alimaven</id>
<name>aliyun maven</name>
<url>http://maven.aliyun.com/nexus/content/groups/public/</url>
<mirrorOf>central</mirrorOf>
</mirror>
</mirrors>
⑧开始编译,遇到的问题后面继续补充说明解决办法!
# mvn clean package -Pdist,native -DskipTests -Dtar
错误1:[ERROR] Failed to execute goal org.apache.maven.plugins:maven-enforcer-plugin:3.0.0-M1:enforce (clean) on project hadoop-assemblies: Some Enforcer rules have failed. Look above for specific messages explaining why the rule failed. -> [Help 1]
强制关系不对,主要为mvn版本不对,搜下 enforced.maven.version 发现在 hadoop-project/pom.xml 定义了这个变量:
<enforced.maven.version>[3.3.0,)</enforced.maven.version>,对比本机版本3.2.2,改为:<enforced.maven.version>[3.2.1,)</enforced.maven.version>
错误2:[ERROR] Failed to execute goal org.apache.hadoop:hadoop-maven-plugins:3.2.1:cmake-compile
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR] mvn <goals> -rf :hadoop-mapreduce-client-nativetask
解决方案,实际是GCC不支持Error: unrecognized opcode `bswap $3'指令,需要修改/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h文件,实现字节反转,大小端转换:
/**
 * Byte-swap a 32-bit value (little-endian <-> big-endian conversion).
 *
 * Delegates to the GCC/Clang builtin so the compiler emits the correct
 * instruction sequence for the target architecture. This replaces the
 * original x86-only `bswap` inline assembly, which the MIPS assembler
 * rejects with "unrecognized opcode `bswap $3'".
 */
inline uint32_t bswap(uint32_t val) {
  return __builtin_bswap32(val);
}
/**
 * Byte-swap a 64-bit value (little-endian <-> big-endian conversion).
 *
 * Uses the compiler builtin, which is portable across x86, ARM and MIPS,
 * so neither the x86 `bswapq` inline assembly nor the manual
 * two-32-bit-halves fallback is needed anymore.
 */
inline uint64_t bswap64(uint64_t val) {
  return __builtin_bswap64(val);
}
错误3:Failed to find a viable JVM installation under JAVA_HOME
解决方案:修改hadoop-common-project/hadoop-common/HadoopJNI.cmake,增加下面代码中的mips64分支(原文以红色标注的内容):
#
# Linux-specific JNI configuration.
#
# NOTE(review): excerpt from hadoop-common-project/hadoop-common/HadoopJNI.cmake;
# the matching endif() and the rest of the file are not shown in this snippet.
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
# Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
# Since we were invoked from Maven, we know that the JAVA_HOME environment
# variable is valid. So we ignore system paths here and just use JAVA_HOME.
file(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _java_home)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
set(_java_libarch "i386")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
set(_java_libarch "amd64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
set(_java_libarch "arm")
# NOTE(review): this mips64 branch is the addition that fixes the
# "Failed to find a viable JVM installation under JAVA_HOME" error:
# the Loongnix OpenJDK keeps its JVM libraries under jre/lib/mips64el,
# which the stock arch detection below would never probe.
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "mips64")
set(_java_libarch "mips64el")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64le")
if(EXISTS "${_java_home}/jre/lib/ppc64le")
set(_java_libarch "ppc64le")
else()
set(_java_libarch "ppc64")
endif()
else()
set(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
endif()
# Candidate directories searched (in order) for libjvm/JNI headers.
set(_JDK_DIRS "${_java_home}/jre/lib/${_java_libarch}/*"
"${_java_home}/jre/lib/${_java_libarch}"
"${_java_home}/jre/lib/*"
"${_java_home}/jre/lib"
"${_java_home}/lib/*"
"${_java_home}/lib"
"${_java_home}/include/*"
"${_java_home}/include"
"${_java_home}"
)
错误4: Could not find a SASL library (GSASL (gsasl) or Cyrus SASL (libsasl2)).
yum install -y cyrus-sasl*
编译成功后,在hadoop-dist/target下有打包文件hadoop-3.2.1.tar.gz。
[INFO] Reactor Summary:
[INFO]
[INFO] Apache Hadoop Main ................................. SUCCESS [ 2.678 s]
[INFO] Apache Hadoop Build Tools .......................... SUCCESS [ 3.506 s]
[INFO] Apache Hadoop Project POM .......................... SUCCESS [ 2.514 s]
[INFO] Apache Hadoop Annotations .......................... SUCCESS [ 5.096 s]
[INFO] Apache Hadoop Assemblies ........................... SUCCESS [ 0.429 s]
[INFO] Apache Hadoop Project Dist POM ..................... SUCCESS [ 2.885 s]
[INFO] Apache Hadoop Maven Plugins ........................ SUCCESS [ 9.406 s]
[INFO] Apache Hadoop MiniKDC .............................. SUCCESS [ 3.656 s]
[INFO] Apache Hadoop Auth ................................. SUCCESS [ 13.136 s]
[INFO] Apache Hadoop Auth Examples ........................ SUCCESS [ 4.639 s]
[INFO] Apache Hadoop Common ............................... SUCCESS [02:17 min]
[INFO] Apache Hadoop NFS .................................. SUCCESS [ 10.171 s]
[INFO] Apache Hadoop KMS .................................. SUCCESS [ 8.426 s]
[INFO] Apache Hadoop Common Project ....................... SUCCESS [ 0.085 s]
[INFO] Apache Hadoop HDFS Client .......................... SUCCESS [01:03 min]
[INFO] Apache Hadoop HDFS ................................. SUCCESS [02:05 min]
[INFO] Apache Hadoop HDFS Native Client ................... SUCCESS [ 41.364 s]
[INFO] Apache Hadoop HttpFS ............................... SUCCESS [ 13.055 s]
[INFO] Apache Hadoop HDFS-NFS ............................. SUCCESS [ 6.400 s]
[INFO] Apache Hadoop HDFS-RBF ............................. SUCCESS [ 40.502 s]
[INFO] Apache Hadoop HDFS Project ......................... SUCCESS [ 0.086 s]
[INFO] Apache Hadoop YARN ................................. SUCCESS [ 0.086 s]
[INFO] Apache Hadoop YARN API ............................. SUCCESS [ 36.244 s]
[INFO] Apache Hadoop YARN Common .......................... SUCCESS [01:23 min]
[INFO] Apache Hadoop YARN Registry ........................ SUCCESS [ 11.267 s]
[INFO] Apache Hadoop YARN Server .......................... SUCCESS [ 0.123 s]
[INFO] Apache Hadoop YARN Server Common ................... SUCCESS [ 26.934 s]
[INFO] Apache Hadoop YARN NodeManager ..................... SUCCESS [ 36.577 s]
[INFO] Apache Hadoop YARN Web Proxy ....................... SUCCESS [ 7.212 s]
[INFO] Apache Hadoop YARN ApplicationHistoryService ....... SUCCESS [ 13.187 s]
[INFO] Apache Hadoop YARN Timeline Service ................ SUCCESS [ 9.842 s]
[INFO] Apache Hadoop YARN ResourceManager ................. SUCCESS [ 54.365 s]
[INFO] Apache Hadoop YARN Server Tests .................... SUCCESS [ 3.114 s]
[INFO] Apache Hadoop YARN Client .......................... SUCCESS [ 12.816 s]
[INFO] Apache Hadoop YARN SharedCacheManager .............. SUCCESS [ 6.498 s]
[INFO] Apache Hadoop YARN Timeline Plugin Storage ......... SUCCESS [ 6.502 s]
[INFO] Apache Hadoop YARN TimelineService HBase Backend ... SUCCESS [ 0.107 s]
[INFO] Apache Hadoop YARN TimelineService HBase Common .... SUCCESS [ 9.432 s]
[INFO] Apache Hadoop YARN TimelineService HBase Client .... SUCCESS [ 7.990 s]
[INFO] Apache Hadoop YARN TimelineService HBase Servers ... SUCCESS [ 0.082 s]
[INFO] Apache Hadoop YARN TimelineService HBase Server 1.2 SUCCESS [ 7.418 s]
[INFO] Apache Hadoop YARN TimelineService HBase tests ..... SUCCESS [ 5.674 s]
[INFO] Apache Hadoop YARN Router .......................... SUCCESS [ 10.479 s]
[INFO] Apache Hadoop YARN Applications .................... SUCCESS [ 0.072 s]
[INFO] Apache Hadoop YARN DistributedShell ................ SUCCESS [ 6.202 s]
[INFO] Apache Hadoop YARN Unmanaged Am Launcher ........... SUCCESS [ 3.765 s]
[INFO] Apache Hadoop MapReduce Client ..................... SUCCESS [ 0.281 s]
[INFO] Apache Hadoop MapReduce Core ....................... SUCCESS [ 35.878 s]
[INFO] Apache Hadoop MapReduce Common ..................... SUCCESS [ 28.873 s]
[INFO] Apache Hadoop MapReduce Shuffle .................... SUCCESS [ 9.000 s]
[INFO] Apache Hadoop MapReduce App ........................ SUCCESS [ 19.491 s]
[INFO] Apache Hadoop MapReduce HistoryServer .............. SUCCESS [ 11.275 s]
[INFO] Apache Hadoop MapReduce JobClient .................. SUCCESS [ 12.324 s]
[INFO] Apache Hadoop Mini-Cluster ......................... SUCCESS [ 1.920 s]
[INFO] Apache Hadoop YARN Services ........................ SUCCESS [ 0.078 s]
[INFO] Apache Hadoop YARN Services Core ................... SUCCESS [ 5.557 s]
[INFO] Apache Hadoop YARN Services API .................... SUCCESS [ 2.540 s]
[INFO] Apache Hadoop Image Generation Tool ................ SUCCESS [ 7.011 s]
[INFO] Yet Another Learning Platform ...................... SUCCESS [ 7.747 s]
[INFO] Apache Hadoop YARN Site ............................ SUCCESS [ 0.090 s]
[INFO] Apache Hadoop YARN UI .............................. SUCCESS [ 0.150 s]
[INFO] Apache Hadoop YARN Project ......................... SUCCESS [ 16.322 s]
[INFO] Apache Hadoop MapReduce HistoryServer Plugins ...... SUCCESS [ 3.580 s]
[INFO] Apache Hadoop MapReduce NativeTask ................. SUCCESS [ 55.974 s]
[INFO] Apache Hadoop MapReduce Uploader ................... SUCCESS [ 3.973 s]
[INFO] Apache Hadoop MapReduce Examples ................... SUCCESS [ 9.389 s]
[INFO] Apache Hadoop MapReduce ............................ SUCCESS [ 7.425 s]
[INFO] Apache Hadoop MapReduce Streaming .................. SUCCESS [ 22.612 s]
[INFO] Apache Hadoop Distributed Copy ..................... SUCCESS [ 9.895 s]
[INFO] Apache Hadoop Archives ............................. SUCCESS [ 3.973 s]
[INFO] Apache Hadoop Archive Logs ......................... SUCCESS [ 4.475 s]
[INFO] Apache Hadoop Rumen ................................ SUCCESS [ 10.340 s]
[INFO] Apache Hadoop Gridmix .............................. SUCCESS [ 7.934 s]
[INFO] Apache Hadoop Data Join ............................ SUCCESS [ 4.585 s]
[INFO] Apache Hadoop Extras ............................... SUCCESS [ 4.341 s]
[INFO] Apache Hadoop Pipes ................................ SUCCESS [ 11.278 s]
[INFO] Apache Hadoop OpenStack support .................... SUCCESS [ 7.679 s]
[INFO] Apache Hadoop Amazon Web Services support .......... SUCCESS [ 41.114 s]
[INFO] Apache Hadoop Kafka Library support ................ SUCCESS [ 9.197 s]
[INFO] Apache Hadoop Azure support ........................ SUCCESS [ 28.260 s]
[INFO] Apache Hadoop Aliyun OSS support ................... SUCCESS [ 16.968 s]
[INFO] Apache Hadoop Client Aggregator .................... SUCCESS [ 4.107 s]
[INFO] Apache Hadoop Scheduler Load Simulator ............. SUCCESS [ 10.779 s]
[INFO] Apache Hadoop Resource Estimator Service ........... SUCCESS [ 11.466 s]
[INFO] Apache Hadoop Azure Data Lake support .............. SUCCESS [ 12.997 s]
[INFO] Apache Hadoop Tools Dist ........................... SUCCESS [ 18.208 s]
[INFO] Apache Hadoop Tools ................................ SUCCESS [ 0.070 s]
[INFO] Apache Hadoop Client API ........................... SUCCESS [06:48 min]
[INFO] Apache Hadoop Client Runtime ....................... SUCCESS [05:31 min]
[INFO] Apache Hadoop Client Packaging Invariants .......... SUCCESS [ 2.140 s]
[INFO] Apache Hadoop Client Test Minicluster .............. SUCCESS [10:17 min]
[INFO] Apache Hadoop Client Packaging Invariants for Test . SUCCESS [ 0.428 s]
[INFO] Apache Hadoop Client Packaging Integration Tests ... SUCCESS [ 0.309 s]
[INFO] Apache Hadoop Distribution ......................... SUCCESS [ 38.525 s]
[INFO] Apache Hadoop Client Modules ....................... SUCCESS [ 0.088 s]
[INFO] Apache Hadoop Cloud Storage ........................ SUCCESS [ 1.292 s]
[INFO] Apache Hadoop Cloud Storage Project ................ SUCCESS [ 0.090 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 45:40 min
[INFO] Finished at: 2020-07-14T08:06:34+08:00
[INFO] Final Memory: 200M/1265M
[INFO] ------------------------------------------------------------------------
至此就编译完成啦,基本上问题搜索下都是会有答案的@细心耐心。