# ---- Hadoop environment ----
export HADOOP_HOME="/home/oozie/hadoop-2.7.3"
export PATH="${PATH}:${HADOOP_HOME}/bin"
# ---- Oozie environment ----
export OOZIE_HOME="/home/oozie/oozie-5.1.0"
export OOZIE_CONFIG="${OOZIE_HOME}/conf"
export CLASSPATH="${CLASSPATH}:${OOZIE_HOME}/bin"
export PATH="${PATH}:${OOZIE_HOME}/bin"
# Default server URL used by the oozie CLI when -oozie is not given.
export OOZIE_URL="http://hadoop-senior.guppy.com:11000/oozie"
# Download the ExtJS 2.2 bundle (required for the Oozie web console UI).
wget http://archive.cloudera.com/gplextras/misc/ext-2.2.zip
# Download the Oozie 5.1.0 source release.
# FIX: the www-us.apache.org mirror has been decommissioned; the release
# is permanently available from the Apache archive instead.
wget https://archive.apache.org/dist/oozie/5.1.0/oozie-5.1.0.tar.gz
# Unpack the source tarball (assumes it was placed under /home/software).
tar -zxvf /home/software/oozie-5.1.0.tar.gz -C /home/oozie/
# Build the binary distribution from source (run from inside the unpacked
# source tree); -Puber bundles Hadoop dependencies into the distro.
bin/mkdistro.sh -DskipTests -Puber
# Rename the source tree so the built binary distro can use the plain name.
mv oozie-5.1.0 oozie-5.1.0_
# Unpack the freshly built binary distribution next to the source tree.
tar -zxvf oozie-5.1.0_/distro/target/oozie-5.1.0-distro.tar.gz -C /home/oozie/
cd oozie-5.1.0
# ExtJS goes into libext/ so oozie-setup.sh can wire it into the web console.
cp /opt/software/ext-2.2.zip libext/
# Create the Spark configuration directory that oozie-site.xml points at via
# oozie.service.SparkConfigurationService.spark.configurations (*=spark-conf).
cd conf/
mkdir spark-conf
cd spark-conf/
vim spark-defaults.conf
# spark-defaults.conf contents — Spark on YARN history/event logging.
# (Property name corrected: Spark expects spark.yarn.historyServer.address.)
#   spark.yarn.historyServer.address  http://xx:18080
#   spark.eventLog.dir                hdfs://xx:9000/spark/historyLog
#   spark.eventLog.enabled            true
# Edit the Oozie server configuration; the properties to add are listed
# on the following XML line (DB connection, proxy users, sharelib path).
vim oozie-site.xml
<property>
  <name>oozie.service.ProxyUserService.proxyuser.root.hosts</name>
  <value>*</value>
</property>
<property>
  <name>oozie.service.ProxyUserService.proxyuser.root.groups</name>
  <value>*</value>
</property>
<property>
  <name>oozie.service.JPAService.create.db.schema</name>
  <value>false</value>
</property>
<property>
  <name>oozie.service.JPAService.jdbc.driver</name>
  <value>com.mysql.jdbc.Driver</value>
</property>
<property>
  <name>oozie.service.JPAService.jdbc.url</name>
  <value>jdbc:mysql://192.168.0.189:3306/oozie?createDatabaseIfNotExist=true</value>
</property>
<property>
  <name>oozie.service.JPAService.jdbc.username</name>
  <value>oozie</value>
</property>
<property>
  <name>oozie.service.JPAService.jdbc.password</name>
  <value>123456</value>
</property>
<property>
  <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
  <value>*=/home/oozie/hadoop-2.7.3/etc/hadoop</value>
</property>
<property>
  <name>oozie.service.HadoopAccessorService.action.configurations</name>
  <value>*=/home/oozie/hadoop-2.7.3/etc/hadoop</value>
</property>
<property>
  <name>oozie.service.SparkConfigurationService.spark.configurations</name>
  <value>*=spark-conf</value>
</property>
<property>
  <name>oozie.service.WorkflowAppService.system.libpath</name>
  <value>/user/root/share/lib</value>
</property>
<property>
  <name>oozie.use.system.libpath</name>
  <value>true</value>
</property>
<property>
  <name>oozie.subworkflow.classpath.inheritance</name>
  <value>true</value>
</property>
# Unpack Hadoop 2.7.3 (the Hadoop line this Oozie build targets).
tar -zxvf /home/software/hadoop-2.7.3.tar.gz -C /home/oozie/
cd hadoop-2.7.3/
# Set JAVA_HOME explicitly in hadoop-env.sh; the export on the next line
# is the value to put into that file (Hadoop does not inherit it reliably).
vim etc/hadoop/hadoop-env.sh
export JAVA_HOME=/etc/alternatives/java_sdk_1.8.0/
# Configure HDFS; the properties to add are on the following XML line.
vim etc/hadoop/hdfs-site.xml
<!-- 指定HDFS副本的数量 (HDFS replication factor; 1 for a single-node setup) -->
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
<property>
  <name>dfs.secondary.http.address</name>
  <value>192.168.0.189:50090</value>
</property>
# Configure Hadoop core settings, including the hadoop.proxyuser.* entries
# Oozie needs for user impersonation (contents on the following XML line).
vim etc/hadoop/core-site.xml
<!-- 指定HADOOP所使用的文件系统schema(URI),HDFS的老大(NameNode)的地址 -->
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://192.168.0.189:9000</value>
</property>
<!-- 指定hadoop运行时产生文件的存储目录 -->
<property>
  <name>hadoop.tmp.dir</name>
  <value>/home/hadoop/hadoop-2.7.3/tmp</value>
</property>
<!-- OOZIE: allow the Oozie server (running as root) to impersonate users -->
<property>
  <name>hadoop.proxyuser.root.hosts</name>
  <value>master</value>
</property>
<property>
  <name>hadoop.proxyuser.root.groups</name>
  <value>*</value>
</property>
# Format the NameNode metadata directory (first run only — wipes HDFS data).
bin/hdfs namenode -format
# Start HDFS and YARN daemons (start-all.sh is deprecated but still works).
sbin/start-all.sh
# Create the examples directory in HDFS and upload the Oozie examples.
# FIX: 'hadoop dfs' is deprecated in Hadoop 2.x — 'hdfs dfs' is the
# supported equivalent — and -p is needed so the intermediate /user/root
# path is created if it does not exist yet.
bin/hdfs dfs -mkdir -p /user/root/examples
bin/hdfs dfs -put /home/oozie/oozie-5.1.0/examples/* /user/root/examples
# Revisit the Oozie server configuration if anything still needs adjusting.
vim oozie-5.1.0/conf/oozie-site.xml
# Prepare the Oozie server (wires libext jars into the embedded Jetty setup).
bin/oozie-setup.sh
# Install the Oozie sharelib into HDFS (must match the NameNode in fs.defaultFS).
bin/oozie-setup.sh sharelib create -fs hdfs://192.168.0.189:9000 -locallib oozie-sharelib-5.1.0.tar.gz
# FIX: per the Oozie installation docs, the MySQL JDBC driver belongs in
# libext/ (was './lib') so both the server and ooziedb.sh can load it.
cp /opt/modules/hive-2.3.6/lib/mysql-connector-java-5.1.46.jar libext/
# Create the Oozie database schema in MySQL.
bin/ooziedb.sh create -sqlfile oozie.sql -run
# Start the Oozie server.
bin/oozied.sh start
# Unpack the bundled examples and tailor the Spark example before submitting.
tar zxvf oozie-examples.tar.gz
vim examples/apps/spark/job.properties
vim examples/apps/spark/workflow.xml
# Submit and run the Spark example workflow against the Oozie server.
bin/oozie job -oozie http://192.168.0.189:11000/oozie -config examples/apps/spark/job.properties -run
相关推荐
从零开始讲解大数据调度系统构成,集成大数据计算任务构建大数据工作流,基于Oozie构建实现企业级自动化任务开发 课程亮点 1,知识体系完备,从小白到大神各阶段读者均能学有所获。 2,生动形象,化繁为简,讲解...
Oozie 部署 1 1、Maven 安装 1 3、整合Oozie和Hadoop 8 Oozie提交MapReduce任务事例 8 1上传lib和wf到hdfs上 8 2修改job.properties文件 8 3:修改workflow.xml 9 4:执行oozie客户端命令执行mapreduce 10 配置oozie...
Apache Oozie Essentials starts off with the basics right from installing and configuring Oozie from source code on your Hadoop cluster to managing your complex clusters. You will learn how to create ...
oozie配置mysql所需表结构。Apache Oozie是用于Hadoop平台的一种工作流调度引擎。
Hadoop oozie报错:Table ‘oozie.VALIDATE_CONN’ doesn’t exist 。 oozie建表sql,直接下载执行
oozie调用hive介绍,在华为大数据平台下通过oozie调用hive,并解决认证问题
oozie-4.1源码。github下载的。 oozie-4.1源码。github下载的。
1、内容概要:Hadoop+Spark+Hive+HBase+Oozie+Kafka+Flume+Flink+Elasticsearch+Redash等大数据集群及组件搭建指南(详细搭建步骤+实践过程问题总结)。 2、适合人群:大数据运维、大数据相关技术及组件初学者。 3、...
oozie 提交任务参数传递到下一个任务节点 oozie 提交任务参数传递到下一个任务节点
oozie-core
CDH安装oozie后不能看console,必须把ext依赖放入oozie的libext
hadoop oozie启动或运行报错:Table ‘oozie.VALIDATE_CONN’ doesn’t exist
oozie-5.1.0.tar.gz 编译结果,受限上传大小,文件分3部分
oozie mysql数据库表结构。文档里是oozie mysql的建表语句
extjs2.2 oozie可以使用的 oozie需要使用extjs2.2
oozie介绍及使用详解
大数据技术之Oozie入门到精通
介绍了常见的集中调度器Azkaban&Oozie&Airflow的GUI设计
oozie所需的依赖包ext-2.3.0下载!