(1) On the NameNode host, edit ${hadoop_home}/etc/hadoop/yarn-site.xml and mapred-site.xml
<!-- yarn-site.xml -->
<property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
</property>
<property>
    <name>yarn.log-aggregation-enable</name>
    <value>true</value>
</property>
<property>
    <name>yarn.log-aggregation.retain-seconds</name>
    <value>10080</value>
</property>
<property>
    <name>yarn.nodemanager.remote-app-log-dir</name>
    <value>/flink/log</value>
</property>
<property>
    <name>yarn.log.server.url</name>
    <value>http://localhost:19888/jobhistory/logs</value>
</property>

<!-- mapred-site.xml -->
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>
<property>
    <name>mapreduce.jobhistory.intermediate-done-dir</name>
    <value>/history/done_intermediate</value>
</property>
<property>
    <name>mapreduce.jobhistory.done-dir</name>
    <value>/history/done</value>
</property>
<property>
    <name>mapreduce.jobhistory.address</name>
    <value>hadoop101:10020</value>
</property>
<property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>hadoop101:19888</value>
</property>
(2) Copy the configuration files to the other machines in the cluster
scp mapred-site.xml user@<target-ip>:/path/on/target/machine
scp yarn-site.xml user@<target-ip>:/path/on/target/machine
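For example, assuming the other nodes are hadoop102 and hadoop103 and Hadoop is installed under /opt/module/hadoop (the hostnames and the path are placeholders for your own environment):
scp yarn-site.xml mapred-site.xml root@hadoop102:/opt/module/hadoop/etc/hadoop/
scp yarn-site.xml mapred-site.xml root@hadoop103:/opt/module/hadoop/etc/hadoop/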
(3) Restart YARN and the history server
./stop-yarn.sh && ./start-yarn.sh
# go into the Hadoop installation directory
cd ${hadoop_home}/sbin
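# 117681 below is the PID of the JobHistoryServer that is already running (taken from jps); replace it with your own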
kill -9 117681 && ./mr-jobhistory-daemon.sh start historyserver
(4) Check that the services are running
jps
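If the restart succeeded, the listing should include the Hadoop daemons for that node, e.g. NameNode, DataNode, ResourceManager, NodeManager and, on the history node, JobHistoryServer. A purely illustrative listing (PIDs will differ):
112233 ResourceManager
112456 NodeManager
117900 JobHistoryServer
118500 Jps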
(5) Run Flink on YARN
./bin/flink run -m yarn-cluster -c com.lixiang.app.FlinkDemo ./flink-demo-jar-with-dependencies.jar
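The submitted job can also be checked from the YARN side with the standard YARN CLI (the application id is whatever the submission printed):
yarn application -list
yarn logs -applicationId <application_id>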
(6) Check the Hadoop web console (the YARN ResourceManager UI, by default at http://<resourcemanager-host>:8088, and the JobHistory UI on port 19888 as configured above)
Next, configure Hive. Edit hive-site.xml:
<property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://192.168.139.101:3306/metastore?useSSL=false</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
</property>
<property>
    <name>hive.metastore.warehouse.dir</name>
    <value>/user/hive/warehouse</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>123456</value>
</property>
<property>
    <name>hive.metastore.schema.verification</name>
    <value>false</value>
</property>
<property>
    <name>hive.metastore.event.db.notification.api.auth</name>
    <value>false</value>
</property>
<property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
</property>
<property>
    <name>hive.cli.print.header</name>
    <value>true</value>
</property>
<property>
    <name>hive.server2.thrift.bind.host</name>
    <value>ip</value>
</property>
<property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
</property>
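After editing hive-site.xml, the metastore schema has to be initialized and the services started. A minimal sketch, assuming the MySQL driver jar is already under $HIVE_HOME/lib and $HIVE_HOME/bin is on the PATH:
# initialize the metastore schema in MySQL (run once)
schematool -dbType mysql -initSchema
# start the metastore and HiveServer2 in the background
nohup hive --service metastore > metastore.log 2>&1 &
nohup hive --service hiveserver2 > hiveserver2.log 2>&1 &
# connect through HiveServer2 (port 10000 as configured above)
beeline -u jdbc:hive2://localhost:10000 -n root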
CREATE EXTERNAL TABLE tweets
COMMENT "A table backed by Avro data with the Avro schema embedded in the CREATE TABLE statement"
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
STORED AS
INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
LOCATION '/user/hive/warehouse'
TBLPROPERTIES ('avro.schema.literal'='{
    "type": "record",
    "name": "Tweet",
    "namespace": "com.miguno.avro",
    "fields": [
        { "name":"username",  "type":"string"},
        { "name":"tweet",     "type":"string"},
        { "name":"timestamp", "type":"long"}
    ]
}');

insert into tweets values('zhaoliu','Hello word',13800000000);
select * from tweets;

-- Create a table with an external Avro schema file (avro.schema.url) instead of an inline schema:
CREATE EXTERNAL TABLE avro_test1
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
STORED AS
INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
LOCATION '/user/tmp'
TBLPROPERTIES (
'avro.schema.url'='hdfs:///user/hive/warehouse/student.avsc'
);
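The schema files referenced by avro.schema.url (student.avsc here, tweets.avsc for the table below) must exist in HDFS before the tables are queried; assuming both files sit in the current local directory:
hdfs dfs -put -f student.avsc /user/hive/warehouse/student.avsc
hdfs dfs -put -f tweets.avsc /user/hive/warehouse/tweets.avsc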
{"type":"record","name":"student","namespace":"com.tiejia.avro","fields":[{"name":"SID","type":"string","default":""},{"name":"Name","type":"string","default":""},{"name":"Dept","type":"string","default":""},{"name":"Phone","type":"string","default":""},{"name":"Age","type":"string","default":""},{"name":"Date","type":"string","default":""}]
}"type": "record","name": "Tweet","namespace": "com.miguno.avro","fields": [{"name": "username","type": "string"},{"name": "tweet","type": "string"},{"name": "timestamp","type": "long"}]
}CREATE EXTERNAL TABLE tweets
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
STORED AS
INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
LOCATION '/user/tmp'
TBLPROPERTIES (
'avro.schema.url'='hdfs:///user/hive/warehouse/tweets.avsc'
);
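A quick sanity check that the schema-file-backed tables work; the inserted row is illustrative only:
insert into avro_test1 values('001','zhangsan','CS','13800000000','20','2022-01-01');
select * from avro_test1;
select * from tweets;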