设为首页 收藏本站
查看: 899|回复: 0

Hadoop学习笔记(一)HBase脚本分析(三)hbase

[复制链接]

尚未签到

发表于 2015-11-11 14:51:21 | 显示全部楼层 |阅读模式
#! /usr/bin/env bash
#
#/**
# * Copyright 2007 The Apache Software Foundation
# *
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements.  See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership.  The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License.  You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
#
# The hbase command script.  Based on the hadoop command script putting
# in hbase classes, libs and configurations ahead of hadoop's.
#
# TODO: Narrow the amount of duplicated code.
#
# Environment Variables:
#
#   JAVA_HOME        The java implementation to use.
#
#   HBASE_CLASSPATH  Extra Java CLASSPATH entries.
#
#   HBASE_HEAPSIZE   The maximum amount of heap to use, in MB.
#                    Default is 1000.
#
#   HBASE_LIBRARY_PATH  HBase additions to JAVA_LIBRARY_PATH for adding
#                    native libraries.
#
#   HBASE_OPTS       Extra Java runtime options.
#
#   HBASE_CONF_DIR   Alternate conf dir. Default is ${HBASE_HOME}/conf.
#
#   HBASE_ROOT_LOGGER The root appender. Default is INFO,console
#
#   MAVEN_HOME       Where mvn is installed.
#
# Resolve the directory this script lives in (following a relative $0).
bin=$(dirname "$0")
bin=$(cd "$bin" >/dev/null; pwd)

# This will set HBASE_HOME, etc.
. "$bin"/hbase-config.sh

# Are we running under Cygwin?
case "$(uname)" in
  CYGWIN*) cygwin=true ;;
  *)       cygwin=false ;;
esac

# Detect if we are running from an hbase source checkout (maven build tree).
in_dev_env=false
if [ -d "${HBASE_HOME}/target" ]; then
  in_dev_env=true
fi
# With no arguments there is nothing to run: print the command catalogue
# and exit non-zero.
if [ $# = 0 ]; then
  cat <<'EOF'
Usage: hbase <command>
where <command> an option from one of these categories:

DBA TOOLS
  shell            run the HBase shell
  hbck             run the hbase 'fsck' tool
  hlog             write-ahead-log analyzer
  hfile            store file analyzer
  zkcli            run the ZooKeeper shell

PROCESS MANAGEMENT
  master           run an HBase HMaster node
  regionserver     run an HBase HRegionServer node
  zookeeper        run a Zookeeper server
  rest             run an HBase REST server
  thrift           run an HBase Thrift server
  avro             run an HBase Avro server

PACKAGE MANAGEMENT
  classpath        dump hbase CLASSPATH
  version          print the version

 or
  CLASSNAME        run the class named CLASSNAME
Most commands print help when invoked w/o parameters.
EOF
  exit 1
fi
# First argument selects the command; everything after it is passed through
# to the java class.
COMMAND=$1
shift

JAVA=$JAVA_HOME/bin/java
# Maximum heap: -Xmx1000m by default, overridable via HBASE_HEAPSIZE (in MB).
JAVA_HEAP_MAX="-Xmx${HBASE_HEAPSIZE:-1000}m"

# Prefer the mvn from MAVEN_HOME when one is configured.
MVN="mvn"
if [ -n "$MAVEN_HOME" ]; then
  MVN="${MAVEN_HOME}/bin/mvn"
fi

# Clear IFS so that filenames with spaces survive the unquoted loops below.
IFS=

# CLASSPATH starts with the conf dir, followed by the JDK tools jar.
CLASSPATH="${HBASE_CONF_DIR}"
CLASSPATH="${CLASSPATH}:$JAVA_HOME/lib/tools.jar"
add_maven_deps_to_classpath() {
  # Generating the dependency classpath from the maven pom is costly, so it
  # is cached under target/; `mvn clean` removes the cache and forces a
  # regeneration on the next run.
  local cache_file="${HBASE_HOME}/target/cached_classpath.txt"
  if [ ! -f "${cache_file}" ]; then
    ${MVN} -f "${HBASE_HOME}/pom.xml" dependency:build-classpath \
      -Dmdep.outputFile="${cache_file}" &> /dev/null
  fi
  CLASSPATH="${CLASSPATH}:$(cat "${cache_file}")"
}
add_maven_main_classes_to_classpath() {
  # Put the maven-compiled main classes on the CLASSPATH when present.
  local classes_dir="$HBASE_HOME/target/classes"
  if [ -d "$classes_dir" ]; then
    CLASSPATH="${CLASSPATH}:${classes_dir}"
  fi
}
add_maven_test_classes_to_classpath() {
  # For developers: also expose the maven-compiled test classes.
  local test_dir="$HBASE_HOME/target/test-classes"
  if [ -d "${test_dir}" ]; then
    CLASSPATH="${CLASSPATH}:${test_dir}"
  fi
}
# When running from a source checkout, pick up the maven build artifacts
# (dependencies, main classes, then test classes).
if [ "$in_dev_env" = "true" ]; then
  add_maven_deps_to_classpath
  add_maven_main_classes_to_classpath
  add_maven_test_classes_to_classpath
fi
# For releases, add hbase jars & webapps to CLASSPATH.
# Webapps must come first else it messes up Jetty.
# (Adds every hbase jar under HBASE_HOME to the CLASSPATH.)
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  CLASSPATH="${CLASSPATH}:${HBASE_HOME}"
fi
if [ -d "$HBASE_HOME/target/hbase-webapps" ]; then
  CLASSPATH="${CLASSPATH}:${HBASE_HOME}/target"
fi
for f in "$HBASE_HOME"/hbase*.jar; do
  if [[ $f = *sources.jar ]]; then
    : # Skip sources.jar
  elif [ -f "$f" ]; then
    CLASSPATH="${CLASSPATH}:$f"
  fi
done
# Add libs to CLASSPATH. Guard with -f so that an unmatched glob is not
# appended as a literal '.../lib/*.jar' entry (the original loop had no
# guard, unlike the hbase*.jar loop above).
for f in "$HBASE_HOME"/lib/*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH="${CLASSPATH}:$f"
  fi
done
# Add user-specified CLASSPATH last.
if [ -n "$HBASE_CLASSPATH" ]; then
  CLASSPATH="${CLASSPATH}:${HBASE_CLASSPATH}"
fi
# Default log directory and file name; both overridable via the environment
# (an empty value is treated the same as unset, matching the original tests).
: "${HBASE_LOG_DIR:="$HBASE_HOME/logs"}"
: "${HBASE_LOGFILE:=hbase.log}"
# On Cygwin, convert the classpath and key directories to Windows-style
# paths before handing them to the JVM.
if $cygwin; then
  CLASSPATH=$(cygpath -p -w "$CLASSPATH")
  HBASE_HOME=$(cygpath -d "$HBASE_HOME")
  HBASE_LOG_DIR=$(cygpath -d "$HBASE_LOG_DIR")
fi
# Join two path-list fragments with ':', omitting the separator when the
# first fragment is empty.
# Arguments: $1 - existing path list (may be empty); $2 - entry to append
# Outputs:   the joined list on stdout
# Note: uses quoted printf instead of unquoted echo so entries containing
# spaces or glob characters (e.g. '*') are passed through verbatim.
append_path() {
  if [ -z "$1" ]; then
    printf '%s\n' "$2"
  else
    printf '%s\n' "$1:$2"
  fi
}
JAVA_PLATFORM=""
# If available, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH.
# The hadoop launcher is located via HADOOP_HOME (or HADOOP_PREFIX) or the
# PATH; for local-mode HBase, unset HADOOP_HOME / remove hadoop from PATH
# so this section is skipped.
HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which hadoop 2>/dev/null)
# Quoting is essential here: with an empty value the unquoted test becomes
# the one-argument form `[ -f ]`, which is TRUE (non-empty-string test of
# the literal "-f") and wrongly entered this branch when hadoop was absent.
if [ -f "${HADOOP_IN_PATH}" ]; then
  HADOOP_JAVA_LIBRARY_PATH=$(HADOOP_CLASSPATH="$CLASSPATH" "${HADOOP_IN_PATH}" \
    org.apache.hadoop.hbase.util.GetJavaProperty java.library.path 2>/dev/null)
  if [ -n "$HADOOP_JAVA_LIBRARY_PATH" ]; then
    JAVA_LIBRARY_PATH=$(append_path "${JAVA_LIBRARY_PATH}" "$HADOOP_JAVA_LIBRARY_PATH")
  fi
  CLASSPATH=$(append_path "${CLASSPATH}" "$("${HADOOP_IN_PATH}" classpath 2>/dev/null)")
fi
# If native library directories exist, compute the platform string once and
# add the matching per-platform native dirs to JAVA_LIBRARY_PATH.
if [ -d "${HBASE_HOME}/build/native" ] || [ -d "${HBASE_HOME}/lib/native" ]; then
  if [ -z "$JAVA_PLATFORM" ]; then
    # Ask the JVM (via Hadoop's PlatformName) for the platform identifier;
    # spaces are mapped to underscores to form a path component.
    JAVA_PLATFORM=$(CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g")
  fi
  if [ -d "$HBASE_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/build/native/${JAVA_PLATFORM}/lib")
  fi
  if [ -d "${HBASE_HOME}/lib/native" ]; then
    JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" "${HBASE_HOME}/lib/native/${JAVA_PLATFORM}")
  fi
fi
# On Cygwin, convert the native library path to Windows form for the JVM.
if $cygwin; then
  JAVA_LIBRARY_PATH=$(cygpath -p "$JAVA_LIBRARY_PATH")
fi
# Restore ordinary word-splitting behaviour (IFS was cleared above for the
# classpath-building loops).
unset IFS
# Map COMMAND onto the java class (plus any fixed arguments) to execute.
# Daemon commands also fold their per-role options into HBASE_OPTS, except
# when the next argument is "stop".
case "$COMMAND" in
  shell)
    CLASS="org.jruby.Main -X+O ${HBASE_HOME}/bin/hirb.rb"
    ;;
  hbck)
    CLASS='org.apache.hadoop.hbase.util.HBaseFsck'
    ;;
  hlog)
    CLASS='org.apache.hadoop.hbase.regionserver.wal.HLogPrettyPrinter'
    ;;
  hfile)
    CLASS='org.apache.hadoop.hbase.io.hfile.HFile'
    ;;
  zkcli)
    # ZooKeeperMainServerArg returns '-server HOST:PORT' or empty string.
    SERVER_ARG=$("$bin"/hbase org.apache.hadoop.hbase.zookeeper.ZooKeeperMainServerArg)
    CLASS="org.apache.zookeeper.ZooKeeperMain ${SERVER_ARG}"
    ;;
  master)
    CLASS='org.apache.hadoop.hbase.master.HMaster'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
    fi
    ;;
  regionserver)
    CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
    fi
    ;;
  thrift)
    CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
    fi
    ;;
  rest)
    CLASS='org.apache.hadoop.hbase.rest.Main'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS"
    fi
    ;;
  avro)
    CLASS='org.apache.hadoop.hbase.avro.AvroServer'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_AVRO_OPTS"
    fi
    ;;
  zookeeper)
    CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer'
    if [ "$1" != "stop" ] ; then
      HBASE_OPTS="$HBASE_OPTS $HBASE_ZOOKEEPER_OPTS"
    fi
    ;;
  classpath)
    echo $CLASSPATH
    exit 0
    ;;
  version)
    CLASS='org.apache.hadoop.hbase.util.VersionInfo'
    ;;
  *)
    # Anything else is taken as a fully-qualified class name to run.
    CLASS=$COMMAND
    ;;
esac
# Have JVM dump heap if we run out of memory.  Files will be 'launch directory'
# and are named like the following: java_pid21612.hprof. Apparently it doesn't
# 'cost' to have this flag enabled. Its a 1.6 flag only. See:
# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
#
# Fold the common -D system properties into HBASE_OPTS.
for hbase_prop in \
  "hbase.log.dir=$HBASE_LOG_DIR" \
  "hbase.log.file=$HBASE_LOGFILE" \
  "hbase.home.dir=$HBASE_HOME" \
  "hbase.id.str=$HBASE_IDENT_STRING" \
  "hbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"; do
  HBASE_OPTS="$HBASE_OPTS -D$hbase_prop"
done
if [ -n "$JAVA_LIBRARY_PATH" ]; then
  HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
fi
# Launch the JVM. exec replaces this shell unless HBASE_NOEXEC is set
# (useful for wrappers that need to regain control afterwards).
# $CLASS and $HBASE_OPTS are deliberately unquoted: they may hold several
# whitespace-separated words (class name plus fixed arguments / options).
if [ -z "${HBASE_NOEXEC}" ]; then
  exec "$JAVA" -XX:OnOutOfMemoryError="kill -9 %p" $JAVA_HEAP_MAX $HBASE_OPTS -classpath "$CLASSPATH" $CLASS "$@"
else
  "$JAVA" -XX:OnOutOfMemoryError="kill -9 %p" $JAVA_HEAP_MAX $HBASE_OPTS -classpath "$CLASSPATH" $CLASS "$@"
fi

版权声明:本文为博主原创文章,未经博主允许不得转载。

运维网声明 1、欢迎大家加入本站运维交流群:群②:261659950 群⑤:202807635 群⑦870801961 群⑧679858003
2、本站所有主题由该帖子作者发表,该帖子作者与运维网享有帖子相关版权
3、所有作品的著作权均归原作者享有,请您和我们一样尊重他人的著作权等合法权益。如果您对作品感到满意,请购买正版
4、禁止制作、复制、发布和传播具有反动、淫秽、色情、暴力、凶杀等内容的信息,一经发现立即删除。若您因此触犯法律,一切后果自负,我们对此不承担任何责任
5、所有资源均系网友上传或者通过网络收集,我们仅提供一个展示、介绍、观摩学习的平台,我们不对其内容的准确性、可靠性、正当性、安全性、合法性等负责,亦不承担任何法律责任
6、所有作品仅供您个人学习、研究或欣赏,不得用于商业或者其他用途,否则,一切后果均由您自己承担,我们对此不承担任何法律责任
7、如涉及侵犯版权等问题,请您及时通知我们,我们将立即采取措施予以解决
8、联系人Email:admin@iyunv.com 网址:www.yunweiku.com

所有资源均系网友上传或者通过网络收集,我们仅提供一个展示、介绍、观摩学习的平台,我们不对其承担任何法律责任,如涉及侵犯版权等问题,请您及时通知我们,我们将立即处理,联系人Email:kefu@iyunv.com,QQ:1061981298 本贴地址:https://www.yunweiku.com/thread-137991-1-1.html 上篇帖子: Hadoop yarn OutOfMemoryError: unable to create new native thread 下篇帖子: Hadoop 从零开始学习系列-hive与hbase外部关联表建立及数据导入
您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

扫码加入运维网微信交流群X

扫码加入运维网微信交流群

扫描二维码加入运维网微信交流群,最新一手资源尽在官方微信交流群!快快加入我们吧...

扫描微信二维码查看详情

客服E-mail:kefu@iyunv.com 客服QQ:1061981298


QQ群⑦:运维网交流群⑦ QQ群⑧:运维网交流群⑧ k8s群:运维网kubernetes交流群


提醒:禁止发布任何违反国家法律、法规的言论与图片等内容;本站内容均来自个人观点与网络等信息,非本站认同之观点.


本站大部分资源是网友从网上搜集分享而来,其版权均归原作者及其网站所有,我们尊重他人的合法权益,如有内容侵犯您的合法权益,请及时与我们联系进行核实删除!



合作伙伴: 青云cloud

快速回复 返回顶部 返回列表