Fix bugs when installing HDFS

1. Use ~/ instead of $DEST/ for the SSH key setup.
2. Use "__slashplaceholder__" instead of @ as the sed placeholder.
3. Prepend the changed PATH to the first line of ~/.bashrc.
4. Fix the auto-exit issue in start_hdfs.sh so that all HDFS services start.
5. Add a test_hdfs method after HDFS starts.

Change-Id: I82a70d67209f27edead50d64dea03e5cb5ce925a
commit 680b855618
parent 457bcce648
@@ -10,10 +10,10 @@ function install_hdfs {
     install_package openssh-server expect

     # Set ssh with no password
-    if [[ ! -e $DEST/.ssh/id_rsa.pub ]]; then
-        ssh-keygen -q -N '' -t rsa -f $DEST/.ssh/id_rsa
+    if [[ ! -e ~/.ssh/id_rsa.pub ]]; then
+        ssh-keygen -q -N '' -t rsa -f ~/.ssh/id_rsa
     fi
-    cat $DEST/.ssh/id_rsa.pub >> $DEST/.ssh/authorized_keys
+    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys

     if [[ -z $JAVA_HOME ]]; then
         install_package openjdk-7-jre openjdk-7-jdk
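Note: the hunk above moves the SSH key material from $DEST/.ssh to ~/.ssh because sshd, by default, only consults ~/.ssh/authorized_keys in the login user's real home directory. A minimal sanity check before start-dfs.sh runs, assuming the key was just appended on the same host:

# BatchMode forbids password prompts, so this fails fast if key-based
# auth is broken; StrictHostKeyChecking=no skips the first-use prompt.
if ssh -o BatchMode=yes -o StrictHostKeyChecking=no localhost true; then
    echo "passwordless SSH to localhost OK"
else
    echo "passwordless SSH failed; check ~/.ssh permissions (700/600)" >&2
fi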
@@ -36,10 +36,10 @@ function configure_hdfs {
     # edit core-site.xml & hdfs-site.xml
     cp $HDFS_PLUGIN_LIB_DIR/template/* $HDFS_PLUGIN_HADOOP_DIR/etc/hadoop/

-    path=${HDFS_PLUGIN_DIR//\//@}
+    path=${HDFS_PLUGIN_DIR//\//__slashplaceholder__}

     sed -i "s/__PLACEHOLDER__/$path/g" $HDFS_PLUGIN_HADOOP_DIR/etc/hadoop/hdfs-site.xml
-    sed -i 's/@/\//g' $HDFS_PLUGIN_HADOOP_DIR/etc/hadoop/hdfs-site.xml
+    sed -i 's/__slashplaceholder__/\//g' $HDFS_PLUGIN_HADOOP_DIR/etc/hadoop/hdfs-site.xml

     # format namenode
     $HDFS_PLUGIN_HADOOP_DIR/bin/hdfs namenode -format
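Note: the placeholder exists because $path contains slashes, which collide with the default s/// delimiter in the sed call that fills in __PLACEHOLDER__; @ was a fragile choice since it can legitimately occur in a path or URI. A sketch of the round trip, assuming a hypothetical HDFS_PLUGIN_DIR and a template line that contains __PLACEHOLDER__:

HDFS_PLUGIN_DIR=/opt/stack/hdfs                  # hypothetical value
path=${HDFS_PLUGIN_DIR//\//__slashplaceholder__} # no slashes left in $path
echo "file://__PLACEHOLDER__/name" \
    | sed "s/__PLACEHOLDER__/$path/g" \
    | sed 's/__slashplaceholder__/\//g'          # file:///opt/stack/hdfs/name

An alternative sed delimiter, e.g. sed "s|__PLACEHOLDER__|$HDFS_PLUGIN_DIR|g", would avoid the placeholder round trip entirely.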
@@ -49,11 +49,22 @@ function configure_hdfs {
 function start_hdfs {
     # start
     $HDFS_PLUGIN_LIB_DIR/start_hdfs.sh $HDFS_PLUGIN_HADOOP_DIR/sbin/start-dfs.sh

     # add hadoop/bin to PATH
-    echo "export PATH=$PATH:$HDFS_PLUGIN_HADOOP_DIR/bin " >> ~/.bashrc
+    ori_path=$PATH:$HDFS_PLUGIN_HADOOP_DIR/bin
+    no_slash_path=${ori_path//\//__slashplaceholder__}
+    new_path=${no_slash_path//\:/__colonplaceholder__}
+    sed -i "1 s/^/PATH=${new_path}\n/" ~/.bashrc
+    sed -i 's/__slashplaceholder__/\//g' ~/.bashrc
+    sed -i 's/__colonplaceholder__/\:/g' ~/.bashrc
     source ~/.bashrc
 }

+# test_hdfs() - Testing HDFS
+function test_hdfs {
+    hdfs fsck /
+}

 # Stop running hdfs service
 # Triggered from devstack/plugin.sh as part of devstack "unstack"
 function stop_hdfs {
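Note: the old `echo ... >> ~/.bashrc` appended the export to the end of ~/.bashrc, which a stock Ubuntu ~/.bashrc typically never reaches in non-interactive shells (it returns early when not interactive); writing the assignment onto line 1 avoids that. The placeholder dance reappears because $new_path is full of slashes; escaping the colons as well is defensive, since only slashes actually conflict with the s/// delimiter. A small demonstration of the "1 s/^/...\n/" prepend on a throwaway file:

printf 'existing first line\n' > /tmp/demo_bashrc   # hypothetical file
sed -i "1 s/^/PATH=__demo__\n/" /tmp/demo_bashrc    # inject a new line 1
head -n 2 /tmp/demo_bashrc
# PATH=__demo__
# existing first line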
@@ -5,7 +5,6 @@ cmd=$1
 expect <<EOD
 spawn $cmd
 expect {
-    "(yes/no)?" {send "yes\n"}
-    -re . { exp_continue }
+    "(yes/no)?" {send "yes\n"; exp_continue}
 }
 EOD
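Note: this is the "auto exit" fix from the commit message. In expect, an action without exp_continue terminates the expect command as soon as its pattern matches, so the old script could return (tearing down the spawned start-dfs.sh) right after answering the first host-key prompt, before all daemons were launched. With exp_continue on the prompt action, the script keeps answering prompts and only returns at EOF, once start-dfs.sh has actually finished; the removed catch-all `-re . { exp_continue }` becomes unnecessary at that point. A minimal sketch of the pattern, with an explicit eof branch and a disabled timeout added as safeguards that are not in the original patch:

cmd=$1
expect <<EOD
set timeout -1     ;# don't give up on slow daemon startup (extra safeguard)
spawn $cmd
expect {
    "(yes/no)?" { send "yes\n"; exp_continue }
    eof {}         ;# spawned command finished; exit cleanly
}
EOD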
@@ -34,6 +34,8 @@ if [[ "$1" == "stack" && "$2" == "pre-install" ]]; then
     configure_hdfs
     echo_summary "Starting HDFS"
     start_hdfs
+    echo_summary "Testing HDFS"
+    test_hdfs
 fi

 if [[ "$1" == "unstack" ]]; then
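Note: test_hdfs as added only prints the fsck report; the stack run continues even if HDFS is unhealthy. Assuming `hdfs fsck` exits non-zero when it finds problems (worth verifying for the Hadoop version in use), a stricter variant could abort instead:

# Stricter variant (assumption: non-zero fsck exit status on problems).
function test_hdfs {
    if ! hdfs fsck /; then
        echo "HDFS fsck reported problems" >&2
        return 1
    fi
}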