Skip to content

Commit

Permalink
Convert service scripts to systemd scripts. This should address issue #1
Browse files Browse the repository at this point in the history
  • Loading branch information
minyk committed Jun 19, 2017
1 parent 84cb952 commit 447b7a7
Show file tree
Hide file tree
Showing 11 changed files with 122 additions and 50 deletions.
6 changes: 6 additions & 0 deletions resources/hadoop/hadoop.default
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Environment for the Hadoop systemd units; loaded via
# EnvironmentFile=/etc/default/hadoop in hdfs-namenode.service and
# hdfs-datanode.service. Format is KEY=VALUE; lines starting with '#'
# are ignored by systemd's EnvironmentFile parser.
HADOOP_PREFIX=/usr/local/hadoop
HADOOP_YARN_HOME=/usr/local/hadoop
# Passed to the hdfs launcher as --config $HADOOP_CONF_DIR.
HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
YARN_LOG_DIR=/usr/local/hadoop/logs
# Daemons run as root in this Vagrant setup — presumably fine for a dev
# box; NOTE(review): not suitable for production.
YARN_IDENT_STRING=root
HADOOP_MAPRED_IDENT_STRING=root
17 changes: 17 additions & 0 deletions resources/hadoop/hdfs-datanode.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[Unit]
# NOTE(review): Description is identical to hdfs-namenode.service's and
# SyslogIdentifier is 'hdfs' in both units, so the two daemons are hard to
# tell apart in systemctl/journal output — consider differentiating.
Description=HDFS Service: distributed file system.
# Ordering only: start after the namenode unit has been started.
After=hdfs-namenode.service

[Service]
# Restart forever: StartLimitInterval=0 disables start rate limiting,
# and a failed datanode is retried every 15 seconds.
Restart=always
StartLimitInterval=0
RestartSec=15
# HDFS holds many block files open; lift the fd limit.
LimitNOFILE=infinity
PermissionsStartOnly=True
SyslogIdentifier=hdfs
# JAVA_HOME comes from /etc/default/java, HADOOP_* (incl.
# HADOOP_CONF_DIR used below) from /etc/default/hadoop.
EnvironmentFile=/etc/default/java
EnvironmentFile=/etc/default/hadoop
ExecStart=/usr/local/hadoop/bin/hdfs --config $HADOOP_CONF_DIR datanode

[Install]
WantedBy=default.target
17 changes: 17 additions & 0 deletions resources/hadoop/hdfs-namenode.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# systemd unit for the HDFS NameNode (installed into /etc/systemd/system
# by scripts/setup-hadoop.sh).
[Unit]
Description=HDFS Service: distributed file system.

[Service]
# Restart forever: StartLimitInterval=0 disables start rate limiting,
# and a failed namenode is retried every 15 seconds.
Restart=always
StartLimitInterval=0
RestartSec=15
# HDFS holds many files open; lift the fd limit.
LimitNOFILE=infinity
PermissionsStartOnly=True
SyslogIdentifier=hdfs
# JAVA_HOME comes from /etc/default/java, HADOOP_* (incl.
# HADOOP_CONF_DIR used below) from /etc/default/hadoop.
EnvironmentFile=/etc/default/java
EnvironmentFile=/etc/default/hadoop
ExecStart=/usr/local/hadoop/bin/hdfs --config $HADOOP_CONF_DIR namenode

[Install]
WantedBy=default.target
9 changes: 9 additions & 0 deletions resources/kafka/kafka.default
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Environment for the Kafka/Zookeeper systemd units; loaded via
# EnvironmentFile=/etc/default/kafka. Lines starting with '#' are
# ignored by systemd's EnvironmentFile parser.
KAFKA_HOME=/usr/local/kafka
LOG_DIR=/usr/local/kafka/logs
ZK_LOG_FILE=/usr/local/kafka/logs/zookeeper.log
KAFKA_LOG_FILE=/usr/local/kafka/logs/kafka.log
# NOTE(review): the *_PID_FILE and *_EXEC_PATH variables look like
# carry-overs from the old start/stop-kafka.sh scripts; neither
# kafka.service nor zookeeper.service references them — confirm they
# are still needed before keeping.
ZK_PID_FILE=/usr/local/kafka/zookeeper.pid
KAFKA_PID_FILE=/usr/local/kafka/kafka.pid
CONF_DIR=/usr/local/kafka/config
ZOOKEEPER_EXEC_PATH=/usr/local/kafka/bin/zookeeper-server-start.sh
KAFKA_EXEC_PATH=/usr/local/kafka/bin/kafka-server-start.sh
17 changes: 17 additions & 0 deletions resources/kafka/kafka.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[Unit]
Description=Kafka Service: distributed message queue
# Ordering only — NOTE(review): After= without Requires=/Wants= means
# kafka starts after zookeeper *if* both are queued, but does not pull
# zookeeper in; confirm that is intended.
After=zookeeper.service

[Service]
# Restart forever: StartLimitInterval=0 disables start rate limiting,
# and a failed broker is retried every 15 seconds.
Restart=always
StartLimitInterval=0
RestartSec=15
# Kafka keeps many log-segment files/sockets open; lift the fd limit.
LimitNOFILE=infinity
PermissionsStartOnly=True
SyslogIdentifier=kafka
# JAVA_HOME from /etc/default/java; KAFKA_HOME/LOG_DIR etc. from
# /etc/default/kafka (see resources/kafka/kafka.default).
EnvironmentFile=/etc/default/java
EnvironmentFile=/etc/default/kafka
ExecStart=/usr/local/kafka/bin/kafka-server-start.sh /usr/local/kafka/config/server.properties

[Install]
WantedBy=default.target
16 changes: 16 additions & 0 deletions resources/kafka/zookeeper.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
[Unit]
Description=Zookeeper Service: distributed coordination service.

[Service]
# Restart forever: StartLimitInterval=0 disables start rate limiting,
# and a failed server is retried every 15 seconds.
Restart=always
StartLimitInterval=0
RestartSec=15
LimitNOFILE=infinity
PermissionsStartOnly=True
SyslogIdentifier=zookeeper
# JAVA_HOME from /etc/default/java; the Kafka env file is reused here
# since this is the Kafka-bundled Zookeeper.
EnvironmentFile=/etc/default/java
EnvironmentFile=/etc/default/kafka
ExecStart=/usr/local/kafka/bin/zookeeper-server-start.sh /usr/local/kafka/config/zookeeper.properties

[Install]
WantedBy=default.target
60 changes: 24 additions & 36 deletions scripts/init-start-all-services.sh
Original file line number Diff line number Diff line change
@@ -1,55 +1,43 @@
#!/bin/bash
source "/vagrant/scripts/common.sh"
source /etc/profile.d/java.sh
source /etc/profile.d/hadoop.sh
source /etc/profile.d/kafka.sh
source /etc/profile.d/nifi.sh

function formatNameNode {
$HADOOP_PREFIX/bin/hdfs namenode -format myhadoop -force -noninteractive
/usr/local/hadoop/bin/hdfs namenode -format myhadoop -force -noninteractive
echo "formatted namenode"
}

function startHDFS {
$HADOOP_PREFIX/sbin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script hdfs start namenode
$HADOOP_PREFIX/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script hdfs start datanode
echo "started hdfs"
}

function startYarn {
$HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start resourcemanager
$HADOOP_YARN_HOME/sbin/yarn-daemons.sh --config $HADOOP_CONF_DIR start nodemanager
$HADOOP_YARN_HOME/sbin/yarn-daemon.sh start proxyserver --config $HADOOP_CONF_DIR
$HADOOP_PREFIX/sbin/mr-jobhistory-daemon.sh start historyserver --config $HADOOP_CONF_DIR
echo "started yarn"
}
function startServices {

function createEventLogDir {
$HADOOP_PREFIX/bin/hdfs dfs -mkdir /tmp
echo "created tmp dir"
}
# Start HDFS
systemctl start hdfs-namenode.service
systemctl start hdfs-datanode.service

# Start kafka
systemctl start zookeeper.service
systemctl start kafka.service

function startKafka {
$KAFKA_HOME/start-kafka.sh
echo "started kafka"
# Start nifi
systemctl start nifi
}

function startNifi {
$NIFI_HOME/bin/nifi.sh start

function createEventLogDir {
/usr/local/hadoop/bin/hdfs dfs -mkdir /tmp
echo "created tmp dir"
}

function setupServices {
cp -f /vagrant/scripts/start-all-services.sh /etc/init.d/start-all-services
chmod a+x /etc/init.d/start-all-services
chkconfig start-all-services on
# Refresh services
systemctl daemon-reload

# Enable services
systemctl enable hdfs-namenode.service
systemctl enable hdfs-datanode.service
systemctl enable zookeeper.service
systemctl enable kafka.service
systemctl enable nifi
}

setupServices
formatNameNode
startHDFS
#startYarn
startServices
createEventLogDir
startKafka
startNifi
setupServices
7 changes: 5 additions & 2 deletions scripts/setup-hadoop.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,17 @@ function setupHadoop {
mkdir /var/hadoop/mr-history
mkdir /var/hadoop/mr-history/done
mkdir /var/hadoop/mr-history/tmp

echo "copying over hadoop configuration files"
cp -f ${HADOOP_RES_DIR}/* ${HADOOP_CONF_DIR}

cp -f ${HADOOP_RES_DIR}/hdfs-namenode.service /etc/systemd/system/hdfs-namenode.service
cp -f ${HADOOP_RES_DIR}/hdfs-datanode.service /etc/systemd/system/hdfs-datanode.service
}

function setupEnvVars {
echo "creating hadoop environment variables"
cp -f ${HADOOP_RES_DIR}/hadoop.sh /etc/profile.d/hadoop.sh
cp -f ${HADOOP_RES_DIR}/hadoop.default /etc/default/hadoop
}

function installHadoop {
Expand Down
3 changes: 1 addition & 2 deletions scripts/setup-java.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,7 @@ function setupJava {

function setupEnvVars {
echo "creating java environment variables"
echo export JAVA_HOME=/usr/local/java >> /etc/profile.d/java.sh
echo export PATH=\${JAVA_HOME}/bin:\${PATH} >> /etc/profile.d/java.sh
echo JAVA_HOME=/usr/local/java >> /etc/default/java
}

function installJava {
Expand Down
15 changes: 8 additions & 7 deletions scripts/setup-kafka.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,17 @@ function installRemoteKafka {

function setupKafka {
echo "setup kafka"
cp -f /vagrant/resources/kafka/server.properties /usr/local/kafka/config
cp -f /vagrant/resources/kafka/zookeeper.properties /usr/local/kafka/config
cp -f /vagrant/resources/kafka/start-kafka.sh /usr/local/kafka
cp -f /vagrant/resources/kafka/stop-kafka.sh /usr/local/kafka
mkdir -p /usr/local/kafka/logs
cp -f ${KAFKA_RES_DIR}/server.properties /usr/local/kafka/config
cp -f ${KAFKA_RES_DIR}/zookeeper.properties /usr/local/kafka/config
mkdir -p ${KAFKA_HOME}/logs

cp -f ${KAFKA_RES_DIR}/zookeeper.service /etc/systemd/system/zookeeper.service
cp -f ${KAFKA_RES_DIR}/kafka.service /etc/systemd/system/kafka.service
}

function setupEnvVars {
echo "creating kafka environment variables"
cp -f $KAFKA_RES_DIR/kafka.sh /etc/profile.d/kafka.sh
cp -f $KAFKA_RES_DIR/kafka.default /etc/default/kafka
}

function installKafka {
Expand All @@ -40,4 +41,4 @@ echo "setup kafka"

installKafka
setupKafka
setupEnvVars
setupEnvVars
5 changes: 2 additions & 3 deletions scripts/setup-nifi.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,12 @@ function installRemoteNifi {
}

function setupNifi {
echo "copying over nifi configuration files"
# cp -f ${NIFI_RES_DIR}/flume-env.sh ${NIFI_CONF}/flume-env.sh
echo "install Nifi service"
${NIFI_HOME}/bin/nifi.sh install
}

function setupEnvVars {
echo "creating nifi environment variables"
cp -f ${NIFI_RES_DIR}/nifi.sh /etc/profile.d/nifi.sh
cp -f ${NIFI_RES_DIR}/${NIFI_NAME}.properties ${NIFI_HOME}/conf/nifi.properties
}

Expand Down

0 comments on commit 447b7a7

Please sign in to comment.