Skip to content

Commit

Permalink
Merge branch '2.0.0-hadoop3.1.1-java8'
Browse files Browse the repository at this point in the history
  • Loading branch information
Giannis Mouchakis committed May 6, 2019
2 parents 627ee46 + e823e6f commit 2c6ec62
Show file tree
Hide file tree
Showing 17 changed files with 267 additions and 281 deletions.
20 changes: 20 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Docker network created by docker-compose for the hadoop cluster
# (find it with `docker network list`).
DOCKER_NETWORK := docker-hadoop_default
# Env file holding the hadoop configuration passed to each container.
ENV_FILE := hadoop.env
# Images are tagged with the current git branch so every branch
# (hadoop version) gets its own image set.
current_branch := $(shell git rev-parse --abbrev-ref HEAD)

# Neither target produces a file of the same name; declare them phony so
# they always run even if a file called `build` or `wordcount` exists.
.PHONY: build wordcount

# Build all hadoop component images, tagged with the current branch name.
build:
	docker build -t bde2020/hadoop-base:$(current_branch) ./base
	docker build -t bde2020/hadoop-namenode:$(current_branch) ./namenode
	docker build -t bde2020/hadoop-datanode:$(current_branch) ./datanode
	docker build -t bde2020/hadoop-resourcemanager:$(current_branch) ./resourcemanager
	docker build -t bde2020/hadoop-nodemanager:$(current_branch) ./nodemanager
	docker build -t bde2020/hadoop-historyserver:$(current_branch) ./historyserver
	docker build -t bde2020/hadoop-submit:$(current_branch) ./submit

# Run the example wordcount job against a running cluster:
# upload input to HDFS, run the job, print the output, then clean up.
wordcount:
	docker build -t hadoop-wordcount ./submit
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) bde2020/hadoop-base:$(current_branch) hdfs dfs -mkdir -p /input/
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) bde2020/hadoop-base:$(current_branch) hdfs dfs -copyFromLocal /opt/hadoop-3.1.1/README.txt /input/
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) hadoop-wordcount
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) bde2020/hadoop-base:$(current_branch) hdfs dfs -cat /output/*
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) bde2020/hadoop-base:$(current_branch) hdfs dfs -rm -r /output
	docker run --network $(DOCKER_NETWORK) --env-file $(ENV_FILE) bde2020/hadoop-base:$(current_branch) hdfs dfs -rm -r /input
20 changes: 15 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,12 @@

# Changes

Version 1.1.0 introduces healthchecks for the containers.
Version 2.0.0 uses the wait_for_it script for the cluster startup

# Hadoop Docker

## Supported Hadoop Versions
* 2.7.1 with OpenJDK 7
* 2.7.1 with OpenJDK 8
See repository branches for supported hadoop versions

## Quick Start

Expand All @@ -17,13 +16,23 @@ To deploy an example HDFS cluster, run:
docker-compose up
```

Run example wordcount job:
```
make wordcount
```

Or deploy in swarm:
```
docker stack deploy -c docker-compose-v3.yml hadoop
```

`docker-compose` creates a docker network that can be found by running `docker network list`, e.g. `dockerhadoop_default`.

Run `docker network inspect` on the network (e.g. `dockerhadoop_default`) to find the IP the hadoop interfaces are published on. Access these interfaces with the following URLs:

* Namenode: http://<dockerhadoop_IP_address>:50070/dfshealth.html#tab-overview
* Namenode: http://<dockerhadoop_IP_address>:9870/dfshealth.html#tab-overview
* History server: http://<dockerhadoop_IP_address>:8188/applicationhistory
* Datanode: http://<dockerhadoop_IP_address>:50075/
* Datanode: http://<dockerhadoop_IP_address>:9864/
* Nodemanager: http://<dockerhadoop_IP_address>:8042/node
* Resource manager: http://<dockerhadoop_IP_address>:8088/

Expand All @@ -49,5 +58,6 @@ The available configurations are:
* /etc/hadoop/yarn-site.xml YARN_CONF
* /etc/hadoop/httpfs-site.xml HTTPFS_CONF
* /etc/hadoop/kms-site.xml KMS_CONF
* /etc/hadoop/mapred-site.xml MAPRED_CONF

If you need to extend some other configuration file, refer to base/entrypoint.sh bash script.
45 changes: 6 additions & 39 deletions base/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,58 +3,25 @@ MAINTAINER Ivan Ermilov <[email protected]>

RUN echo "deb http://ftp.debian.org/debian jessie-backports main" >> /etc/apt/sources.list

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends openjdk-8-jdk
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -t jessie-backports -y --no-install-recommends openjdk-8-jdk
ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends net-tools curl
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends net-tools curl netcat

RUN gpg --keyserver pool.sks-keyservers.net --recv-keys \
07617D4968B34D8F13D56E20BE5AAA0BA210C095 \
2CAC83124870D88586166115220F69801F27E622 \
4B96409A098DBD511DF2BC18DBAF69BEA7239D59 \
9DD955653083EFED6171256408458C39E964B5FF \
B6B3F7EDA5BA7D1E827DE5180DFF492D8EE2F25C \
6A67379BEFC1AE4D5595770A34005598B8F47547 \
47660BC98BC433F01E5C90581209E7F13D0C92B9 \
CE83449FDC6DACF9D24174DCD1F99F6EE3CD2163 \
A11DF05DEA40DA19CE4B43C01214CF3F852ADB85 \
686E5EDF04A4830554160910DF0F5BBC30CD0996 \
5BAE7CB144D05AD1BB1C47C75C6CC6EFABE49180 \
AF7610D2E378B33AB026D7574FB955854318F669 \
6AE70A2A38F466A5D683F939255ADF56C36C5F0F \
70F7AB3B62257ABFBD0618D79FDB12767CC7352A \
842AAB2D0BC5415B4E19D429A342433A56D8D31A \
1B5D384B734F368052862EB55E43CAB9AEC77EAF \
785436A782586B71829C67A04169AA27ECB31663 \
5E49DA09E2EC9950733A4FF48F1895E97869A2FB \
A13B3869454536F1852C17D0477E02D33DD51430 \
A6220FFCC86FE81CE5AAC880E3814B59E4E11856 \
EFE2E7C571309FE00BEBA78D5E314EEF7340E1CB \
EB34498A9261F343F09F60E0A9510905F0B000F0 \
3442A6594268AC7B88F5C1D25104A731B021B57F \
6E83C32562C909D289E6C3D98B25B9B71EFF7770 \
E9216532BF11728C86A11E3132CF4BF4E72E74D3 \
E8966520DA24E9642E119A5F13971DA39475BD5D \
1D369094D4CFAC140E0EF05E992230B1EB8C6EFA \
A312CE6A1FA98892CB2C44EBA79AB712DE5868E6 \
0445B7BFC4515847C157ECD16BA72FF1C99785DE \
B74F188889D159F3D7E64A7F348C6D7A0DCED714 \
4A6AC5C675B6155682729C9E08D51A0A7501105C \
8B44A05C308955D191956559A5CEE20A90348D47
RUN curl -O https://dist.apache.org/repos/dist/release/hadoop/common/KEYS

RUN gpg --keyserver pool.sks-keyservers.net --recv-key C36C5F0F
RUN gpg --import KEYS

ENV HADOOP_VERSION 2.7.1
ENV HADOOP_VERSION 3.1.1
ENV HADOOP_URL https://www.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz
RUN set -x \
&& curl -fSL "$HADOOP_URL" -o /tmp/hadoop.tar.gz \
&& curl -fSL "$HADOOP_URL.asc" -o /tmp/hadoop.tar.gz.asc \
&& gpg --verify /tmp/hadoop.tar.gz.asc \
&& tar -xvf /tmp/hadoop.tar.gz -C /opt/ \
&& rm /tmp/hadoop.tar.gz*

RUN ln -s /opt/hadoop-$HADOOP_VERSION/etc/hadoop /etc/hadoop
RUN cp /etc/hadoop/mapred-site.xml.template /etc/hadoop/mapred-site.xml
RUN mkdir /opt/hadoop-$HADOOP_VERSION/logs

RUN mkdir /hadoop-data
Expand Down
36 changes: 36 additions & 0 deletions base/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ configure /etc/hadoop/hdfs-site.xml hdfs HDFS_CONF
configure /etc/hadoop/yarn-site.xml yarn YARN_CONF
configure /etc/hadoop/httpfs-site.xml httpfs HTTPFS_CONF
configure /etc/hadoop/kms-site.xml kms KMS_CONF
configure /etc/hadoop/mapred-site.xml mapred MAPRED_CONF

if [ "$MULTIHOMED_NETWORK" = "1" ]; then
echo "Configuring for multihomed network"
Expand Down Expand Up @@ -78,4 +79,39 @@ if [ -n "$GANGLIA_HOST" ]; then
done > /etc/hadoop/hadoop-metrics2.properties
fi

# Block until a TCP service is reachable.
#   $1 - "host:port" pair to probe (e.g. "namenode:9870").
# Probes with `nc -z` every $retry_seconds seconds, up to $max_try attempts;
# exits the script with status 1 if the service never comes up.
function wait_for_it()
{
    local serviceport=$1
    local service=${serviceport%%:*}   # text before the first ':' -> host
    local port=${serviceport#*:}       # text after the first ':'  -> port
    local retry_seconds=5
    local max_try=100
    local i=1

    nc -z "$service" "$port"
    result=$?

    until [ $result -eq 0 ]; do
        echo "[$i/$max_try] check for ${service}:${port}..."
        echo "[$i/$max_try] ${service}:${port} is not available yet"
        if (( i == max_try )); then
            echo "[$i/$max_try] ${service}:${port} is still not available; giving up after ${max_try} tries. :/"
            exit 1
        fi

        echo "[$i/$max_try] try in ${retry_seconds}s once again ..."
        i=$((i + 1))
        sleep "$retry_seconds"

        nc -z "$service" "$port"
        result=$?
    done
    echo "[$i/$max_try] ${service}:${port} is available."
}

# Wait for every host:port listed in SERVICE_PRECONDITION before starting.
# Quoted expansion keeps each array element intact as a single argument.
for precondition in "${SERVICE_PRECONDITION[@]}"
do
    wait_for_it "$precondition"
done

# Hand control to the container's command. "$@" (quoted) preserves argument
# boundaries; the original unquoted `exec $@` re-split any argument that
# contained whitespace.
exec "$@"
6 changes: 3 additions & 3 deletions datanode/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
FROM bde2020/hadoop-base:1.1.0-hadoop2.7.1-java8
FROM bde2020/hadoop-base:2.0.0-hadoop3.1.1-java8
MAINTAINER Ivan Ermilov <[email protected]>

HEALTHCHECK CMD curl -f http://localhost:50075/ || exit 1
HEALTHCHECK CMD curl -f http://localhost:9864/ || exit 1

ENV HDFS_CONF_dfs_datanode_data_dir=file:///hadoop/dfs/data
RUN mkdir -p /hadoop/dfs/data
Expand All @@ -10,6 +10,6 @@ VOLUME /hadoop/dfs/data
ADD run.sh /run.sh
RUN chmod a+x /run.sh

EXPOSE 50075
EXPOSE 9864

CMD ["/run.sh"]
95 changes: 0 additions & 95 deletions docker-compose-local.yml

This file was deleted.

71 changes: 0 additions & 71 deletions docker-compose-nginx.yml

This file was deleted.

Loading

0 comments on commit 2c6ec62

Please sign in to comment.