# Base image
FROM ubuntu:latest

# Add metadata
LABEL maintainer="JM jmahn1819@gmail.com"

# Install required packages, clean the APT cache, and create and configure the hadoop user
RUN apt-get update -y && \
    apt-get install -y sudo passwd openssh-server wget nano net-tools iputils-ping openjdk-8-jdk rsync && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* && \
    groupadd hadoop && \
    useradd -m -d /home/hadoop -s /bin/bash -g hadoop -G sudo hadoop && \
    echo "hadoop:1234" | chpasswd && \
    mkdir -p /home/hadoop/.ssh && \
    chown -R hadoop:hadoop /home/hadoop

# Download and unpack Hadoop, create local data directories, set environment variables, and configure the SSH daemon
RUN mkdir -p /home/hadoop && \
    wget https://archive.apache.org/dist/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -P /home/hadoop && \
    tar -xvzf /home/hadoop/hadoop-3.4.1.tar.gz -C /home/hadoop && \
    rm /home/hadoop/hadoop-3.4.1.tar.gz && \
    chown -R hadoop:hadoop /home/hadoop/hadoop-3.4.1 && \
    mkdir -p /data/hadoop/tmp && \
    mkdir -p /data/hadoop/dfs/name && \
    mkdir -p /data/hadoop/dfs/data && \
    chown -R hadoop:hadoop /data/hadoop && \
    chmod -R 777 /data/hadoop && \
    echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64" >> /etc/profile.d/hadoop_env.sh && \
    echo "export HADOOP_HOME=/home/hadoop/hadoop-3.4.1" >> /etc/profile.d/hadoop_env.sh && \
    echo "export HADOOP_CONF_DIR=\$HADOOP_HOME/etc/hadoop" >> /etc/profile.d/hadoop_env.sh && \
    echo "export PATH=\$PATH:\$JAVA_HOME/bin:\$HADOOP_HOME/bin:\$HADOOP_HOME/sbin" >> /etc/profile.d/hadoop_env.sh && \
    chmod +x /etc/profile.d/hadoop_env.sh && \
    chown hadoop:hadoop /home/hadoop/.bashrc && \
    mkdir -p /var/run/sshd && \
    echo 'PermitRootLogin no' >> /etc/ssh/sshd_config && \
    echo 'PasswordAuthentication yes' >> /etc/ssh/sshd_config

# Open the SSH port
EXPOSE 22

# Start the SSH server and keep a foreground shell
CMD ["/bin/bash", "-c", "service ssh start && exec /bin/bash"]
Build the image:
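The exact build command is not in the capture; a hedged version, assuming the Dockerfile above sits in the current directory and the tag matches the hadoop:1.0 image used by docker run below:

```bash
# Assumption: tag name taken from the docker run command that follows.
docker build -t hadoop:1.0 .
```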
PS D:\pmj\dockerfile_data\hadoop> docker run -itd --name hdfs_mj --hostname datanode -p 9870:9870 -p 9000:9000 -p 8025:8025 -p 8088:8088 -p 25:22 -v D:/dockerdata/datanode:/data/hadoop/dfs/name hadoop:1.0
a12be0058563643469796520922ca72ce040c3cc817df2065ccf94ad45cdffd5
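Before connecting over SSH, the published ports and the Hadoop environment can be checked from the host. A quick sketch, using the container name hdfs_mj from the run command above:

```bash
# 22/tcp inside the container should map to host port 25 (-p 25:22 above).
docker port hdfs_mj

# bash -l starts a login shell, which sources /etc/profile.d/hadoop_env.sh.
docker exec -it hdfs_mj bash -lc 'hadoop version'
```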
PS C:\Users\Admin> ssh hadoop@210.119.14.71
The authenticity of host '210.119.14.71 (210.119.14.71)' can't be established.
ED25519 key fingerprint is SHA256:HJwyGT3zRqEcWMgTgcgDBl7Xqeb58oiOY5PA5yIE4fM.
This key is not known by any other names.
Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
Warning: Permanently added '210.119.14.71' (ED25519) to the list of known hosts.
hadoop@210.119.14.71's password:
Connection closed by 210.119.14.71 port 22
PS C:\Users\Admin> ssh hadoop@210.119.14.71
hadoop@210.119.14.71's password:
Welcome to Ubuntu 24.04.2 LTS (GNU/Linux 5.15.167.4-microsoft-standard-WSL2 x86_64)
* Documentation: https://help.ubuntu.com
* Management: https://landscape.canonical.com
* Support: https://ubuntu.com/pro
This system has been minimized by removing packages and content that are
not required on a system that users do not log into.
To restore this content, you can run the 'unminimize' command.
The programs included with the Ubuntu system are free software;
the exact distribution terms for each program are described in the
individual files in /usr/share/doc/*/copyright.
Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by
applicable law.
To run a command as administrator (user "root"), use "sudo <command>".
See "man sudo_root" for details.
hadoop@datanode:~$ Connection to 210.119.14.71 closed by remote host.
Connection to 210.119.14.71 closed.
PS C:\Users\Admin> ssh hadoop@210.119.14.71
ssh: connect to host 210.119.14.71 port 22: Connection refused
PS C:\Users\Admin> ssh hadoop@210.119.14.71 -p 25
hadoop@210.119.14.71's password:
Welcome to Ubuntu 24.04.2 LTS (GNU/Linux 5.15.167.4-microsoft-standard-WSL2 x86_64)
* Documentation: https://help.ubuntu.com
* Management: https://landscape.canonical.com
* Support: https://ubuntu.com/pro