51 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| dsk-minchulahn | baad3bba16 | Ansible - Server settings - modified inventory | 2024-02-07 13:59:08 +09:00 |
| dsk-minchulahn | 72e5675f14 | Terraform - CloudFront - created a CloudFront distribution for access to the dsk-alert-images bucket | 2024-02-05 11:44:08 +09:00 |
| dsk-minchulahn | f728769c96 | Terraform - Buckets - changed dsk-alert-images to private | 2024-02-05 11:43:41 +09:00 |
| dsk-minchulahn | 8f1760ee66 | Terraform - modified the agent EC2 security group | 2024-02-05 10:31:19 +09:00 |
| dsk-minchulahn | 49deb4864a | Terraform - Buckets - temporarily disabled the dsk-alert-images lifecycle | 2024-02-02 11:28:26 +09:00 |
| dsk-minchulahn | bbe2b9331b | Terraform - added Buckets | 2024-02-01 16:14:05 +09:00 |
| dsk-minchulahn | f794df086e | Terraform - fixed api S3 permissions | 2024-01-31 14:49:43 +09:00 |
| dsk-minchulahn | 1f75f3e907 | Terraform - fixed dsk-devops user permissions | 2024-01-31 14:03:31 +09:00 |
| dsk-minchulahn | d0589a0de6 | Terraform - added KMS Policy | 2024-01-31 14:03:17 +09:00 |
| dsk-minchulahn | f3e9a26a95 | Terraform - brought IAM users, roles, and policies up to date | 2024-01-30 17:46:36 +09:00 |
| dsk-minchulahn | 886e60ab28 | Terraform - added a policy to the dsk-devops user so steampipe can run | 2024-01-30 11:13:25 +09:00 |
| dsk-minchulahn | 7b290f8267 | Terraform - renamed the Policies files | 2024-01-30 11:12:54 +09:00 |
| dsk-minchulahn | 66d4cf04a7 | Terraform - added permissions required to run Lambda | 2024-01-29 17:15:45 +09:00 |
| dsk-minchulahn | 773ce08893 | AWS - Lambda - renamed the spot instance file | 2024-01-29 14:32:59 +09:00 |
| dsk-minchulahn | 30376d0a27 | AWS - Lambda - added agent start/stop | 2024-01-29 14:31:54 +09:00 |
| dsk-minchulahn | 3c9be964a5 | Terraform - Lambda - added agent EC2 stop | 2024-01-29 14:20:01 +09:00 |
| dsk-minchulahn | 61149888de | Terraform - IAM - configured Roles and Policies | 2024-01-26 17:20:37 +09:00 |
| dsk-minchulahn | 33fbacaa2c | Terraform - IAM - base configuration | 2024-01-26 16:55:00 +09:00 |
| dsk-minchulahn | 4307cf5261 | Terraform - IAM - added a readonly user | 2024-01-25 17:04:03 +09:00 |
| dsk-minchulahn | c8f14a323d | Terraform - EC2 - created an EC2 instance for ARM testing on the Agent side | 2024-01-25 14:26:26 +09:00 |
| 변정훈 | 75fcc67a4c | Delete ansible/server_settings/passwd_inventory | 2024-01-15 15:11:05 +09:00 |
| 변정훈 | 8342efe8e2 | Update inventory | 2024-01-15 15:10:47 +09:00 |
| 변정훈 | f354a3e556 | Update server_settings.yml | 2024-01-15 15:06:43 +09:00 |
| 변정훈 | e50f815f40 | Added variables | 2024-01-12 14:05:12 +09:00 |
| 변정훈 | fee97e57a5 | Fixes | 2024-01-12 14:04:26 +09:00 |
| 변정훈 | 688cd78be8 | Fixes | 2024-01-12 13:13:40 +09:00 |
| ByeonJungHun | 3dab70aadf | Updated README.md | 2024-01-12 10:34:19 +09:00 |
| ByeonJungHun | 7ebe45c873 | Removed .DS_Store | 2024-01-12 10:32:45 +09:00 |
| ByeonJungHun | 8690a16580 | Modified check items | 2024-01-11 16:28:14 +09:00 |
| ByeonJungHun | 76584fb0ba | Added tasks | 2024-01-11 15:50:49 +09:00 |
| 변정훈 | 2a9caec167 | Removed key | 2024-01-11 11:29:21 +09:00 |
| ByeonJungHun | 33134809a2 | Updated README | 2024-01-11 11:28:11 +09:00 |
| ByeonJungHun | c6cacc096f | Updated check results | 2024-01-11 10:56:16 +09:00 |
| ByeonJungHun | efdc0d86ba | Updated check results | 2024-01-11 10:53:55 +09:00 |
| ByeonJungHun | 4fedf1eb47 | Added security settings Role | 2024-01-11 10:53:02 +09:00 |
| 변정훈 | bdce12d232 | Revised content | 2024-01-11 10:00:42 +09:00 |
| ByeonJungHun | 0f0f1037a6 | Update | 2024-01-10 16:38:27 +09:00 |
| ByeonJungHun | b1f72b0a10 | Added security setting role and fixed scripts | 2024-01-10 16:14:53 +09:00 |
| ByeonJungHun | 349cd3ff7a | Updated check results | 2024-01-10 16:10:59 +09:00 |
| ByeonJungHun | 3378119998 | Added role and fixed scripts | 2024-01-10 15:48:06 +09:00 |
| ByeonJungHun | b669d2c4b6 | Updated check results | 2024-01-10 15:47:03 +09:00 |
| ByeonJungHun | 150c51e717 | Updated check results | 2024-01-10 14:43:13 +09:00 |
| ByeonJungHun | 1fee5a6c73 | Updated check results | 2024-01-10 14:41:57 +09:00 |
| ByeonJungHun | eadbe72498 | Updated check results | 2024-01-10 14:37:12 +09:00 |
| ByeonJungHun | a85ce91982 | Updated check results | 2024-01-10 14:35:39 +09:00 |
| ByeonJungHun | a02684c30c | Updated check results | 2024-01-10 14:33:40 +09:00 |
| ByeonJungHun | ab8f65c82f | Updated check results | 2024-01-10 14:19:15 +09:00 |
| 변정훈 | f4ed852443 | Excluded the 252 server | 2024-01-10 11:40:36 +09:00 |
| ByeonJungHun | c58e7a5caf | Modified check items | 2024-01-10 11:38:51 +09:00 |
| 변정훈 | 9ede2d3731 | Update | 2024-01-10 10:04:49 +09:00 |
| 변정훈 | 843dffafcb | Update | 2024-01-10 10:04:08 +09:00 |
131 changed files with 2657 additions and 165 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,8 @@
[defaults]
become = true
inventory = checklist
roles_path = roles
deprecation_warnings = false
display_skipped_hosts = no
ansible_home = .
stdout_callback = debug

View File

@@ -0,0 +1,46 @@
# Password aging settings
os_auth_pw_max_age: 90
os_auth_pw_min_age: 1
os_auth_pw_warn_age: 7
passhistory: 2
# Inactivity and Failed attempts lockout settings
fail_deny: 5
fail_unlock: 0
inactive_lock: 0
shell_timeout: 300
# tally settings
onerr: 'fail'
deny: 5
unlock_time: 300
# Password complexity settings
pwquality_minlen: 9
pwquality_maxrepeat: 3
pwquality_lcredit: -1
pwquality_ucredit: -1
pwquality_dcredit: -1
pwquality_ocredit: -1
# SSH settings
sshrootlogin: 'yes'
sshmainport: 22
ssh_service_name: sshd
# Crictl setup
crictl_app: crictl
crictl_version: 1.25.0
crictl_os: linux
crictl_arch: amd64
crictl_dl_url: https://github.com/kubernetes-sigs/cri-tools/releases/download/v{{ crictl_version }}/{{ crictl_app }}-v{{ crictl_version }}-{{ crictl_os }}-{{ crictl_arch }}.tar.gz
crictl_bin_path: /usr/local/bin
crictl_file_owner: root
crictl_file_group: root
# temp
username: root
password: saasadmin1234
# common user flag
common_user: False

View File

@@ -0,0 +1,18 @@
#!/bin/sh
echo "-------------------------------------------------------------------------------\n"
echo " _╓g@DDKg╓_ \033[0;31m=╗╗╗╗,\033[0;0m \033[0;34m,╗╗╗╗╤\033[0;0m ,╔╗DDKg╔_ ╓g@DD╗╔_ ╓g@DD╗╔_"
echo " ╓D╝╙\` \`╠╠H \033[0;31m╙╠╠╠╠▒\033[0;0m \033[0;34mÆ╬╬╬╬╩\033[0;0m _j╠╙\` 1╠R j╠R^ \`╙╠▒,j╠R^ \`╙╠▒,"
echo " 1╠^ ,╠╝ \033[0;31m╝╠R\033[0;0m \033[0;34m╓▓╬╬╬╝\033[0;0m j╠H 1╠^ ╠╠ ╚╠H ╚╠H"
echo "j╠⌐ j╠Γ \033[0;31m'\033[0;0m \033[0;34mÆ╬╬╬╬╙\033[0;0m ╠H ╔╠R ╠╠ ╠╠ ╠╠"
echo "╠╠ ╒╠R \033[0;34m╔╣╬╬╬\033[0;33m╬▒\033[0;0m j╠H _D╝\` ╠╠ ╠╠ ╠╠"
echo "'╠H 1╠^ .. \033[0;34m,╣╬╬╬╣\033[0;33m╬╣╣▓┐\033[0;0m ╠D ╔╚╙ ╔_ ╠╠ ╠╠ ╠╠"
echo " '╠▒╓░╙ _╔╔^ \033[0;34m¢╬╬╬╬╩\033[0;33m ╚╣╣╣╣▌\033[0;0m ╚▒╓░╙ ╔░H ╠╠ ╠╠ ╠╠"
echo " ⁿ╚╠K≥╔╔╔1▒╝^ \033[0;34m╒▓╬╬╬╩^\033[0;33m \`╣╣╣╣▓╕\033[0;0m \`╚╠▒g╔╔╔gD╝╙ ╠╠ ╠╠ ╠╠\n"
echo "-------------------------------------------------------------------------------"
echo ""
echo " - 알 림 - "
echo ""
echo " 현재 접속하신 서버는 DataSaker 개발 서버 입니다. "
echo " 인가되지 않은 사용자의 접근, 수정 등 행위 시 처벌을 받을 수 있습니다. "
echo ""
echo "-------------------------------------------------------------------------------"

View File

@@ -0,0 +1,6 @@
---
- name: restart sshd
service:
name: "{{ ssh_service_name }}"
state: restarted
enabled: true

View File

@@ -0,0 +1,7 @@
---
- name: Change user password
user:
name: "{{ username }}"
password: "{{ password | password_hash('sha512') }}"
state: present

View File

@@ -0,0 +1,29 @@
---
- name: Archive the existing /etc/update-motd.d files
archive:
path: /etc/update-motd.d/*
dest: /etc/update-motd.d/motd.tar.gz
format: gz
force_archive: true
- name: Remove the default motd.d files
file:
path: /etc/update-motd.d/{{ item }}
state: absent
with_items:
- 10-help-text
- 85-fwupd
- 90-updates-available
- 91-release-upgrade
- 95-hwe-eol
- 98-fsck-at-reboot
- 50-motd-news
- 88-esm-announce
- name: Create login banner
copy:
src: login_banner
dest: /etc/update-motd.d/00-header
owner: root
group: root
mode: 0755

View File

@@ -0,0 +1,19 @@
---
- name: Downloading and extracting {{ crictl_app }} {{ crictl_version }}
unarchive:
src: "{{ crictl_dl_url }}"
dest: "{{ crictl_bin_path }}"
owner: "{{ crictl_file_owner }}"
group: "{{ crictl_file_group }}"
extra_opts:
- crictl
remote_src: yes
- name: Crictl command crontab setting
ansible.builtin.cron:
name: crontab command
minute: "0"
hour: "3"
user: root
job: "/usr/local/bin/crictl rmi --prune"

View File

@@ -0,0 +1,48 @@
---
- name: Set pass max days
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^PASS_MAX_DAYS.*$'
line: "PASS_MAX_DAYS\t{{os_auth_pw_max_age}}"
backrefs: yes
- name: Set pass min days
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^PASS_MIN_DAYS.*$'
line: "PASS_MIN_DAYS\t{{os_auth_pw_min_age}}"
backrefs: yes
- name: Set pass min length
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^PASS_MIN_LEN.*$'
line: "PASS_MIN_LEN\t{{pwquality_minlen}}"
backrefs: yes
- name: Set pass warn days
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^PASS_WARN_AGE.*$'
line: "PASS_WARN_AGE\t{{os_auth_pw_warn_age}}"
backrefs: yes
- name: Set password encryption to SHA512
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^ENCRYPT_METHOD\s.*$'
line: "ENCRYPT_METHOD\tSHA512"
backrefs: yes
- name: Disable MD5 crypt explicitly
lineinfile:
dest: /etc/login.defs
state: present
regexp: '^MD5_CRYPT_ENAB.*$'
line: "MD5_CRYPT_ENAB NO"
backrefs: yes
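With the role defaults shown earlier (os_auth_pw_max_age: 90, os_auth_pw_min_age: 1, pwquality_minlen: 9, os_auth_pw_warn_age: 7), these lineinfile tasks should leave /etc/login.defs with the values below; note that backrefs: yes means only existing lines are rewritten, nothing new is appended.

```sh
# Spot-check the password policy written by the login_defs tasks.
grep -E '^(PASS_MAX_DAYS|PASS_MIN_DAYS|PASS_MIN_LEN|PASS_WARN_AGE|ENCRYPT_METHOD)' /etc/login.defs
# Expected with the role defaults:
#   PASS_MAX_DAYS   90
#   PASS_MIN_DAYS   1
#   PASS_MIN_LEN    9
#   PASS_WARN_AGE   7
#   ENCRYPT_METHOD  SHA512
```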

View File

@@ -0,0 +1,24 @@
---
- include: login_defs.yml
tags: login_defs
- include: pam.yml
tags: pam
- include: sshd_config.yml
tags: sshd_config
- include: sudoers.yml
tags: sudoers
- include: profile.yml
tags: profile
- include: banner.yml
tags: banner
- include: crictl.yml
tags: crictl
- include: admin_set.yml
tags: admin_set
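Because every include is tagged, individual hardening steps can be run in isolation. A minimal invocation sketch: site.yml is an assumed name for the six-line play that applies the security-settings role (its real file name is not shown in this diff), and the inventory comes from the ansible.cfg above (inventory = checklist), so no -i flag is needed.

```sh
# Apply only the SSH and PAM hardening from the security-settings role.
ansible-playbook site.yml --tags sshd_config,pam

# Dry-run the full role against one group first, showing would-be changes.
ansible-playbook site.yml --limit agent_host --check --diff
```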

View File

@@ -0,0 +1,50 @@
---
- name: Add pam_tally2.so
template:
src: common-auth.j2
dest: /etc/pam.d/common-auth
owner: root
group: root
mode: 0644
- name: Create pwquality.conf password complexity configuration
block:
- apt:
name: libpam-pwquality
state: present
install_recommends: false
- template:
src: pwquality.conf.j2
dest: /etc/security/pwquality.conf
owner: root
group: root
mode: 0644
- name: Configure pam_tally2.so in common-account
block:
- lineinfile:
dest: /etc/pam.d/common-account
regexp: '^account\srequisite'
line: "account requisite pam_deny.so"
- lineinfile:
dest: /etc/pam.d/common-account
regexp: '^account\srequired'
line: "account required pam_tally2.so"
- name: password reuse is limited
lineinfile:
dest: /etc/pam.d/common-password
line: "password required pam_pwhistory.so remember=5"
- name: password hashing algorithm is SHA-512
lineinfile:
dest: /etc/pam.d/common-password
regexp: '^password\s+\[success'
line: "password [success=1 default=ignore] pam_unix.so sha512"
- name: Shadow Password Suite Parameters
lineinfile:
dest: /etc/pam.d/common-password
regexp: '^password\s+\[success'
line: "password [success=1 default=ignore] pam_unix.so sha512"

View File

@@ -0,0 +1,24 @@
---
- name: Set session timeout
lineinfile:
dest: /etc/profile
regexp: '^TMOUT=.*'
insertbefore: '^readonly TMOUT'
line: 'TMOUT={{shell_timeout}}'
state: "{{ 'absent' if (shell_timeout == 0) else 'present' }}"
- name: Set TMOUT readonly
lineinfile:
dest: /etc/profile
regexp: '^readonly TMOUT'
insertafter: 'TMOUT={{shell_timeout}}'
line: 'readonly TMOUT'
state: "{{ 'absent' if (shell_timeout == 0) else 'present' }}"
- name: Set export TMOUT
lineinfile:
dest: /etc/profile
regexp: '^export TMOUT.*'
insertafter: 'readonly TMOUT'
line: 'export TMOUT'
state: "{{ 'absent' if (shell_timeout == 0) else 'present' }}"

View File

@@ -0,0 +1,30 @@
---
- name: Configure ssh root login to {{sshrootlogin}}
lineinfile:
dest: /etc/ssh/sshd_config
regexp: '^(#)?PermitRootLogin.*'
line: 'PermitRootLogin {{sshrootlogin}}'
insertbefore: '^Match.*'
state: present
owner: root
group: root
mode: 0640
notify: restart sshd
- name: SSH Listen on Main Port
lineinfile:
dest: /etc/ssh/sshd_config
insertbefore: '^#*AddressFamily'
line: 'Port {{sshmainport}}'
state: present
owner: root
group: root
mode: 0640
notify: restart sshd
- name: "Setting sshd allow users"
template:
src: allow_users.j2
dest: "/etc/ssh/sshd_config.d/allow_users.conf"
notify: restart sshd
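After the restart sshd handler fires, the effective configuration can be checked with sshd's test mode rather than by grepping files, so the drop-in written to /etc/ssh/sshd_config.d is also taken into account. The expected values below come from the role defaults (sshmainport: 22, sshrootlogin: 'yes'); run as root.

```sh
# Dump the effective sshd configuration, including sshd_config.d drop-ins.
sshd -T | grep -E '^(port|permitrootlogin|allowusers)'
# Expected with the role defaults:
#   port 22
#   permitrootlogin yes
#   allowusers dev2-iac@10.10.43.* ...
```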

View File

@@ -0,0 +1,94 @@
---
- name: "Create devops group"
ansible.builtin.group:
name: "devops"
state: present
- name: "get current users"
shell: "cat /etc/passwd | egrep -iv '(false|nologin|sync|root|dev2-iac)' | awk -F: '{print $1}'"
register: deleting_users
- name: "Delete users"
ansible.builtin.user:
name: "{{ item }}"
state: absent
remove: yes
with_items: "{{ deleting_users.stdout_lines }}"
when: item != ansible_user
ignore_errors: true
- name: "Create admin user"
ansible.builtin.user:
name: "{{ item.name }}"
group: "devops"
shell: "/bin/bash"
system: yes
state: present
with_items: "{{ admin_users }}"
when:
- item.name is defined
ignore_errors: true
- name: "admin user password change"
user:
name: "{{ item.name }}"
password: "{{ password | password_hash('sha512') }}"
state: present
with_items: "{{ admin_users }}"
when:
- item.name is defined
ignore_errors: true
- name: "Add admin user key"
authorized_key:
user: "{{ item.name }}"
state: present
key: "{{ item.key }}"
with_items: "{{ admin_users }}"
when:
- item.name is defined
- item.key is defined
- common_user is defined
ignore_errors: true
- name: "Create common user"
ansible.builtin.user:
name: "{{ item.name }}"
group: "users"
shell: "/bin/bash"
system: yes
state: present
with_items: "{{ allow_users }}"
when:
- item.name is defined
- common_user is defined
ignore_errors: true
- name: "Change common user password change"
user:
name: "{{ item.name }}"
password: "{{ password | password_hash('sha512') }}"
state: present
with_items: "{{ allow_users }}"
when:
- item.name is defined
- common_user is defined
ignore_errors: true
- name: "Add common user key"
authorized_key:
user: "{{ item.name }}"
state: present
key: "{{ item.key }}"
with_items: "{{ allow_users }}"
when:
- item.name is defined
- item.key is defined
- common_user is defined
ignore_errors: true
- name: "Setting sudoers allow users"
template:
src: sudoers_users.j2
dest: "/etc/sudoers.d/sudoers_users"
ignore_errors: true

View File

@@ -0,0 +1,11 @@
AllowUsers dev2-iac@10.10.43.*
{% if admin_users is defined %}
{% for user in admin_users %}
AllowUsers {{ user.name }}@{{ user.ip }}
{% endfor %}
{% endif %}
{% if allow_users is defined %}
{% for user in allow_users %}
AllowUsers {{ user.name }}@{{ user.ip }}
{% endfor %}
{% endif %}
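A sketch of what this template renders to, assuming one admin user and one common user; the names and IPs are illustrative, not taken from the inventory:

```sh
# Rendered /etc/ssh/sshd_config.d/allow_users.conf, assuming
# admin_users = [{name: alice, ip: 10.10.43.50}] and
# allow_users = [{name: bob,   ip: 10.10.43.51}].
cat /etc/ssh/sshd_config.d/allow_users.conf
# AllowUsers dev2-iac@10.10.43.*
# AllowUsers alice@10.10.43.50
# AllowUsers bob@10.10.43.51
```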

View File

@@ -0,0 +1,27 @@
#
# /etc/pam.d/common-auth - authentication settings common to all services
#
# This file is included from other service-specific PAM config files,
# and should contain a list of the authentication modules that define
# the central authentication scheme for use on the system
# (e.g., /etc/shadow, LDAP, Kerberos, etc.). The default is to use the
# traditional Unix authentication mechanisms.
#
# As of pam 1.0.1-6, this file is managed by pam-auth-update by default.
# To take advantage of this, it is recommended that you configure any
# local modules either before or after the default block, and use
# pam-auth-update to manage selection of other modules. See
# pam-auth-update(8) for details.
auth required pam_tally2.so onerr={{onerr}} even_deny_root deny={{deny}} unlock_time={{unlock_time}}
# here are the per-package modules (the "Primary" block)
auth [success=1 default=ignore] pam_unix.so nullok
# here's the fallback if no module succeeds
auth requisite pam_deny.so
# prime the stack with a positive return value if there isn't one already;
# this avoids us returning an error just because nothing sets a success code
auth required pam_permit.so
# since the modules above will each just jump around
# and here are more per-package modules (the "Additional" block)
auth optional pam_cap.so
# end of pam-auth-update config
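With the tally defaults from this role (onerr: 'fail', deny: 5, unlock_time: 300), the lockout line at the top of the deployed /etc/pam.d/common-auth should render as shown below.

```sh
# Verify the account-lockout line rendered from common-auth.j2.
grep pam_tally2 /etc/pam.d/common-auth
# Expected with the role defaults:
# auth required pam_tally2.so onerr=fail even_deny_root deny=5 unlock_time=300
```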

View File

@@ -0,0 +1,50 @@
# Configuration for systemwide password quality limits
# Defaults:
#
# Number of characters in the new password that must not be present in the
# old password.
# difok = 5
#
# Minimum acceptable size for the new password (plus one if
# credits are not disabled which is the default). (See pam_cracklib manual.)
# Cannot be set to lower value than 6.
minlen = {{pwquality_minlen}}
#
# The maximum credit for having digits in the new password. If less than 0
# it is the minimum number of digits in the new password.
dcredit = {{pwquality_dcredit}}
#
# The maximum credit for having uppercase characters in the new password.
# If less than 0 it is the minimum number of uppercase characters in the new
# password.
ucredit = {{pwquality_ucredit}}
#
# The maximum credit for having lowercase characters in the new password.
# If less than 0 it is the minimum number of lowercase characters in the new
# password.
lcredit = {{pwquality_lcredit}}
#
# The maximum credit for having other characters in the new password.
# If less than 0 it is the minimum number of other characters in the new
# password.
ocredit = {{pwquality_ocredit}}
#
# The minimum number of required classes of characters for the new
# password (digits, uppercase, lowercase, others).
# minclass = 0
#
# The maximum number of allowed consecutive same characters in the new password.
# The check is disabled if the value is 0.
maxrepeat = {{pwquality_maxrepeat}}
#
# The maximum number of allowed consecutive characters of the same class in the
# new password.
# The check is disabled if the value is 0.
# maxclassrepeat = 0
#
# Whether to check for the words from the passwd entry GECOS string of the user.
# The check is enabled if the value is not 0.
# gecoscheck = 0
#
# Path to the cracklib dictionaries. Default is to use the cracklib default.
# dictpath =

View File

@@ -0,0 +1,6 @@
dev2-iac ALL=(ALL) NOPASSWD: ALL
{% if admin_users is defined %}
{% for user in admin_users %}
{{ user.name }} ALL=(ALL) NOPASSWD: ALL
{% endfor %}
{% endif %}

View File

@@ -0,0 +1,6 @@
---
- hosts: all
become: true
gather_facts: true
roles:
- role: security-settings

View File

@@ -0,0 +1,65 @@
[prod-demo-master]
10.10.43.100 ansible_port=2222 ansible_user=dev2
[prod-demo-worker]
10.10.43.101 ansible_port=2222 ansible_user=dev2
[dev-demo-master]
10.10.43.105 ansible_port=2222 ansible_user=dev2
[dev-demo-worker]
10.10.43.106 ansible_port=2222 ansible_user=dev2
[saas_mgmt_master]
10.10.43.240 ansible_port=2222 ansible_user=dev2
[saas_mgmt_node]
10.10.43.[241:243] ansible_port=2222 ansible_user=dev2
[dsk_dev_master]
10.10.43.[111:113] ansible_port=2222 ansible_user=dev2
[dsk_dev_node]
10.10.43.[114:153] ansible_port=2222 ansible_user=dev2
[bastion]
10.10.43.43 ansible_port=2222 ansible_user=havelight
[agent_host]
10.10.43.177 ansible_port=2222 ansible_user=dev2
10.10.43.178 ansible_port=2222 ansible_user=dev2
10.10.43.179 ansible_port=2222 ansible_user=dev2
10.10.43.180 ansible_port=2222 ansible_user=dev2
10.10.43.181 ansible_port=2222 ansible_user=dev2
10.10.43.182 ansible_port=2222 ansible_user=dev2
[agent_cri_master]
10.10.43.185 ansible_port=2222 ansible_user=dev2
[agent_cri_worker]
10.10.43.186 ansible_port=2222 ansible_user=dev2
10.10.43.187 ansible_port=2222 ansible_user=dev2
10.10.43.188 ansible_port=2222 ansible_user=dev2
[agent_middleware_master]
10.10.43.189 ansible_port=2222 ansible_user=dev2
[agent_middleware_worker]
10.10.43.190 ansible_port=2222 ansible_user=dev2
10.10.43.191 ansible_port=2222 ansible_user=dev2
10.10.43.192 ansible_port=2222 ansible_user=dev2
10.10.43.193 ansible_port=2222 ansible_user=dev2
10.10.43.194 ansible_port=2222 ansible_user=dev2
10.10.43.199 ansible_port=2222 ansible_user=dev2
[all:children]
saas_mgmt_master
saas_mgmt_node
dsk_dev_master
dsk_dev_node
bastion
agent_host
agent_cri_master
agent_cri_worker
agent_middleware_master
agent_middleware_worker

View File

@@ -0,0 +1,31 @@
[all]
10.10.43.195 ansible_user=dev2 ansible_port=2222
10.10.43.196 ansible_user=dev2 ansible_port=2222
10.10.43.197 ansible_user=dev2 ansible_port=2222
10.10.43.200 ansible_user=dev2 ansible_port=2222
10.10.43.201 ansible_user=dev2 ansible_port=2222
10.10.43.202 ansible_user=dev2 ansible_port=2222
10.10.43.203 ansible_user=dev2 ansible_port=2222
10.10.43.204 ansible_user=dev2 ansible_port=2222
10.10.43.205 ansible_user=dev2 ansible_port=2222
10.10.43.206 ansible_user=dev2 ansible_port=2222
10.10.43.207 ansible_user=dev2 ansible_port=2222
10.10.43.208 ansible_user=dev2 ansible_port=2222
10.10.43.210 ansible_user=dev2 ansible_port=2222
10.10.43.211 ansible_user=dev2 ansible_port=2222
10.10.43.212 ansible_user=dev2 ansible_port=2222
10.10.43.213 ansible_user=dev2 ansible_port=2222
10.10.43.214 ansible_user=dev2 ansible_port=2222
10.10.43.215 ansible_user=dev2 ansible_port=2222
10.10.43.216 ansible_user=dev2 ansible_port=2222
10.10.43.217 ansible_user=dev2 ansible_port=2222
10.10.43.218 ansible_user=dev2 ansible_port=2222
10.10.43.224 ansible_user=dev2 ansible_port=2222
10.10.43.225 ansible_user=dev2 ansible_port=2222
10.10.43.226 ansible_user=dev2 ansible_port=2222
10.10.43.227 ansible_user=dev2 ansible_port=2222
10.10.43.228 ansible_user=dev2 ansible_port=2222
10.10.43.230 ansible_user=dev2 ansible_port=2222
10.10.43.235 ansible_user=dev2 ansible_port=2222
10.10.43.236 ansible_user=dev2 ansible_port=2222
10.10.43.252 ansible_user=dev2 ansible_port=2222

View File

@@ -4,15 +4,21 @@ ansible script 구조
.
├── 00_old
├── 01_old
├── README.md
├── infra_setting
├── kubespray
├── security_check
├── server_settings
├── teleport_setting
└── zabbix_agent
```
|Directory|Description|
|---|---|
|00_old|Backup of old scripts|
|01_old|Backup of the previous Ansible scripts|
|infra_setting|Drop IP rules, dev2 group creation, dev2-iac and dev2 user creation, ssh key registration<br>sudo configuration, disable selinux, stop the firewall, change the ssh port, disable ssh root login<br>restrict ssh source IPs, password change, vault registration, save to Excel|
|teleport_setting|Teleport Agent deployment and Teleport registration|
|zabbix_agent|Zabbix Agent deployment and Zabbix registration|
|kubespray|kubespray for managing the dsk dev Kubernetes environment|
| Directory | Description |
| --- | --- |
| 00_old | Backup of old scripts |
| 01_old | Backup of the previous Ansible scripts |
| infra_setting | Drop IP rules, dev2 group creation, dev2-iac and dev2 user creation, ssh key registration<br>sudo configuration, disable selinux, stop the firewall, change the ssh port, disable ssh root login<br>restrict ssh source IPs, password change, vault registration, save to Excel |
| teleport_setting | Teleport Agent deployment and Teleport registration |
| zabbix_agent | Zabbix Agent deployment and Zabbix registration |
| kubespray | kubespray for managing the dsk dev Kubernetes environment |
| security_check | Runs a script on each server that checks for vulnerabilities based on the detailed technical vulnerability analysis and assessment guide for critical information and communications infrastructure (주요정보통신기반시설)<br>The full results are stored on the NAS and a summary is published as README.md in the Git repository<br>Details are available on the NAS or through the links written into README.md |
| security_settings | Baseline remediation of vulnerabilities that should be applied by default |

View File

@@ -1,31 +1,31 @@
[all]
10.10.43.195
10.10.43.196
10.10.43.197
10.10.43.200
10.10.43.201
10.10.43.202
10.10.43.203
10.10.43.204
10.10.43.205
10.10.43.206
10.10.43.207
10.10.43.208
10.10.43.210
10.10.43.211
10.10.43.212
10.10.43.213
10.10.43.214
10.10.43.215
10.10.43.216
10.10.43.217
10.10.43.218
10.10.43.224
10.10.43.225
10.10.43.226
10.10.43.227
10.10.43.228
10.10.43.230
10.10.43.235
10.10.43.236
10.10.43.252
10.10.43.195 ansible_user=dev2-iac ansible_port=2222
10.10.43.196 ansible_user=dev2-iac ansible_port=2222
10.10.43.197 ansible_user=dev2-iac ansible_port=2222
10.10.43.200 ansible_user=dev2-iac ansible_port=2222
10.10.43.201 ansible_user=dev2-iac ansible_port=2222
10.10.43.202 ansible_user=dev2-iac ansible_port=2222
10.10.43.203 ansible_user=dev2-iac ansible_port=2222
10.10.43.204 ansible_user=dev2-iac ansible_port=2222
10.10.43.205 ansible_user=dev2-iac ansible_port=2222
10.10.43.206 ansible_user=dev2-iac ansible_port=2222
10.10.43.207 ansible_user=dev2-iac ansible_port=2222
10.10.43.208 ansible_user=dev2-iac ansible_port=2222
10.10.43.210 ansible_user=dev2-iac ansible_port=2222
10.10.43.211 ansible_user=dev2-iac ansible_port=2222
10.10.43.212 ansible_user=dev2-iac ansible_port=2222
10.10.43.213 ansible_user=dev2-iac ansible_port=2222
10.10.43.214 ansible_user=dev2-iac ansible_port=2222
10.10.43.215 ansible_user=dev2-iac ansible_port=2222
10.10.43.216 ansible_user=dev2-iac ansible_port=2222
10.10.43.217 ansible_user=dev2-iac ansible_port=2222
10.10.43.218 ansible_user=dev2-iac ansible_port=2222
10.10.43.224 ansible_user=dev2-iac ansible_port=2222
10.10.43.225 ansible_user=dev2-iac ansible_port=2222
10.10.43.226 ansible_user=dev2-iac ansible_port=2222
10.10.43.227 ansible_user=dev2-iac ansible_port=2222
10.10.43.228 ansible_user=dev2-iac ansible_port=2222
10.10.43.230 ansible_user=dev2-iac ansible_port=2222
10.10.43.235 ansible_user=dev2-iac ansible_port=2222
10.10.43.236 ansible_user=dev2-iac ansible_port=2222
10.10.43.252 ansible_user=dev2-iac ansible_port=2222

Binary file not shown.

View File

@@ -1,6 +1,6 @@
---
- include: 00_host_setting.yml
tags: host
#- include: 00_host_setting.yml
# tags: host
- include: 01_get_password.yml
tags: password

View File

@@ -1,103 +1,3 @@
| 이름 | 아이피 | 상태 요약 | 상세 보기 |
| --- | --- | --- | --- |
| agent-master-docker | 10.10.43.185 | 취약 | http://10.10.43.42:8080/agent-master-docker.10.10.43.185.txt |
| agent-worker2-containerd | 10.10.43.187 | 취약 | http://10.10.43.42:8080/agent-worker2-containerd.10.10.43.187.txt |
| agent-worker3-crio | 10.10.43.188 | 취약 | http://10.10.43.42:8080/agent-worker3-crio.10.10.43.188.txt |
| amazon-2023 | 10.10.43.175 | 취약 | http://10.10.43.42:8080/amazon-2023.10.10.43.175.txt |
| centos-7 | 10.10.43.167 | 취약 | http://10.10.43.42:8080/centos-7.10.10.43.167.txt |
| centos-8 | 10.10.43.168 | 취약 | http://10.10.43.42:8080/centos-8.10.10.43.168.txt |
| centos-9 | 10.10.43.169 | 취약 | http://10.10.43.42:8080/centos-9.10.10.43.169.txt |
| cmoa-jaeger-master | 10.10.43.203 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-master.10.10.43.203.txt |
| cmoa-jaeger-master | 10.10.43.213 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-master.10.10.43.213.txt |
| cmoa-jaeger-worker1 | 10.10.43.204 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-worker1.10.10.43.204.txt |
| cmoa-jaeger-worker1 | 10.10.43.214 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-worker1.10.10.43.214.txt |
| cmoa-jaeger-worker2-crio | 10.10.43.205 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-worker2-crio.10.10.43.205.txt |
| cmoa-jaeger-worker2 | 10.10.43.215 | 취약 | http://10.10.43.42:8080/cmoa-jaeger-worker2.10.10.43.215.txt |
| cmoa-jspd-master | 10.10.43.206 | 취약 | http://10.10.43.42:8080/cmoa-jspd-master.10.10.43.206.txt |
| cmoa-jspd-master | 10.10.43.216 | 취약 | http://10.10.43.42:8080/cmoa-jspd-master.10.10.43.216.txt |
| cmoa-jspd-worker1 | 10.10.43.207 | 취약 | http://10.10.43.42:8080/cmoa-jspd-worker1.10.10.43.207.txt |
| cmoa-jspd-worker1 | 10.10.43.217 | 취약 | http://10.10.43.42:8080/cmoa-jspd-worker1.10.10.43.217.txt |
| cmoa-jspd-worker2 | 10.10.43.208 | 취약 | http://10.10.43.42:8080/cmoa-jspd-worker2.10.10.43.208.txt |
| cmoa-jspd-worker2 | 10.10.43.218 | 취약 | http://10.10.43.42:8080/cmoa-jspd-worker2.10.10.43.218.txt |
| cmoa-master-1 | 10.10.43.200 | 취약 | http://10.10.43.42:8080/cmoa-master-1.10.10.43.200.txt |
| cmoa-master-2 | 10.10.43.210 | 취약 | http://10.10.43.42:8080/cmoa-master-2.10.10.43.210.txt |
| cmoa-worker1-1 | 10.10.43.201 | 취약 | http://10.10.43.42:8080/cmoa-worker1-1.10.10.43.201.txt |
| cmoa-worker1-2 | 10.10.43.211 | 취약 | http://10.10.43.42:8080/cmoa-worker1-2.10.10.43.211.txt |
| cmoa-worker2-1 | 10.10.43.202 | 취약 | http://10.10.43.42:8080/cmoa-worker2-1.10.10.43.202.txt |
| cmoa-worker2-2 | 10.10.43.212 | 취약 | http://10.10.43.42:8080/cmoa-worker2-2.10.10.43.212.txt |
| cmoamgmtmaster | 10.10.43.227 | 취약 | http://10.10.43.42:8080/cmoamgmtmaster.10.10.43.227.txt |
| cmoamgmtworker | 10.10.43.228 | 취약 | http://10.10.43.42:8080/cmoamgmtworker.10.10.43.228.txt |
| db-env | 10.10.43.176 | 취약 | http://10.10.43.42:8080/db-env.10.10.43.176.txt |
| debian-12 | 10.10.43.173 | 취약 | http://10.10.43.42:8080/debian-12.10.10.43.173.txt |
| docker-node | 10.10.43.186 | 취약 | http://10.10.43.42:8080/docker-node.10.10.43.186.txt |
| docker | 10.10.43.180 | 취약 | http://10.10.43.42:8080/docker.10.10.43.180.txt |
| dsk-dev-data-common-a1 | 10.10.43.133 | 취약 | http://10.10.43.42:8080/dsk-dev-data-common-a1.10.10.43.133.txt |
| dsk-dev-data-common-b1 | 10.10.43.134 | 취약 | http://10.10.43.42:8080/dsk-dev-data-common-b1.10.10.43.134.txt |
| dsk-dev-data-common-c1 | 10.10.43.135 | 취약 | http://10.10.43.42:8080/dsk-dev-data-common-c1.10.10.43.135.txt |
| dsk-dev-data-druid-a1 | 10.10.43.114 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-a1.10.10.43.114.txt |
| dsk-dev-data-druid-a3 | 10.10.43.139 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-a3.10.10.43.139.txt |
| dsk-dev-data-druid-b1 | 10.10.43.115 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-b1.10.10.43.115.txt |
| dsk-dev-data-druid-c2 | 10.10.43.138 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-c2.10.10.43.138.txt |
| dsk-dev-data-druid-n1 | 10.10.43.117 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-n1.10.10.43.117.txt |
| dsk-dev-data-druid-n2 | 10.10.43.118 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-n2.10.10.43.118.txt |
| dsk-dev-data-druid-n3 | 10.10.43.119 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-n3.10.10.43.119.txt |
| dsk-dev-data-druid-small | 10.10.43.147 | 취약 | http://10.10.43.42:8080/dsk-dev-data-druid-small.10.10.43.147.txt |
| dsk-dev-data-kafka-a1 | 10.10.43.120 | 취약 | http://10.10.43.42:8080/dsk-dev-data-kafka-a1.10.10.43.120.txt |
| dsk-dev-data-kafka-b1 | 10.10.43.121 | 취약 | http://10.10.43.42:8080/dsk-dev-data-kafka-b1.10.10.43.121.txt |
| dsk-dev-data-kafka-c1 | 10.10.43.122 | 취약 | http://10.10.43.42:8080/dsk-dev-data-kafka-c1.10.10.43.122.txt |
| dsk-dev-data-kafka-n1 | 10.10.43.148 | 취약 | http://10.10.43.42:8080/dsk-dev-data-kafka-n1.10.10.43.148.txt |
| dsk-dev-demo-master | 10.10.43.105 | 취약 | http://10.10.43.42:8080/dsk-dev-demo-master.10.10.43.105.txt |
| dsk-dev-demo-worker | 10.10.43.106 | 취약 | http://10.10.43.42:8080/dsk-dev-demo-worker.10.10.43.106.txt |
| dsk-dev-master-a1 | 10.10.43.111 | 취약 | http://10.10.43.42:8080/dsk-dev-master-a1.10.10.43.111.txt |
| dsk-dev-master-b1 | 10.10.43.112 | 취약 | http://10.10.43.42:8080/dsk-dev-master-b1.10.10.43.112.txt |
| dsk-dev-master-c1 | 10.10.43.113 | 취약 | http://10.10.43.42:8080/dsk-dev-master-c1.10.10.43.113.txt |
| dsk-dev-process-a1 | 10.10.43.123 | 취약 | http://10.10.43.42:8080/dsk-dev-process-a1.10.10.43.123.txt |
| dsk-dev-process-a2 | 10.10.43.126 | 취약 | http://10.10.43.42:8080/dsk-dev-process-a2.10.10.43.126.txt |
| dsk-dev-process-a3 | 10.10.43.129 | 취약 | http://10.10.43.42:8080/dsk-dev-process-a3.10.10.43.129.txt |
| dsk-dev-process-a4 | 10.10.43.116 | 취약 | http://10.10.43.42:8080/dsk-dev-process-a4.10.10.43.116.txt |
| dsk-dev-process-b1 | 10.10.43.124 | 취약 | http://10.10.43.42:8080/dsk-dev-process-b1.10.10.43.124.txt |
| dsk-dev-process-b2 | 10.10.43.127 | 취약 | http://10.10.43.42:8080/dsk-dev-process-b2.10.10.43.127.txt |
| dsk-dev-process-b3 | 10.10.43.130 | 취약 | http://10.10.43.42:8080/dsk-dev-process-b3.10.10.43.130.txt |
| dsk-dev-process-b4 | 10.10.43.136 | 취약 | http://10.10.43.42:8080/dsk-dev-process-b4.10.10.43.136.txt |
| dsk-dev-process-c1 | 10.10.43.125 | 취약 | http://10.10.43.42:8080/dsk-dev-process-c1.10.10.43.125.txt |
| dsk-dev-process-c2 | 10.10.43.128 | 취약 | http://10.10.43.42:8080/dsk-dev-process-c2.10.10.43.128.txt |
| dsk-dev-process-c3 | 10.10.43.131 | 취약 | http://10.10.43.42:8080/dsk-dev-process-c3.10.10.43.131.txt |
| dsk-dev-process-c4 | 10.10.43.137 | 취약 | http://10.10.43.42:8080/dsk-dev-process-c4.10.10.43.137.txt |
| dsk-dev-prometheus | 10.10.43.142 | 취약 | http://10.10.43.42:8080/dsk-dev-prometheus.10.10.43.142.txt |
| dsk-dev-temp-a1 | 10.10.43.132 | 취약 | http://10.10.43.42:8080/dsk-dev-temp-a1.10.10.43.132.txt |
| dsk-dev-temp-b1 | 10.10.43.140 | 취약 | http://10.10.43.42:8080/dsk-dev-temp-b1.10.10.43.140.txt |
| dsk-dev-temp-c1 | 10.10.43.141 | 취약 | http://10.10.43.42:8080/dsk-dev-temp-c1.10.10.43.141.txt |
| dsk-minio-master1 | 10.10.43.235 | 취약 | http://10.10.43.42:8080/dsk-minio-master1.10.10.43.235.txt |
| dsk-minio-worker1 | 10.10.43.236 | 취약 | http://10.10.43.42:8080/dsk-minio-worker1.10.10.43.236.txt |
| infra-master | 10.10.43.224 | 취약 | http://10.10.43.42:8080/infra-master.10.10.43.224.txt |
| infra-worker001 | 10.10.43.225 | 취약 | http://10.10.43.42:8080/infra-worker001.10.10.43.225.txt |
| infra-worker002 | 10.10.43.226 | 취약 | http://10.10.43.42:8080/infra-worker002.10.10.43.226.txt |
| kafka-multi-0 | 10.10.43.151 | 취약 | http://10.10.43.42:8080/kafka-multi-0.10.10.43.151.txt |
| kafka-multi-1 | 10.10.43.152 | 취약 | http://10.10.43.42:8080/kafka-multi-1.10.10.43.152.txt |
| kafka-multi-2 | 10.10.43.153 | 취약 | http://10.10.43.42:8080/kafka-multi-2.10.10.43.153.txt |
| master | 10.10.43.189 | 취약 | http://10.10.43.42:8080/master.10.10.43.189.txt |
| opensearch-data-0 | 10.10.43.144 | 취약 | http://10.10.43.42:8080/opensearch-data-0.10.10.43.144.txt |
| opensearch-data-1 | 10.10.43.145 | 취약 | http://10.10.43.42:8080/opensearch-data-1.10.10.43.145.txt |
| opensearch-master-0 | 10.10.43.143 | 취약 | http://10.10.43.42:8080/opensearch-master-0.10.10.43.143.txt |
| opensearch-search-0 | 10.10.43.146 | 취약 | http://10.10.43.42:8080/opensearch-search-0.10.10.43.146.txt |
| opensearch-test-0 | 10.10.43.195 | 취약 | http://10.10.43.42:8080/opensearch-test-0.10.10.43.195.txt |
| opensearch-test-1 | 10.10.43.196 | 취약 | http://10.10.43.42:8080/opensearch-test-1.10.10.43.196.txt |
| opensearch-test-2 | 10.10.43.197 | 취약 | http://10.10.43.42:8080/opensearch-test-2.10.10.43.197.txt |
| opensearch00 | 10.10.43.194 | 취약 | http://10.10.43.42:8080/opensearch00.10.10.43.194.txt |
| opensearch01 | 10.10.43.192 | 취약 | http://10.10.43.42:8080/opensearch01.10.10.43.192.txt |
| opensearch02 | 10.10.43.193 | 취약 | http://10.10.43.42:8080/opensearch02.10.10.43.193.txt |
| opensearch03 | 10.10.43.199 | 취약 | http://10.10.43.42:8080/opensearch03.10.10.43.199.txt |
| openshift-4-13 | 10.10.43.171 | 취약 | http://10.10.43.42:8080/openshift-4-13.10.10.43.171.txt |
| oracle-linux-9 | 10.10.43.174 | 취약 | http://10.10.43.42:8080/oracle-linux-9.10.10.43.174.txt |
| redhat-7 | 10.10.43.177 | 취약 | http://10.10.43.42:8080/redhat-7.10.10.43.177.txt |
| redhat-8 | 10.10.43.178 | 취약 | http://10.10.43.42:8080/redhat-8.10.10.43.178.txt |
| redhat-9 | 10.10.43.179 | 취약 | http://10.10.43.42:8080/redhat-9.10.10.43.179.txt |
| release-master | 10.10.43.100 | 취약 | http://10.10.43.42:8080/release-master.10.10.43.100.txt |
| releaseworker | 10.10.43.101 | 취약 | http://10.10.43.42:8080/releaseworker.10.10.43.101.txt |
| ubuntu-18-04 | 10.10.43.164 | 취약 | http://10.10.43.42:8080/ubuntu-18-04.10.10.43.164.txt |
| ubuntu-20-04 | 10.10.43.165 | 취약 | http://10.10.43.42:8080/ubuntu-20-04.10.10.43.165.txt |
| ubuntu-22-04 | 10.10.43.166 | 취약 | http://10.10.43.42:8080/ubuntu-22-04.10.10.43.166.txt |
| ubuntu2004 | 10.10.43.181 | 취약 | http://10.10.43.42:8080/ubuntu2004.10.10.43.181.txt |
| ubuntu2204 | 10.10.43.182 | 취약 | http://10.10.43.42:8080/ubuntu2204.10.10.43.182.txt |
| worker01 | 10.10.43.190 | 취약 | http://10.10.43.42:8080/worker01.10.10.43.190.txt |
| worker02 | 10.10.43.191 | 취약 | http://10.10.43.42:8080/worker02.10.10.43.191.txt |
| datasaker | 10.10.43.109 | 양호 | http://10.10.43.42:8080/datasaker.10.10.43.109.txt |

View File

@@ -6,5 +6,4 @@ deprecation_warnings = false
display_skipped_hosts = no
ansible_home = .
stdout_callback = debug
host_key_checking = false
host_key_checking = False

View File

@@ -105,7 +105,6 @@ nas
10.10.43.228 ansible_port=2222 ansible_user=dev2
10.10.43.235 ansible_port=2222 ansible_user=dev2
10.10.43.236 ansible_port=2222 ansible_user=dev2
10.10.43.252 ansible_port=2222 ansible_user=dev2
[nas]
10.10.43.42 ansible_port=2222 ansible_user=exemdev2

View File

@@ -75,9 +75,9 @@ U_01() {
fi
fi
# sshd_config 파일의 존재 여부를 검색하고, 존재한다면 ssh 서비스가 실행 중일 때 점검할 별도의 배열에 저장함
sshd_config_count=`find / -name 'sshd_config' -type f 2> /dev/null | wc -l`
sshd_config_count=`find /etc/ssh -name 'sshd_config' -type f 2> /dev/null | wc -l`
if [ $sshd_config_count -gt 0 ]; then
sshd_config_file=(`find / -name 'sshd_config' -type f 2> /dev/null`)
sshd_config_file=(`find /etc/ssh -name 'sshd_config' -type f 2> /dev/null`)
fi
# /etc/services 파일 내 ssh 서비스의 포트 번호가 설정되어 있는지 확인하고, 설정되어 있다면 실행 중인지 확인함
if [ -f /etc/services ]; then
@@ -3684,15 +3684,15 @@ echo "#
echo "##############################################################################" >> $resultfile 2>&1
U_01 # root 계정 원격 접속 제한
#U_02 # 랜덤 패스워드를 사용중이라 제외
U_02 # 랜덤 패스워드를 사용중이라 제외
U_03 # 계정 잠금 임계값 설정
U_04 # 패스워드 파일 보호
U_05 # root 홈, 패스 디렉터리 권한 및 패스 설정
U_06 # 파일 및 디렉터리 소유자 설정
#U_06 # kubernetes를 사용함으로 어쩔 수 없는 부분
U_07 # /etc/passwd 파일 소유자 및 권한 설정
U_08 # /etc/shadow 파일 소유자 및 권한 설정
U_09 # /etc/hosts 파일 소유자 및 권한 설정
U_10 # /etc/(x)inetd.conf 파일 소유자 및 권한 설정
#U_10 # /etc/(x)inetd.conf 파일 소유자 및 권한 설정
U_11 # /etc/syslog.conf 파일 소유자 및 권한 설정
U_12 # /etc/services 파일 소유자 및 권한 설정
#U_13 # kubernetes 사용에 의한 어쩔 수 없음 제외
@@ -3769,4 +3769,4 @@ echo " ★ 양호 개수 = `cat $resultfile | grep
echo " ☆ N/A 개수 = `cat $resultfile | grep '결과 : N/A' | wc -l`" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1
echo "==============================================================================" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1

View File

@@ -75,9 +75,9 @@ U_01() {
fi
fi
# sshd_config 파일의 존재 여부를 검색하고, 존재한다면 ssh 서비스가 실행 중일 때 점검할 별도의 배열에 저장함
sshd_config_count=`find / -name 'sshd_config' -type f 2> /dev/null | wc -l`
sshd_config_count=`find /etc/ssh -name 'sshd_config' -type f 2> /dev/null | wc -l`
if [ $sshd_config_count -gt 0 ]; then
sshd_config_file=(`find / -name 'sshd_config' -type f 2> /dev/null`)
sshd_config_file=(`find /etc/ssh -name 'sshd_config' -type f 2> /dev/null`)
fi
# /etc/services 파일 내 ssh 서비스의 포트 번호가 설정되어 있는지 확인하고, 설정되어 있다면 실행 중인지 확인함
if [ -f /etc/services ]; then
@@ -98,7 +98,7 @@ U_01() {
sshd_permitrootlogin_no_count=`grep -vE '^#|^\s#' ${sshd_config_file[$j]} | grep -i 'permitrootlogin' | grep -i 'no' | wc -l`
if [ $sshd_permitrootlogin_no_count -eq 0 ]; then
echo "※ U-01 결과 : 취약(Vulnerable)" >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다." >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다.1" >> $resultfile 2>&1
return 0
fi
done
@@ -122,7 +122,7 @@ U_01() {
sshd_permitrootlogin_no_count=`grep -vE '^#|^\s#' ${sshd_config_file[$k]} | grep -i 'permitrootlogin' | grep -i 'no' | wc -l`
if [ $sshd_permitrootlogin_no_count -eq 0 ]; then
echo "※ U-01 결과 : 취약(Vulnerable)" >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다." >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다.2" >> $resultfile 2>&1
return 0
fi
done
@@ -143,8 +143,9 @@ U_01() {
do
sshd_permitrootlogin_no_count=`grep -vE '^#|^\s#' ${sshd_config_file[$i]} | grep -i 'permitrootlogin' | grep -i 'no' | wc -l`
if [ $sshd_permitrootlogin_no_count -eq 0 ]; then
echo "${sshd_config_file[$i]}" >> $resultfile 2>&1
echo "※ U-01 결과 : 취약(Vulnerable)" >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다." >> $resultfile 2>&1
echo " ssh 서비스를 사용하고, sshd_config 파일에서 root 계정의 원격 접속이 허용되어 있습니다.3" >> $resultfile 2>&1
return 0
fi
done
@@ -3617,15 +3618,15 @@ echo "#
echo "##############################################################################" >> $resultfile 2>&1
U_01 # root 계정 원격 접속 제한
# U_02 # 랜덤 패스워드를 사용중이라 제외
U_02 # 랜덤 패스워드를 사용중이라 제외
U_03 # 계정 잠금 임계값 설정
U_04 # 패스워드 파일 보호
U_05 # root 홈, 패스 디렉터리 권한 및 패스 설정
U_06 # 파일 및 디렉터리 소유자 설정
#U_06 # kubernetes를 사용함으로 어쩔 수 없는 부분
U_07 # /etc/passwd 파일 소유자 및 권한 설정
U_08 # /etc/shadow 파일 소유자 및 권한 설정
U_09 # /etc/hosts 파일 소유자 및 권한 설정
U_10 # /etc/(x)inetd.conf 파일 소유자 및 권한 설정
#U_10 # /etc/(x)inetd.conf 파일 소유자 및 권한 설정
U_11 # /etc/syslog.conf 파일 소유자 및 권한 설정
U_12 # /etc/services 파일 소유자 및 권한 설정
#U_13 # kubernetes 사용에 의한 어쩔 수 없음 제외
@@ -3702,4 +3703,4 @@ echo " ★ 양호 개수 = `cat $resultfile | grep
echo " ☆ N/A 개수 = `cat $resultfile | grep '결과 : N/A' | wc -l`" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1
echo "==============================================================================" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1
echo "" >> $resultfile 2>&1

View File

@@ -3,7 +3,6 @@
become: false
gather_facts: true
vars:
git_user: sa_8001
git_key: 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
ansible_ssh_common_args: '-o StrictHostKeyChecking=no'
roles:
- role: security_check

View File

@@ -0,0 +1,9 @@
[all:children]
server
nas
[server]
10.10.43.109 ansible_user=datasaker ansible_port=2222
[nas]
10.10.43.42 ansible_port=2222 ansible_user=exemdev2

View File

@@ -0,0 +1,9 @@
[defaults]
become = true
inventory = checklist
roles_path = roles
deprecation_warnings = false
display_skipped_hosts = no
ansible_home = .
stdout_callback = debug
host_key_checking = false

View File

@@ -0,0 +1,105 @@
[all]
10.10.43.51 ansible_port=2222 ansible_user=dev2
10.10.43.100 ansible_port=2222 ansible_user=dev2
10.10.43.101 ansible_port=2222 ansible_user=dev2
10.10.43.105 ansible_port=2222 ansible_user=dev2
10.10.43.106 ansible_port=2222 ansible_user=dev2
10.10.43.111 ansible_port=2222 ansible_user=dev2
10.10.43.112 ansible_port=2222 ansible_user=dev2
10.10.43.113 ansible_port=2222 ansible_user=dev2
10.10.43.114 ansible_port=2222 ansible_user=dev2
10.10.43.115 ansible_port=2222 ansible_user=dev2
10.10.43.116 ansible_port=2222 ansible_user=dev2
10.10.43.117 ansible_port=2222 ansible_user=dev2
10.10.43.118 ansible_port=2222 ansible_user=dev2
10.10.43.119 ansible_port=2222 ansible_user=dev2
10.10.43.120 ansible_port=2222 ansible_user=dev2
10.10.43.121 ansible_port=2222 ansible_user=dev2
10.10.43.122 ansible_port=2222 ansible_user=dev2
10.10.43.123 ansible_port=2222 ansible_user=dev2
10.10.43.124 ansible_port=2222 ansible_user=dev2
10.10.43.125 ansible_port=2222 ansible_user=dev2
10.10.43.126 ansible_port=2222 ansible_user=dev2
10.10.43.127 ansible_port=2222 ansible_user=dev2
10.10.43.128 ansible_port=2222 ansible_user=dev2
10.10.43.129 ansible_port=2222 ansible_user=dev2
10.10.43.130 ansible_port=2222 ansible_user=dev2
10.10.43.131 ansible_port=2222 ansible_user=dev2
10.10.43.132 ansible_port=2222 ansible_user=dev2
10.10.43.133 ansible_port=2222 ansible_user=dev2
10.10.43.134 ansible_port=2222 ansible_user=dev2
10.10.43.135 ansible_port=2222 ansible_user=dev2
10.10.43.136 ansible_port=2222 ansible_user=dev2
10.10.43.137 ansible_port=2222 ansible_user=dev2
10.10.43.138 ansible_port=2222 ansible_user=dev2
10.10.43.139 ansible_port=2222 ansible_user=dev2
10.10.43.140 ansible_port=2222 ansible_user=dev2
10.10.43.141 ansible_port=2222 ansible_user=dev2
10.10.43.142 ansible_port=2222 ansible_user=dev2
10.10.43.143 ansible_port=2222 ansible_user=dev2
10.10.43.144 ansible_port=2222 ansible_user=dev2
10.10.43.145 ansible_port=2222 ansible_user=dev2
10.10.43.146 ansible_port=2222 ansible_user=dev2
10.10.43.147 ansible_port=2222 ansible_user=dev2
10.10.43.148 ansible_port=2222 ansible_user=dev2
10.10.43.151 ansible_port=2222 ansible_user=dev2
10.10.43.152 ansible_port=2222 ansible_user=dev2
10.10.43.153 ansible_port=2222 ansible_user=dev2
10.10.43.164 ansible_port=2222 ansible_user=dev2
10.10.43.165 ansible_port=2222 ansible_user=dev2
10.10.43.166 ansible_port=2222 ansible_user=dev2
10.10.43.167 ansible_port=2222 ansible_user=dev2
10.10.43.168 ansible_port=2222 ansible_user=dev2
10.10.43.169 ansible_port=2222 ansible_user=dev2
10.10.43.171 ansible_port=2222 ansible_user=dev2
10.10.43.172 ansible_port=2222 ansible_user=dev2
10.10.43.173 ansible_port=2222 ansible_user=dev2
10.10.43.174 ansible_port=2222 ansible_user=dev2
10.10.43.175 ansible_port=2222 ansible_user=dev2
10.10.43.176 ansible_port=2222 ansible_user=dev2
10.10.43.177 ansible_port=2222 ansible_user=dev2
10.10.43.178 ansible_port=2222 ansible_user=dev2
10.10.43.179 ansible_port=2222 ansible_user=dev2
10.10.43.180 ansible_port=2222 ansible_user=dev2
10.10.43.181 ansible_port=2222 ansible_user=dev2
10.10.43.182 ansible_port=2222 ansible_user=dev2
10.10.43.185 ansible_port=2222 ansible_user=dev2
10.10.43.186 ansible_port=2222 ansible_user=dev2
10.10.43.187 ansible_port=2222 ansible_user=dev2
10.10.43.188 ansible_port=2222 ansible_user=dev2
10.10.43.189 ansible_port=2222 ansible_user=dev2
10.10.43.190 ansible_port=2222 ansible_user=dev2
10.10.43.191 ansible_port=2222 ansible_user=dev2
10.10.43.192 ansible_port=2222 ansible_user=dev2
10.10.43.193 ansible_port=2222 ansible_user=dev2
10.10.43.194 ansible_port=2222 ansible_user=dev2
10.10.43.199 ansible_port=2222 ansible_user=dev2
10.10.43.195 ansible_port=2222 ansible_user=dev2
10.10.43.196 ansible_port=2222 ansible_user=dev2
10.10.43.197 ansible_port=2222 ansible_user=dev2
10.10.43.200 ansible_port=2222 ansible_user=dev2
10.10.43.201 ansible_port=2222 ansible_user=dev2
10.10.43.202 ansible_port=2222 ansible_user=dev2
10.10.43.203 ansible_port=2222 ansible_user=dev2
10.10.43.204 ansible_port=2222 ansible_user=dev2
10.10.43.205 ansible_port=2222 ansible_user=dev2
10.10.43.206 ansible_port=2222 ansible_user=dev2
10.10.43.207 ansible_port=2222 ansible_user=dev2
10.10.43.208 ansible_port=2222 ansible_user=dev2
10.10.43.210 ansible_port=2222 ansible_user=dev2
10.10.43.211 ansible_port=2222 ansible_user=dev2
10.10.43.212 ansible_port=2222 ansible_user=dev2
10.10.43.213 ansible_port=2222 ansible_user=dev2
10.10.43.214 ansible_port=2222 ansible_user=dev2
10.10.43.215 ansible_port=2222 ansible_user=dev2
10.10.43.216 ansible_port=2222 ansible_user=dev2
10.10.43.217 ansible_port=2222 ansible_user=dev2
10.10.43.218 ansible_port=2222 ansible_user=dev2
10.10.43.224 ansible_port=2222 ansible_user=dev2
10.10.43.225 ansible_port=2222 ansible_user=dev2
10.10.43.226 ansible_port=2222 ansible_user=dev2
10.10.43.227 ansible_port=2222 ansible_user=dev2
10.10.43.228 ansible_port=2222 ansible_user=dev2
10.10.43.235 ansible_port=2222 ansible_user=dev2
10.10.43.236 ansible_port=2222 ansible_user=dev2
10.10.43.252 ansible_port=2222 ansible_user=dev2

View File

@@ -0,0 +1,38 @@
Role Name
=========
A brief description of the role goes here.
Requirements
------------
Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
Role Variables
--------------
A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.
Dependencies
------------
A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
Example Playbook
----------------
Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
- hosts: servers
roles:
- { role: username.rolename, x: 42 }
License
-------
BSD
Author Information
------------------
An optional section for the role authors to include contact information, or a website (HTML is not allowed).

View File

@@ -0,0 +1,15 @@
---
# defaults file for password
encrypt: 0 # strings 0 , encrypted 1
debug_mode: False
sshrootlogin: forced-commands-only
sshmainport: 2222
iptables_rules:
- { source: "10.10.45.0/24", target: "DROP" }
- { source: "10.10.47.0/24", target: "DROP" }
- { source: "10.10.48.0/24", target: "DROP" }
- { source: "10.10.50.0/24", target: "DROP" }
- { source: "10.10.37.0/24", target: "DROP" }
delete_rule: False
add_rule: True

View File

@@ -0,0 +1,44 @@
#!/usr/bin/python3
import base64, random, string, os
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad, unpad
try:
encrypt_flag=True if os.sys.argv[1].lower()=='1' else False
except Exception as err:
encrypt_flag=False
def generate_password(length=8, num_uppercase=1, num_lowercase=1, num_digits=1, num_sp_char=1):
sp_char = '!@#$'
all_chars = string.ascii_letters + string.digits + sp_char
password = [
*random.choices(string.ascii_uppercase, k=num_uppercase),
*random.choices(string.ascii_lowercase, k=num_lowercase),
*random.choices(string.digits, k=num_digits),
*random.choices(sp_char, k=num_sp_char)
]
remaining_length = length - (num_uppercase + num_lowercase + num_digits + num_sp_char)
password += random.choices(all_chars, k=remaining_length)
random.shuffle(password)
return ''.join(password)
def encrypt(plain_text, key):
manual_iv = b'PhilinnovatorDEV'
cipher = AES.new(key, AES.MODE_CBC, iv=manual_iv)
ct_bytes = cipher.encrypt(pad(plain_text.encode(), 16))
ct = base64.b64encode(ct_bytes).decode('utf-8')
return ct
key = b'PhilinnovatorDEVPhilinnovatorDEV'
plain_text = generate_password()
if encrypt_flag:
encrypted_text = encrypt(plain_text, key)
print(encrypted_text)
else:
print(plain_text)

View File

@@ -0,0 +1,11 @@
import hvac
str_url = "http://10.10.43.98:31080"
str_token = "hvs.CAESIMV6zCg-GpUP4pQgVA5f1ZXkgyJZrqOC6QDCegrpiAX9Gh4KHGh2cy5ORkpkc2ZyVUxYd09qUVFtQldRNDBjS3I"
client = hvac.Client(url=str_url, token=str_token)
str_mount_point = 'kv'
str_secret_path = 'host1'
read_secret_result = client.secrets.kv.v1.read_secret(mount_point=str_mount_point, path=str_secret_path)
print(read_secret_result)

View File

@@ -0,0 +1,108 @@
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import os, sys, time, errno, socket, signal, psutil, random, logging.handlers, subprocess, paramiko, hvac
from xlwt import Workbook, XFStyle, Borders, Font, Pattern
from socket import error as SocketError
process_time = time.strftime("%Y%m%d_%H%M", time.localtime())
excel_file_name = '/mnt/e/excel/{}.xls'.format(process_time)
def process_close(flag=True, result=''):
if flag:
print("[Success]")
else:
print("[Fail]:{}".format(result))
sys.exit(0)
def set_header(sheet, header_list):
# Font settings
font = Font()
font.bold = True
# Border settings
borders = Borders()
borders.left = Borders.THIN
borders.right = Borders.THIN
borders.top = Borders.THIN
borders.bottom = Borders.THIN
# Background color settings
pattern = Pattern()
pattern.pattern = Pattern.SOLID_PATTERN
pattern.pattern_fore_colour = 22  # #E2EFDA corresponds to colour index 22 in xlwt.
hdrstyle = XFStyle()
hdrstyle.font = font
hdrstyle.borders = borders
hdrstyle.pattern = pattern
for idx, header in enumerate(header_list):
sheet.write(0, idx, header, hdrstyle)
sheet.col(idx).width = len(header) * 800
def write_data(sheet, data_list):
datestyle = XFStyle()
datestyle.num_format_str = 'YYYY-MM-DD'
for row_num, data in enumerate(data_list, start=1):
for col_num, cell_data in enumerate(data):
if col_num == 7:
sheet.write(row_num, col_num, cell_data, datestyle)
elif col_num in [1, 4, 5]:
formatted_data = u'{}'.format(cell_data) if cell_data else ''
sheet.write(row_num, col_num, formatted_data)
else:
sheet.write(row_num, col_num, cell_data)
def excel_write(header_list=[], data_list=[], filename='', sheetTitle=''):
workbook = Workbook(style_compression=2, encoding='utf-8')
sheet = workbook.add_sheet(sheetTitle)
set_header(sheet, header_list)
write_data(sheet, data_list)
sheet.panes_frozen = True
sheet.vert_split_pos = 0
sheet.horz_split_pos = 1
workbook.save(filename)
def main():
header_list=['번호','호스트 유형','호스트명','호스트 IP','포트번호','프로토콜','인증방법','1차 로그인 계정명','1차 로그인 비밀번호','1차 로그인 계정명','2차 로그인 비밀번호','용도','비고']
data_list=[]
openfile=open('/tmp/host_list','r')
readfile=openfile.readlines()
openfile.close()
for idx, host_data in enumerate(readfile):
try:
if idx==0: continue
host_num=idx
hosttype=host_data.strip().split(' ')[0]
print(hosttype)
hostname=host_data.strip().split(' ')[1]
host_ips=host_data.strip().split(' ')[2]
port_num=int(host_data.strip().split(' ')[3])
protocol='SSH'
auth_con='Password'
username=host_data.strip().split(' ')[4]
first_pw=host_data.strip().split(' ')[5]
rootuser=host_data.strip().split(' ')[6]
secon_pw=host_data.strip().split(' ')[7]
descript='-'
remarks_='-'
data_list.append([host_num,hosttype,hostname,host_ips,port_num,protocol,auth_con,username,first_pw,rootuser,secon_pw,descript,remarks_,])
except:
continue
excel_write(header_list, data_list, excel_file_name, 'TEST')
DEBUG=False
try:
if os.sys.argv[1]: DEBUG=True
except:
pass
main()
process_close()

View File

@@ -0,0 +1,21 @@
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import base64, random, string, os
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad, unpad
try:
encrypted_text=os.sys.argv[1]
except:
encrypted_text="q6i1/JxyNe1OUrO0JKu+Z4WQTyQZam2yIJTp43dl1pI="
def decrypt(ct, key):
manual_iv = b'PhilinnovatorDEV'
ct_bytes = base64.b64decode(ct)
cipher = AES.new(key, AES.MODE_CBC, iv=manual_iv)
return unpad(cipher.decrypt(ct_bytes), 16).decode('utf-8')
key = b'PhilinnovatorDEVPhilinnovatorDEV'
print(decrypt(encrypted_text, key))
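This decryptor shares the hard-coded AES-256-CBC key and IV with the gen_password generator above, so a round trip on the control node is straightforward. gen_password is the name used in the role tasks; decrypt_password.py is a hypothetical name for the script above, and both require pycryptodome.

```sh
# Round trip: generate an encrypted password, then decrypt it locally.
ENC=$(./gen_password 1)        # "1" -> print the AES-encrypted, base64 form
./decrypt_password.py "$ENC"   # prints the plaintext back
./gen_password 0               # any other argument -> plaintext directly
```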

View File

@@ -0,0 +1,45 @@
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import base64, random, string, os
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad, unpad
try:
encrypt_flag=True if os.sys.argv[1].lower()=='1' else False
except Exception as err:
encrypt_flag=False
def generate_password(length=12, num_uppercase=3, num_lowercase=4, num_digits=3, num_sp_char=2):
sp_char = '!@#$'
all_chars = string.ascii_letters + string.digits + sp_char
password = [
*random.choices(string.ascii_uppercase, k=num_uppercase),
*random.choices(string.ascii_lowercase, k=num_lowercase),
*random.choices(string.digits, k=num_digits),
*random.choices(sp_char, k=num_sp_char)
]
remaining_length = length - (num_uppercase + num_lowercase + num_digits + num_sp_char)
password += random.choices(all_chars, k=remaining_length)
random.shuffle(password)
return ''.join(password)
def encrypt(plain_text, key):
manual_iv = b'PhilinnovatorDEV'
cipher = AES.new(key, AES.MODE_CBC, iv=manual_iv)
ct_bytes = cipher.encrypt(pad(plain_text.encode(), 16))
ct = base64.b64encode(ct_bytes).decode('utf-8')
return ct
key = b'PhilinnovatorDEVPhilinnovatorDEV'
plain_text = generate_password()
if encrypt_flag:
encrypted_text = encrypt(plain_text, key)
print(encrypted_text)
else:
print(plain_text)

View File

@@ -0,0 +1,17 @@
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import hvac
import os
hostname=os.sys.argv[1]
str_url = "http://10.10.43.240:30803"
client = hvac.Client(url=str_url)
client.auth.approle.login(role_id="e96c5fd8-abde-084a-fde7-7450a9348a70", secret_id="5371706b-414a-11d3-f3fd-6cf98871aad1")
try:
data = client.secrets.kv.v2.read_secret_version(mount_point='host', path=hostname, raise_on_deleted_version=True)['data']['data']
print(data)
except Exception as err:
print(err)

View File

@@ -0,0 +1,21 @@
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import hvac
import os
hostname=os.sys.argv[1]
accountid=os.sys.argv[2]
password=os.sys.argv[3]
adminuser=os.sys.argv[4]
adminpass=os.sys.argv[5]
str_url = "http://10.10.43.240:30803"
client = hvac.Client(url=str_url)
client.auth.approle.login(role_id="e96c5fd8-abde-084a-fde7-7450a9348a70", secret_id="5371706b-414a-11d3-f3fd-6cf98871aad1")
client.secrets.kv.v2.create_or_update_secret(
mount_point='host',
path=hostname,
secret=dict(accountid=f'{accountid}',password=f'{password}',adminuser=f'{adminuser}',adminpass=f'{adminpass}')
)
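Together with the read script above it, this gives a simple per-host credential store in the kv v2 engine mounted at host. A usage sketch from the control node; vault_put.py and vault_get.py are hypothetical file names for the two hvac scripts, and the credentials are placeholders.

```sh
# Store freshly generated credentials for a host, then read them back.
./vault_put.py dsk-dev-master-a1 dev2 'S3cret!9ab' root 'Adm1n!2cd'
./vault_get.py dsk-dev-master-a1
# {'accountid': 'dev2', 'password': 'S3cret!9ab', 'adminuser': 'root', 'adminpass': 'Adm1n!2cd'}
```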

View File

@@ -0,0 +1,16 @@
---
- name: Reload systemd configuration
ansible.builtin.systemd:
daemon_reload: True
- name: Restart teleport service
ansible.builtin.systemd:
name: teleport
enabled: true
state: restarted
- name: restart sshd
service:
name: sshd
state: restarted
enabled: true

View File

@@ -0,0 +1,52 @@
galaxy_info:
author: your name
description: your role description
company: your company (optional)
# If the issue tracker for your role is not on github, uncomment the
# next line and provide a value
# issue_tracker_url: http://example.com/issue/tracker
# Choose a valid license ID from https://spdx.org - some suggested licenses:
# - BSD-3-Clause (default)
# - MIT
# - GPL-2.0-or-later
# - GPL-3.0-only
# - Apache-2.0
# - CC-BY-4.0
license: license (GPL-2.0-or-later, MIT, etc)
min_ansible_version: 2.1
# If this a Container Enabled role, provide the minimum Ansible Container version.
# min_ansible_container_version:
#
# Provide a list of supported platforms, and for each platform a list of versions.
# If you don't wish to enumerate all versions for a particular platform, use 'all'.
# To view available platforms and versions (or releases), visit:
# https://galaxy.ansible.com/api/v1/platforms/
#
# platforms:
# - name: Fedora
# versions:
# - all
# - 25
# - name: SomePlatform
# versions:
# - all
# - 1.0
# - 7
# - 99.99
galaxy_tags: []
# List tags for your role here, one per line. A tag is a keyword that describes
# and categorizes the role. Users find roles by searching for tags. Be sure to
# remove the '[]' above, if you add tags to this list.
#
# NOTE: A tag is limited to a single word comprised of alphanumeric characters.
# Maximum 20 tags per role.
dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

View File

@@ -0,0 +1,103 @@
---
- name: "Create datasaker group"
  ansible.builtin.group:
    name: "datasaker"
    state: present
  when:
    - add_rule == True

- name: Ensure user datasaker exists
  user:
    name: "{{ item }}"
    create_home: yes
    home: "/home/{{ item }}"
    group: datasaker
    shell: /bin/bash
  with_items:
    - datasaker
  when:
    - add_rule == True

- name: "Ensure .ssh directory exists for datasaker"
  file:
    path: /home/datasaker/.ssh
    state: directory
    owner: datasaker
    group: datasaker
    mode: '0700'
  when:
    - add_rule == True

- name: "Add authorized key for datasaker"
  authorized_key:
    user: datasaker
    key: "{{ item }}"
  with_items:
    - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDRP/Kjn7UBudTO4ZLtWXRJNDcOPGbm+5jLKax+1tVgN2n0MCmwwrbFJQJvdaE/wp4+PnMtEyt+IqdwFdUDah8tu9CIYZ2Jk2T18oU7hYGvymh+QJmZgCNvYcmM9ATJbXpns7y8VLDVbkSq9EJIB+emLt1ZV/C8cyvhlmBUwGQA6c3zMgzWl9MT0HLa7H88cNVVknZPY0vGIw+H0Y2JtDr62xyVNT7w8B+jh7Yu6nCnQchwx3IRWGATuKfi2FB3rhkDqNvM1h00JJosu5ooBn3g5xll+w+sVKIQxEWShI9zatYP9/zrce+uVYeZLfz52X8giJ9dns66vqEKdJtdp4By5RPxRSsdQ2QGAQ0UuBHKgweU2EzivLynu49oiShAiJPxmru4TiGtchl52dvw/E9rjZiCKTq697azHHLbwTiOgbHpnu7GrxNRMdXCON70RYJpfERg/SGxxmUNF9OhYUeQJGNc8DcWnlBUrT/9Wi3Ryh1rKx2wtZt6eDkrehJ1lgU="
    - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDmxGUDo5rdB/XA+cyH4a7Kn8zGWHqbL0AZDL55j5JLRLXC/z482Rp2cIx/FsQRtwEslEVXHHSowpJWHvQ4Z6NcInh0/0psJK2K8qnApLDHhPoiQzpGL+nG4JIho/10QPGpJ2aDcXdushvUME97j0A8hfaoR2xhBl2C9r865Vred0M971A5SRchwN/cmsTh2OMYGXKHD9RC6OFud2sQjyidkSTW58yBoN2B5CoAO4GMV09jX6Wp43jot19xJ5lX65NAHLsNIXMWiURmQDieIKqEiwWlPgwo7geErHlMOoNoypU9yTaN9NMYWZBG1xVL5skjmkdTEd+cnHBLAvhVtW1w5pOA7S8OUXkmiu0UITLYyWfzUx4uwzb7nGcb6aDboRVX6w8H4+GVgpYWJq+fh0ZZ9JbsdP6+PjRz1vgptM7K4Ji5ZRvqV5WMT0cvpySBaJakLSiPSa+dxGi6nfowXvUEAzMIVyaScNgCs1/NpdgN8dwffZlYB9WBUxY+5IjBQc8="
    - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKDxtkcfx2ITlT2Yh7ZCT79do/25YQ2vROz38m8veAuBhOw+75oZJ4nN//zOWaaMvpC3Z7NIzOR+3UeukhnLZ591q8AaHcKjV8JEJMo2pvpH1vdLcTL9baLqWrxzgRimnZUNf5n5HNr+AKoXuPp//aVSJSoeznb66r04/rJSetT0QGDC8Kj5Q+MNvdd0/3U/nu7JxW9LIEaLoeiX6mVb4PpV7kl3rI3Vut/GnWakOhbS4yNvIFdR6d8rv305/BXJOz/aWy+0j7qK+NBzbSsI/l0vVUHfeD3whYGePCpWmj73ZsMTMjIjrC8DpRQlOJlAZ0GVpQnd/ayIWi4+V8VjvFcd6vSqrhhsNoOyo0Y/6cyO6iyvKqohMK6+HF1w6aXoaGCFFSl/3gw63saNAsdZPArnwf5yZ6GfPa/9bRn2k9g5xfp97Itpo6Iqq+PuRcZOes0EiIQe2hOoYQEIHIRhf8CZ+Xf6W1+XZB+WxEzUe4GCCwgUdTB6RIr4ThDxwCBV0="
  when:
    - add_rule == True

- name: "sudoers_users file"
  file:
    path: /etc/sudoers.d/sudoers_users
    state: touch
  when:
    - add_rule == True

- name: "Allow user to sudo"
  lineinfile:
    path: /etc/sudoers.d/sudoers_users
    line: "{{ item }} ALL=(ALL) NOPASSWD:ALL"
    state: present
  with_items:
    - datasaker
  when:
    - add_rule == True

- name: "selinux permissive"
  command: "setenforce 0"
  ignore_errors: yes
  when:
    - ansible_facts.os_family == "RedHat"

- name: "firewalld stop"
  systemd:
    name: firewalld
    state: stopped
    enabled: false
  ignore_errors: yes
  when:
    - ansible_facts.os_family == "RedHat"

- name: Remove existing Port lines
  lineinfile:
    path: /etc/ssh/sshd_config
    regexp: '^Port'
    state: absent

- name: SSH Listen on Main Port
  lineinfile:
    dest: /etc/ssh/sshd_config
    insertbefore: '^#*AddressFamily'
    line: 'Port {{ sshmainport }}'
    state: present
    owner: root
    group: root
    mode: 0640
  notify: restart sshd

- name: "Create sshd_config.d directory"
  ansible.builtin.file:
    path: "/etc/ssh/sshd_config.d/"
    state: directory
    recurse: yes
    owner: root
    group: root

- name: "Setting sshd allow users"
  template:
    src: allow_users.j2
    dest: "/etc/ssh/sshd_config.d/allow_users.conf"
  notify: restart sshd

View File

@@ -0,0 +1,36 @@
---
- name: get password
  command: "{{ role_path }}/files/gen_password {{ encrypt }}"
  register: user_password
  delegate_to: 127.0.0.1
  when: manual_password is not defined

- name: get admin password
  command: "{{ role_path }}/files/gen_password {{ encrypt }}"
  register: admin_password
  delegate_to: 127.0.0.1
  when: manual_password is not defined

- name: set fact user password
  block:
    - set_fact:
        user_password: "{{ user_password.stdout }}"
  rescue:
    - set_fact:
        user_password: "{{ manual_password }}"
  always:
    - debug:
        msg: "{{ username }} : {{ user_password }}"
      when: debug_mode == True

- name: set fact admin password
  block:
    - set_fact:
        admin_password: "{{ admin_password.stdout }}"
  rescue:
    - set_fact:
        admin_password: "{{ manual_password }}"
  always:
    - debug:
        msg: "{{ adminuser }} : {{ admin_password }}"
      when: debug_mode == True

View File

@@ -0,0 +1,21 @@
---
- include_tasks: 99_decrypt_password.yml
  when:
    - encrypt == 1
    - manual_password is not defined

- name: user password change
  user:
    name: "{{ item }}"
    password: "{{ user_password | password_hash('sha512') }}"
    state: present
  with_items:
    - "{{ username }}"

- name: admin password change
  user:
    name: "{{ item }}"
    password: "{{ admin_password | password_hash('sha512') }}"
    state: present
  with_items:
    - "{{ adminuser }}"

View File

@@ -0,0 +1,21 @@
---
- name: Check if ansible_port is defined
  set_fact:
    ansible_port: "{{ ansible_port | default(22) }}"

- debug:
    msg: "{{ ansible_distribution }} {{ ansible_hostname }} {{ ansible_default_ipv4.address }} {{ ansible_port }} {{ username }} {{ user_password }} {{ adminuser }} {{ admin_password }}"
  when: debug_mode == True

- name: put vault
  command: "{{ role_path }}/files/vault_put {{ ansible_default_ipv4.address }} {{ username }} {{ user_password }} {{ adminuser }} {{ admin_password }}"
  delegate_to: 127.0.0.1

- name: get vault
  command: "{{ role_path }}/files/vault_get {{ ansible_default_ipv4.address }} {{ username }} {{ user_password }} {{ adminuser }} {{ admin_password }}"
  register: get_vault
  delegate_to: 127.0.0.1

- debug:
    msg: "{{ get_vault.stdout_lines }}"
  when: debug_mode == True

View File

@@ -0,0 +1,27 @@
---
- name: user_password decrypt
  command: "{{ role_path }}/files/decrypt_password {{ user_password }}"
  register: user_password
  delegate_to: 127.0.0.1

- name: admin_password decrypt
  command: "{{ role_path }}/files/decrypt_password {{ admin_password }}"
  register: admin_password
  delegate_to: 127.0.0.1
  when:
    - encrypt == 1
    - manual_password is not defined

- name: admin_password re fact
  set_fact:
    admin_password: "{{ admin_password.stdout }}"
  when:
    - encrypt == 1
    - manual_password is not defined

- name: user_password re fact
  set_fact:
    user_password: "{{ user_password.stdout }}"
  when:
    - encrypt == 1
    - manual_password is not defined

View File

@@ -0,0 +1,15 @@
---
- include: 00_host_setting.yml
  tags: host

- include: 01_get_password.yml
  tags: password

- include: 02_change_password.yml
  tags: change

- include: 03_vault.yml
  tags: vault

#
#- include: 04_excel_export.yml
#  tags: excel

View File

@@ -0,0 +1,22 @@
AllowUsers datasaker@10.10.43.*
AllowUsers *@10.20.142.*
{% if ansible_distribution == "Ubuntu" %}
AllowUsers ubuntu@10.10.43.*
{% endif %}
{% if ansible_distribution == "CentOS" %}
AllowUsers centos@10.10.43.*
{% endif %}
{% if ansible_distribution == "RedHat" %}
AllowUsers redhat@10.10.43.*
{% endif %}
{% if admin_users is defined %}
{% for user in admin_users %}
AllowUsers {{ user.name }}@{{ user.ip }}
{% endfor %}
{% endif %}
{% if allow_users is defined %}
{% for user in allow_users %}
AllowUsers {{ user.name }}@{{ user.ip }}
{% endfor %}
{% endif %}
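
To preview what this template renders outside of Ansible, a small jinja2 snippet can substitute representative values; the sample users and the template path below are assumptions made for illustration, not values taken from the inventory.

# Render allow_users.j2 locally to preview the generated sshd AllowUsers lines.
# The variable values and the template path are illustrative only.
from jinja2 import Template

with open("templates/allow_users.j2") as fh:  # adjust to the role's template path
    template = Template(fh.read())

print(template.render(
    ansible_distribution="Ubuntu",
    admin_users=[{"name": "dev2", "ip": "10.10.43.*"}],
    allow_users=[{"name": "deploy", "ip": "10.20.142.*"}],
))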

View File

@@ -0,0 +1,2 @@
localhost

View File

@@ -0,0 +1,5 @@
---
- hosts: localhost
  remote_user: root
  roles:
    - password

View File

@@ -0,0 +1,2 @@
---
# vars file for password

View File

@@ -0,0 +1,8 @@
---
debian_retry: 5
debian_minlen: 8
debian_lcredit: -1
debian_ucredit: -1
debian_dcredit: -1
debian_ocredit: -1

View File

@@ -0,0 +1,10 @@
---
- name: Reload systemd configuration
  ansible.builtin.systemd:
    daemon_reload: True

- name: restart sshd
  service:
    name: sshd
    state: restarted
    enabled: true

View File

@@ -0,0 +1,13 @@
---
- name: search non-existent device
  shell: find /dev -type f -exec ls -l {} \; | awk '{print $NF}'
  register: search_result

- debug:
    msg: "Non-existent devices found: {{ search_result.stdout_lines }}"

- name: delete non-existent device
  file:
    path: "{{ item }}"
    state: absent
  with_items: "{{ search_result.stdout_lines }}"

View File

@@ -0,0 +1,59 @@
---
- name: shadow mode change
  file:
    path: /etc/shadow
    mode: 0400

- name: hosts mode change
  file:
    path: /etc/hosts
    #mode: u=rw,g=r,o=r
    mode: 0600

- name: rsyslog mode change
  file:
    path: /etc/rsyslog.conf
    mode: 0640

- name: crontab mode change
  file:
    path: /usr/bin/crontab
    mode: 0750

- name: cron file mode change
  file:
    path: "{{ item }}"
    mode: 0640
  with_items:
    - /etc/crontab
    - /etc/cron.hourly/.placeholder
    - /etc/cron.daily/logrotate
    - /etc/cron.daily/apt-compat
    - /etc/cron.daily/popularity-contest
    - /etc/cron.daily/apport
    - /etc/cron.daily/.placeholder
    - /etc/cron.daily/update-notifier-common
    - /etc/cron.daily/bsdmainutils
    - /etc/cron.daily/dpkg
    - /etc/cron.daily/man-db
    - /etc/cron.weekly/.placeholder
    - /etc/cron.weekly/man-db
    - /etc/cron.weekly/update-notifier-common
    - /etc/cron.monthly/.placeholder

- name: cron file owner change
  file:
    path: /var/spool/cron/atjobs/.SEQ
    owner: root

- name: at mode change
  file:
    path: /usr/bin/at
    mode: 0640   # was 640; without the leading zero this is read as decimal, not octal 0640

- name: create at.allow file
  file:
    path: /etc/at.allow
    state: touch
    mode: 0640
    owner: root

View File

@@ -0,0 +1,11 @@
- name: Configure ssh root login to no
  lineinfile:
    dest: /etc/ssh/sshd_config
    regexp: '^(#)?PermitRootLogin.*'
    line: 'PermitRootLogin no'
    insertbefore: '^Match.*'
    state: present
    owner: root
    group: root
    mode: 0640
  notify: restart sshd

View File

@@ -0,0 +1,29 @@
---
- name: Setting EXEM Banner (Debian)
  template:
    src: banner.j2
    dest: /etc/update-motd.d/00-header
    mode: 0755
    owner: root
    group: root

- name: Setting Sysinfo
  template:
    src: sysinfo.j2
    dest: /usr/share/landscape/landscape-sysinfo.wrapper
    mode: 0755
    owner: root
    group: root

- name: Delete ETC file
  file:
    path: "{{ item }}"
    state: absent
  with_items:
    - /etc/update-motd.d/10-help-text
    - /etc/update-motd.d/50-motd-news
    - /etc/update-motd.d/85-fwupd
    - /etc/update-motd.d/90-updates-available
    - /etc/update-motd.d/91-release-upgrade
    - /etc/update-motd.d/95-hwe-eol
    - /etc/update-motd.d/98-fsck-at-reboot

View File

@@ -0,0 +1,16 @@
---
- name: Setting Password Rule (Debian)
  template:
    src: common-password.j2
    dest: /etc/pam.d/common-password
    owner: root
    group: root
    mode: u=rw,g=r,o=r

- name: Setting Password Auth Rule (Debian)
  template:
    src: common-auth.j2
    dest: /etc/pam.d/common-auth
    owner: root
    group: root
    mode: u=rw,g=r,o=r

View File

@@ -0,0 +1,18 @@
---
# Configure the banner shown on SSH login
- include: debian_setting_banner.yml
  when: ansible_facts.os_family == 'Debian'

# Disable SSH login as the root user
- include: all_setting_root_ssh.yml

# Configure the password policy
- include: debian_setting_password_rule.yml
  when: ansible_facts.os_family == 'Debian'

# Tighten permissions on selected files
- include: all_setting_mode_change.yml

# Find and remove unnecessary device entries under /dev
- include: all_setting_device_organize.yml

View File

@@ -0,0 +1,18 @@
#!/bin/sh
echo "-------------------------------------------------------------------------------\n"
echo " _╓g@DDKg╓_ \033[0;31m=╗╗╗╗,\033[0;0m \033[0;34m,╗╗╗╗╤\033[0;0m ,╔╗DDKg╔_ ╓g@DD╗╔_ ╓g@DD╗╔_"
echo " ╓D╝╙\` \`╠╠H \033[0;31m╙╠╠╠╠▒\033[0;0m \033[0;34mÆ╬╬╬╬╩\033[0;0m _j╠╙\` 1╠R j╠R^ \`╙╠▒,j╠R^ \`╙╠▒,"
echo " 1╠^ ,╠╝ \033[0;31m╝╠R\033[0;0m \033[0;34m╓▓╬╬╬╝\033[0;0m j╠H 1╠^ ╠╠ ╚╠H ╚╠H"
echo "j╠⌐ j╠Γ \033[0;31m'\033[0;0m \033[0;34mÆ╬╬╬╬╙\033[0;0m ╠H ╔╠R ╠╠ ╠╠ ╠╠"
echo "╠╠ ╒╠R \033[0;34m╔╣╬╬╬\033[0;33m╬▒\033[0;0m j╠H _D╝\` ╠╠ ╠╠ ╠╠"
echo "'╠H 1╠^ .. \033[0;34m,╣╬╬╬╣\033[0;33m╬╣╣▓┐\033[0;0m ╠D ╔╚╙ ╔_ ╠╠ ╠╠ ╠╠"
echo " '╠▒╓░╙ _╔╔^ \033[0;34m¢╬╬╬╬╩\033[0;33m ╚╣╣╣╣▌\033[0;0m ╚▒╓░╙ ╔░H ╠╠ ╠╠ ╠╠"
echo " ⁿ╚╠K≥╔╔╔1▒╝^ \033[0;34m╒▓╬╬╬╩^\033[0;33m \`╣╣╣╣▓╕\033[0;0m \`╚╠▒g╔╔╔gD╝╙ ╠╠ ╠╠ ╠╠\n"
echo "-------------------------------------------------------------------------------"
echo ""
echo " - 알 림 - "
echo ""
echo " 현재 접속하신 서버는 SaaS기술연구팀 개발 서버 입니다. "
echo " 인가되지 않은 사용자의 접근, 수정 등 행위 시 처벌을 받을 수 있습니다. "
echo ""
echo "-------------------------------------------------------------------------------"

View File

@@ -0,0 +1,29 @@
#
# /etc/pam.d/common-auth - authentication settings common to all services
#
# This file is included from other service-specific PAM config files,
# and should contain a list of the authentication modules that define
# the central authentication scheme for use on the system
# (e.g., /etc/shadow, LDAP, Kerberos, etc.). The default is to use the
# traditional Unix authentication mechanisms.
#
# As of pam 1.0.1-6, this file is managed by pam-auth-update by default.
# To take advantage of this, it is recommended that you configure any
# local modules either before or after the default block, and use
# pam-auth-update to manage selection of other modules. See
# pam-auth-update(8) for details.
# here are the per-package modules (the "Primary" block)
auth [success=1 default=ignore] pam_unix.so nullok
# here's the fallback if no module succeeds
auth requisite pam_deny.so
# prime the stack with a positive return value if there isn't one already;
# this avoids us returning an error just because nothing sets a success code
# since the modules above will each just jump around
auth required pam_permit.so
# and here are more per-package modules (the "Additional" block)
auth optional pam_cap.so
# end of pam-auth-update config
## Add Ansible Playbook - Security_Settings ##
auth required pam_tally2.so onerr=fail even_deny_root deny=5 unlock_time=300

View File

@@ -0,0 +1,37 @@
#
# /etc/pam.d/common-password - password-related modules common to all services
#
# This file is included from other service-specific PAM config files,
# and should contain a list of modules that define the services to be
# used to change user passwords. The default is pam_unix.
# Explanation of pam_unix options:
#
# The "sha512" option enables salted SHA512 passwords. Without this option,
# the default is Unix crypt. Prior releases used the option "md5".
#
# The "obscure" option replaces the old `OBSCURE_CHECKS_ENAB' option in
# login.defs.
#
# See the pam_unix manpage for other options.
# As of pam 1.0.1-6, this file is managed by pam-auth-update by default.
# To take advantage of this, it is recommended that you configure any
# local modules either before or after the default block, and use
# pam-auth-update to manage selection of other modules. See
# pam-auth-update(8) for details.
# here are the per-package modules (the "Primary" block)
password [success=1 default=ignore] pam_unix.so sha512
# here's the fallback if no module succeeds
password requisite pam_deny.so
# prime the stack with a positive return value if there isn't one already;
# this avoids us returning an error just because nothing sets a success code
# since the modules above will each just jump around
password required pam_permit.so
# and here are more per-package modules (the "Additional" block)
# end of pam-auth-update config
password required pam_pwhistory.so remember=5
## Add Ansible Playbook - Security_Settings ##
password requisite pam_pwquality.so retry={{ debian_retry }} minlen={{ debian_minlen }} lcredit={{ debian_lcredit }} ucredit={{ debian_ucredit }} dcredit={{ debian_dcredit }} ocredit={{ debian_ocredit }}

View File

@@ -0,0 +1,19 @@
#!/bin/sh
# pam_motd does not carry the environment
[ -f /etc/default/locale ] && . /etc/default/locale
export LANG
cores=$(grep -c ^processor /proc/cpuinfo 2>/dev/null)
[ "$cores" -eq "0" ] && cores=1
threshold="${cores:-1}.0"

if [ $(echo "`cut -f1 -d ' ' /proc/loadavg` < $threshold" | bc) -eq 1 ]; then
    echo
    echo -n "  System information as of "
    /bin/date
    echo
    /usr/bin/landscape-sysinfo
else
    echo
    echo " System information disabled due to load higher than $threshold"
fi
echo ""

View File

@@ -0,0 +1,20 @@
---
- hosts: all
  become: true
  gather_facts: true
  vars:
    username: datasaker
    adminuser: root
    #manual_password: saasadmin1234
    sshmainport: 2222
    iptables_rules:
      - { source: "10.10.45.0/24", target: "DROP" }
      - { source: "10.10.47.0/24", target: "DROP" }
      - { source: "10.10.48.0/24", target: "DROP" }
      - { source: "10.10.50.0/24", target: "DROP" }
      - { source: "10.10.37.0/24", target: "DROP" }
    delete_rule: False
    add_rule: True
  roles:
    - role: password-settings
    - role: security_settings

Binary file not shown.

Binary file not shown.

View File

@@ -17,6 +17,9 @@ auth_service:
enabled: "no"
ssh_service:
enabled: "yes"
pam:
enabled: true
service_name: "sshd"
labels:
ipaddr: {{ansible_default_ipv4.address}}
group: {{ group_names[-1] }}

View File

@@ -11,10 +11,10 @@
10.10.43.106 ansible_port=2222 ansible_user=dev2
[saas_mgmt_master]
10.10.43.240 ansible_port=2222 ansible_user=dev2
10.10.43.240 ansible_port=2222 ansible_user=dev2-iac
[saas_mgmt_node]
10.10.43.[241:243] ansible_port=2222 ansible_user=dev2
10.10.43.[241:243] ansible_port=2222 ansible_user=dev2-iac
[dsk_dev_master]
10.10.43.[111:113] ansible_port=2222 ansible_user=dev2

View File

@@ -7,5 +7,5 @@
teleport_uri: teleport.kr.datasaker.io
# remove: True
# custom_labels: 'user=havelight,company=exem'
# update: True
install: True
update: True
# install: True

View File

@@ -0,0 +1,2 @@
[all]
10.10.43.43 ansible_port=2222 ansible_user=dev2

View File

@@ -0,0 +1,5 @@
#!/bin/bash
lambda_function="dsk-agent-ec2-start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json
rm response.json

View File

@@ -0,0 +1,5 @@
#!/bin/bash
lambda_function="dsk-agent-ec2-stop"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json
rm response.json
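
The same invocation can be made without the AWS CLI. Below is a minimal boto3 sketch of the start/stop wrappers above; the region and credential handling are assumptions and should be adjusted to the environment.

# Minimal boto3 equivalent of the start/stop wrapper scripts above.
import json

import boto3

lambda_client = boto3.client("lambda", region_name="ap-northeast-2")  # region assumed

response = lambda_client.invoke(
    FunctionName="dsk-agent-ec2-stop",        # or "dsk-agent-ec2-start"
    Payload=json.dumps({"key": "value"}),
)
# The Lambda's return value arrives as a streaming body.
print(response["StatusCode"], response["Payload"].read().decode())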

terraform/buckets/.terraform.lock.hcl (generated)
View File

@@ -0,0 +1,25 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/aws" {
version = "5.34.0"
constraints = "~> 5.0"
hashes = [
"h1:Tbq6dKE+XyXmkup6+7eQj2vH+eCJipk8R3VXhebVYi4=",
"zh:01bb20ae12b8c66f0cacec4f417a5d6741f018009f3a66077008e67cce127aa4",
"zh:3b0c9bdbbf846beef2c9573fc27898ceb71b69cf9d2f4b1dd2d0c2b539eab114",
"zh:5226ecb9c21c2f6fbf1d662ac82459ffcd4ad058a9ea9c6200750a21a80ca009",
"zh:6021b905d9b3cd3d7892eb04d405c6fa20112718de1d6ef7b9f1db0b0c97721a",
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
"zh:9e61b8e0ccf923979cd2dc1f1140dbcb02f92248578e10c1996f560b6306317c",
"zh:ad6bf62cdcf531f2f92f6416822918b7ba2af298e4a0065c6baf44991fda982d",
"zh:b698b041ef38837753bbe5265dddbc70b76e8b8b34c5c10876e6aab0eb5eaf63",
"zh:bb799843c534f6a3f072a99d93a3b53ff97c58a96742be15518adf8127706784",
"zh:cebee0d942c37cd3b21e9050457cceb26d0a6ea886b855dab64bb67d78f863d1",
"zh:e061fdd1cb99e7c81fb4485b41ae000c6792d38f73f9f50aed0d3d5c2ce6dcfb",
"zh:eeb4943f82734946362696928336357cd1d36164907ae5905da0316a67e275e1",
"zh:ef09b6ad475efa9300327a30cbbe4373d817261c8e41e5b7391750b16ef4547d",
"zh:f01aab3881cd90b3f56da7c2a75f83da37fd03cc615fc5600a44056a7e0f9af7",
"zh:fcd0f724ebc4b56a499eb6c0fc602de609af18a0d578befa2f7a8df155c55550",
]
}

View File

@@ -0,0 +1,15 @@
resource "aws_s3_bucket" "bucket" {
for_each = var.buckets
bucket = each.key
}
resource "aws_s3_bucket_versioning" "versioning" {
for_each = var.buckets
bucket = aws_s3_bucket.bucket[each.key].id
versioning_configuration {
status = each.value.versioning
}
}

View File

@@ -0,0 +1,18 @@
resource "aws_s3_bucket_lifecycle_configuration" "lifecycle" {
for_each = {for bucket, value in var.buckets : bucket => value if value.lifecycle.status == "Enabled"}
bucket = aws_s3_bucket.bucket[each.key].id
rule {
id = "expire_objects"
status = each.value.lifecycle.status
noncurrent_version_expiration {
noncurrent_days = each.value.lifecycle.noncurrent_days
}
expiration {
days = each.value.lifecycle.expiration_days
}
}
}

View File

@@ -0,0 +1,3 @@
provider "aws" {
region = var.aws_region
}

View File

@@ -0,0 +1,73 @@
resource "aws_s3_bucket_ownership_controls" "ownership" {
for_each = var.buckets
bucket = aws_s3_bucket.bucket[each.key].id
rule {
object_ownership = each.value.object_ownership
}
}
resource "aws_s3_bucket_public_access_block" "public_access_block" {
for_each = {for bucket, value in var.buckets : bucket => value if value.public_access == true}
bucket = aws_s3_bucket.bucket[each.key].id
block_public_acls = false
block_public_policy = false
ignore_public_acls = false
restrict_public_buckets = false
}
resource "aws_s3_bucket_public_access_block" "private_access_block" {
for_each = {for bucket, value in var.buckets : bucket => value if value.public_access == false}
bucket = aws_s3_bucket.bucket[each.key].id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
restrict_public_buckets = true
}
resource "aws_s3_bucket_acl" "public_acl" {
for_each = {for bucket, value in var.buckets : bucket => value if value.public_access == true}
depends_on = [
aws_s3_bucket_ownership_controls.ownership,
aws_s3_bucket_public_access_block.public_access_block
]
bucket = aws_s3_bucket.bucket[each.key].id
acl = "public-read"
}
resource "aws_s3_bucket_acl" "private_acl" {
for_each = {for bucket, value in var.buckets : bucket => value if value.public_access == false}
depends_on = [
aws_s3_bucket_ownership_controls.ownership,
aws_s3_bucket_public_access_block.private_access_block
]
bucket = aws_s3_bucket.bucket[each.key].id
acl = "private"
}
resource "aws_s3_bucket_policy" "policy" {
for_each = {for bucket, value in var.buckets : bucket => value if value.public_access == true}
bucket = aws_s3_bucket.bucket[each.key].id
policy = jsonencode({
Version = "2012-10-17",
Statement = [
{
Action = ["s3:GetObject"],
Effect = "Allow",
Resource = ["${aws_s3_bucket.bucket[each.key].arn}/*"],
Principal = "*"
}
]
})
}

View File

@@ -0,0 +1,46 @@
variable "aws_region" {
default = "ap-northeast-2"
}
variable "buckets" {
type = map(object({
object_ownership = string
public_access = bool
versioning = string
lifecycle = object({
status = string
noncurrent_days = optional(number)
expiration_days = optional(number)
})
}))
default = {
dsk-alert-images = {
object_ownership = "BucketOwnerEnforced"
public_access = false
versioning = "Enabled"
lifecycle = {
status = "Disabled"
noncurrent_days = 1
expiration_days = 7
}
}
dsk-airflow = {
object_ownership = "BucketOwnerEnforced"
public_access = false
versioning = "Enabled"
lifecycle = {
status = "Enabled"
noncurrent_days = 1
expiration_days = 7
}
}
dsk-metering = {
object_ownership = "BucketOwnerEnforced"
public_access = false
versioning = "Disabled"
lifecycle = {
status = "Disabled"
}
}
}
}
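
As a quick way to confirm what this module produced, a small boto3 spot-check could read back the public access block and versioning state of the buckets declared above. The bucket names come from the defaults; the region, credentials, and the idea of verifying via boto3 are assumptions for illustration, not part of this repository.

# Spot-check the S3 buckets declared in var.buckets (names taken from the defaults above).
# AWS credentials/permissions are assumed to be available in the environment.
import boto3

s3 = boto3.client("s3", region_name="ap-northeast-2")

for name in ("dsk-alert-images", "dsk-airflow", "dsk-metering"):
    pab = s3.get_public_access_block(Bucket=name)["PublicAccessBlockConfiguration"]
    versioning = s3.get_bucket_versioning(Bucket=name).get("Status", "Disabled")
    print(f"{name}: BlockPublicAcls={pab['BlockPublicAcls']}, Versioning={versioning}")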

View File

@@ -0,0 +1,8 @@
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 5.0"
    }
  }
}

terraform/cloudfront/.terraform.lock.hcl (generated)
View File

@@ -0,0 +1,25 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/aws" {
version = "5.35.0"
constraints = "~> 5.0"
hashes = [
"h1:fggCACmhwwn6NOo3D6xY6WDyZfBSbMIb47X/MOC+zqE=",
"zh:3a2a6f40db82d30ea8c5e3e251ca5e16b08e520570336e7e342be823df67e945",
"zh:420a23b69b412438a15b8b2e2c9aac2cf2e4976f990f117e4bf8f630692d3949",
"zh:4d8b887f6a71b38cff77ad14af9279528433e279eed702d96b81ea48e16e779c",
"zh:4edd41f8e1c7d29931608a7b01a7ae3d89d6f95ef5502cf8200f228a27917c40",
"zh:6337544e2ded5cf37b55a70aa6ce81c07fd444a2644ff3c5aad1d34680051bdc",
"zh:668faa3faaf2e0758bf319ea40d2304340f4a2dc2cd24460ddfa6ab66f71b802",
"zh:79ddc6d7c90e59fdf4a51e6ea822ba9495b1873d6a9d70daf2eeaf6fc4eb6ff3",
"zh:885822027faf1aa57787f980ead7c26e7d0e55b4040d926b65709b764f804513",
"zh:8c50a8f397b871388ff2e048f5eb280af107faa2e8926694f1ffd9f32a7a7cdf",
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
"zh:a2f5d2553df5573a060641f18ee7585587047c25ba73fd80617f59b5893d22b4",
"zh:c43833ae2a152213ee92eb5be7653f9493779eddbe0ce403ea49b5f1d87fd766",
"zh:dab01527a3a55b4f0f958af6f46313d775e27f9ad9d10bedbbfea4a35a06dc5f",
"zh:ed49c65620ec42718d681a7fc00c166c295ff2795db6cede2c690b83f9fb3e65",
"zh:f0a358c0ae1087c466d0fbcc3b4da886f33f881a145c3836ec43149878b86a1a",
]
}

View File

@@ -0,0 +1,32 @@
data "aws_s3_bucket" "bucket" {
for_each = toset(var.buckets)
bucket = each.value
}
resource "aws_s3_bucket_policy" "policy" {
for_each = toset(var.buckets)
bucket = data.aws_s3_bucket.bucket[each.value].id
policy = jsonencode({
"Version" = "2008-10-17",
"Id": "PolicyForCloudFrontPrivateContent",
"Statement" = [
{
"Sid" = "AllowCloudFrontServicePrincipal"
"Effect" = "Allow",
"Principal" = {
"Service" = "cloudfront.amazonaws.com"
},
"Action" = "s3:GetObject",
"Resource" = "${data.aws_s3_bucket.bucket[each.value].arn}/*",
"Condition" = {
"StringEquals" = {
"AWS:SourceArn": "arn:aws:cloudfront::508259851457:distribution/${aws_cloudfront_distribution.distribution[each.value].id}"
}
}
}
]
})
}

View File

@@ -0,0 +1,39 @@
resource "aws_cloudfront_distribution" "distribution" {
for_each = toset(var.buckets)
origin {
domain_name = data.aws_s3_bucket.bucket[each.value].bucket_regional_domain_name
origin_id = data.aws_s3_bucket.bucket[each.value].bucket_regional_domain_name
origin_access_control_id = aws_cloudfront_origin_access_control.origin_access[each.value].id
origin_shield {
enabled = true
origin_shield_region = "ap-northeast-2"
}
}
enabled = true
is_ipv6_enabled = true
comment = "Alert Images CDN - S3 Bucket: dsk-alert-images"
price_class = "PriceClass_200"
restrictions {
geo_restriction {
restriction_type = "whitelist"
locations = ["KR"]
}
}
default_cache_behavior {
cache_policy_id = "658327ea-f89d-4fab-a63d-7e88639e58f6"
allowed_methods = ["GET", "HEAD"]
cached_methods = ["GET", "HEAD"]
target_origin_id = data.aws_s3_bucket.bucket[each.value].bucket_regional_domain_name
compress = true
viewer_protocol_policy = "redirect-to-https"
}
viewer_certificate {
cloudfront_default_certificate = true
}
}

View File

@@ -0,0 +1,3 @@
provider "aws" {
region = var.aws_region
}

View File

@@ -0,0 +1,8 @@
resource "aws_cloudfront_origin_access_control" "origin_access" {
for_each = toset(var.buckets)
name = data.aws_s3_bucket.bucket[each.value].bucket_regional_domain_name
origin_access_control_origin_type = "s3"
signing_behavior = "always"
signing_protocol = "sigv4"
}

View File

@@ -0,0 +1,8 @@
variable "aws_region" {
default = "ap-northeast-2"
}
variable "buckets" {
type = list(string)
default = ["dsk-alert-images"]
}

View File

@@ -0,0 +1,8 @@
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 5.0"
    }
  }
}

View File

@@ -0,0 +1,25 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/aws" {
version = "5.33.0"
constraints = "~> 5.0"
hashes = [
"h1:rAmKVvvzUqVocFppyheelWGnyfCcIGxLV31iFBY2sz4=",
"zh:10bb683f2a9306e881f51a971ad3b2bb654ac94b54945dd63769876a343b5b04",
"zh:3916406db958d5487ea0c2d2320012d1907c29e6d01bf693560fe05e38ee0601",
"zh:3cb54b76b2f9e30620f3281ab7fb20633b1e4584fc84cc4ecd5752546252e86f",
"zh:513bcfd6971482215c5d64725189f875cbcbd260c6d11f0da4d66321efd93a92",
"zh:545a34427ebe7a950056627e7c980c9ba16318bf086d300eb808ffc41c52b7a8",
"zh:5a44b90faf1c8e8269f389c04bfac25ad4766d26360e7f7ac371be12a442981c",
"zh:64e1ef83162f78538dccad8b035577738851395ba774d6919cb21eb465a21e3a",
"zh:7315c70cb6b7f975471ea6129474639a08c58c071afc95a36cfaa41a13ae7fb9",
"zh:9806faae58938d638b757f54414400be998dddb45edfd4a29c85e827111dc93d",
"zh:997fa2e2db242354d9f772fba7eb17bd6d18d28480291dd93f85a18ca0a67ac2",
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
"zh:9f9e076b7e9752971f39eead6eda69df1c5e890c82ba2ca95f56974af7adfe79",
"zh:b1d6af047f96de7f97d38b685654f1aed4356d5060b0e696d87d0270f5d49f75",
"zh:bfb0654b6f34398aeffdf907b744af06733d168db610a2c5747263380f817ac7",
"zh:e25203ee8cedccf60bf450950d533d3c172509bda8af97dbc3bc817d2a503c57",
]
}

View File

@@ -0,0 +1,18 @@
resource "aws_instance" "dsk-agent-arm-host" {
ami = var.AMI_ID
instance_type = var.Instance_Type
key_name = var.Key_Pair
vpc_security_group_ids = [aws_security_group.dsk-agent-allow-security.id]
availability_zone = var.Aavailability_Zone
subnet_id = var.Public_Subnet_2C
root_block_device {
delete_on_termination = true
volume_size = 30
}
tags = {
Name = "dsk-agent-arm-host"
}
}

View File

@@ -0,0 +1,4 @@
# Configure the AWS Provider
provider "aws" {
region = var.REGION
}

View File

@@ -0,0 +1,41 @@
resource "aws_security_group" "dsk-agent-allow-security" {
name = "dsk-agent-allow-security"
description = "Allow inbound traffic"
vpc_id = var.VPC_ID
ingress {
description = "Allow SSH traffic"
from_port = 2222
to_port = 2222
protocol = "tcp"
cidr_blocks = ["39.115.183.236/32"]
}
ingress {
description = "Allow HTTPS traffic"
from_port = 443
to_port = 443
protocol = "tcp"
cidr_blocks = ["39.115.183.236/32"]
}
ingress {
description = "Allow HTTP traffic"
from_port = 80
to_port = 80
protocol = "tcp"
cidr_blocks = ["39.115.183.236/32"]
}
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
ipv6_cidr_blocks = ["::/0"]
}
tags = {
Name = "dsk-agent-allow-security"
}
}

Some files were not shown because too many files have changed in this diff.