Clean Code

This commit is contained in:
dsk-minchulahn
2023-12-19 13:03:29 +09:00
parent 947561ce1d
commit 0273450ff6
4237 changed files with 0 additions and 7447 deletions


@@ -0,0 +1,41 @@
#!/bin/bash
export KOPS_STATE_STORE=s3://clusters.spot.datasaker.io
export KOPS_CLUSTER_NAME=k8s-spot.datasaker.io
export VPC_ID="vpc-00ba2b0e9ad59f0ed"
export Network_CIDR="172.24.0.0/19"
export AMI_Image="ami-0abb33b73a78cae31"
export Private_Subnet_ID_1="subnet-024f0deda82039fa4"
export Private_Subnet_ID_2="subnet-050d942fa1c46540a"
export Private_Subnet_ID_3="subnet-0946eb806af7377be"
export Public_Subnet_ID_1="subnet-00c363356f133411d"
export Public_Subnet_ID_2="subnet-07aa5e879a262014d"
export Public_Subnet_ID_3="subnet-0073a61bc56a68a3e"
kops_cmd="
kops create cluster \
--vpc "$VPC_ID" \
--cloud aws \
--ssh-public-key "$HOME/.ssh/id_rsa.pub" \
--topology private --kubernetes-version "1.25.2" \
--network-cidr "$Network_CIDR" \
--networking calico \
--container-runtime containerd \
--image $AMI_Image \
--zones ap-northeast-2a,ap-northeast-2b,ap-northeast-2c \
--master-count 3 \
--master-size t3.small \
--node-count 3 \
--node-size t3.small \
--node-volume-size 100 \
--subnets "$Private_Subnet_ID_1,$Private_Subnet_ID_2,$Private_Subnet_ID_3" \
--utility-subnets "$Public_Subnet_ID_1,$Public_Subnet_ID_2,$Public_Subnet_ID_3" \
-v 10
"""
kubeconfig="kops export kubecfg --admin --kubeconfig $HOME/.kube/config --name=${KOPS_CLUSTER_NAME} --state=${KOPS_STATE_STORE}"
echo ${kubeconfig}
echo
echo
echo ${kops_cmd}
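# Note (illustrative addition, not part of the original script): as written, this script
# only echoes the kops command and the kubeconfig export command. One way to actually run
# them, assuming kops is installed and AWS credentials are configured, would be:
#   eval "${kops_cmd}"
#   kops update cluster --name "${KOPS_CLUSTER_NAME}" --yes
#   eval "${kubeconfig}"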


@@ -0,0 +1,8 @@
#!/bin/bash
aws lambda invoke --function-name autoscailing_start --cli-binary-format raw-in-base64-out --payload '{"key": "value"}' out
sed -i'' -e 's/"//g' out
sleep 100
aws lambda invoke --function-name add_tg_to_asg --cli-binary-format raw-in-base64-out --payload '{"key": "value"}' out
sed -i'' -e 's/"//g' out


@@ -0,0 +1,8 @@
#!/bin/bash
aws lambda invoke --function-name remove_to_to_asg --cli-binary-format raw-in-base64-out --payload '{"key": "value"}' out
sed -i'' -e 's/"//g' out
sleep 10
aws lambda invoke --function-name autoscailing_stop --cli-binary-format raw-in-base64-out --payload '{"key": "value"}' out
sed -i'' -e 's/"//g' out


@@ -0,0 +1,18 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDaqqy9YVwxh37xOU0nytBPd6GEJe30e1L/K5UXDZToteNebyfQrtFogxB6MpMNaAzAk6qbyPuZA3rgP8Y+qXgRlx88cxje5P5yOCsMW2o3xD5PiJ7lluWQ9tlS5ti4B9EWurJOsGF27XKKuSHN+dx9ZIb4sDqLYzmycPNwFaEtH6GQ2vjqpPMfjmKAuYmKD4L7mdA8lXTiRS2uYDkUxwQ+6PU+axTauD9qsXuGDAnGkVHKNE0o9OCf1uoyOhy6EB2sDz5Pymr7fbRJauWNxuSJdYPKY33GdDKpioP/1nRLSLtr1nvLHVrG/5CSNO1x20WYXFEGoMTzW4T5nYSS61apHkQ/0Csv0LBeHPc9gsMPobNJpIYlvGwdODQ+fpgxyB4SAQJKtQR1YB4w5OVtXVZAMvZZKI9gQQHZ8wQ4Zk0erGxKeyLxnDrKKNHLRPyUrjkL7H2a0i8BGpdk8sxW9NVrJJGgmQQiPbJx0yvIi1n55mUq+ZVjiF5qPvxtc5D133k= baekseungchan@DESKTOP-A4G95PG
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/JSUro9KEmFrLLBbehi6oGNFKSJnPZRg699tg4yqvTP+G0YL1NPw44hOk+n7CSf69BogQ2Hahj9d2olNw1T8KVJWv76fiUv58K8h/FzB+t/yMOz6MS/KMf5qPIvk2vSidTkxYPV8AgGY+A1zF37IP5qa7WviOqH+IhOE/bDYi/k+dQ1oTENxrKF2AoDYwMtyXgJnV/EXCQW6G4pr4Y3pMX9AhLf94Qjw+MxBO0835/W85VEVfqwYktWa9CuGdzMO8vIpF33837KAHzMEc0/u4iOOkytaRf1ouESu/c66MbFt8XiiVUJYF24PvmngocScfkTKalAsK3n2cCEPWHgEwdaxYbWaaOmQHcDhgO0TkHii81wdI/YGT3elqbNGc8vgd0Ohim0ySJepWHlsAb8XuyhPl8GJ6vC+Uc94BVxqf/3dBHllayQtWHnXzuMbMDlSYJ0Y8I+pkREHD3MPhZPKOXftGnBE/YqbQUAkZijDpBzD2gX1L+9bd4Wq85KEdCjU= bypark@DESKTOP-T73PETI
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC9Go9pLADJUQtq+ptTAcSIpi+VYv5/Kik0lBuV8xEc++vNtix5kwi+XSsNShHM3MVeiE8J27rYfyNn79r5pVKMuasMRyP3mTDZtRKr7/piM8MXuGSu1jCsVrTBZX0Sf4wuOA1tSkG9QgjBMZfvE9jOSYozA1K85mVE28m2rTihPnL5zYsDKnx+xIcwUBTpkOCoHiAfAX9b5ADAfScJigSZDjFLvexJ1aapPV2Iajh8huIhWvCUhrqUv/ldUm+b1iiOT7GXdrM/cam3FnLZ0b5KI9CQb7084+4l0BlmtPkuFcIlTDm1K6YO7+Mewd+F9uQZwvxGuElBPg8NVgFLD7+nrf2VlJYYCAeChyDV5+ZD70pSTcvHpJbmLKMtRFGov73ZPJ3vld9XCGUCajaoZz5Kz+ANmSC9nl3FpxnYgvFWfS7iwyC+VkGRKUg96/crXz4D8fW/wIskt+3cVrW9Z66psH41ll979mC8xly0ITWwbQZv7rvbdWSDVKVRgbXQOSc= kdalm@DESKTOP-L4TDAAL
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCR0UgFI8H016N4QrV24U2tuSIXxrD6J4bFrodC8mo8ScyEjlC16RnOvKUL5ljosxI8r8H2D8GHfw/omcCY5ScbSHUY41tmWORyUmAqTXT8B7W6L0IfAtZzRCEqnZ4+EQiZYkWgqn5Vb9C8NzR73LbAfAMAJj+gfcx++BuVcXHubgNe0bEZpeTiZR7kFOwqtZiE8mq9QBaFHlu8ectGi27Omr9pGjgi+c9ZhTtoEr4zX/pGpuHd22AXkWz//jBegzaUmNycCiIFfpG6NprT9yPe3BZZwuYeghrrv9dmhzBVVjw0jg9CKTm3LvddsQN3fZpL5aC5TJj7rB5tBNqiI5Y93fftKZkR8Df006ecXm7VAEZUmltNkCuoPodqt5BFSNRGysft7jp43KlTc0X3QSPEJUhFaDMw7MdT1TmJxIZ/kxW9PyS2k8QqCz1q3tU7GGu2WkEeY4/Dqp19WaA6brvYJPmiqMqt7Lnfy9pGmqILQxFa+hQQgermlFl0/8974ch64MESvoeQcM2XR+asB4978U1KniVxPwOIHZlMFDayaiq0GVLoK4N+/NWgLYFMiPnAB2vHws57A7R6Ba68hkUWBJWZOSl+P4zlKW/EJjX+ym4B4T8kndKm+I8iw7CDVnPND6Q9ANLkhpsBruF2OUp7rxzY3rEc52OsUo9/ZGje8w== gurwns1540@ex-em.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDv9Bk/20f0xHLQN1Mnub0VwsbRw7ggubeUZ+pUVaX9BD7uUud/ITktmTArbabLJLGgWx64la6+6VuQHauzX/cpMp4dVxoaySQDGPsB+V0WnXaq0pWop5BoJaPO75lpk/Kp7NFtn9x3315Rqmis1Df1UrQehMkqunnr2jWkil6iueAckztpsnqxlb8S+uVYiM7C4HsVx8XdOT3WtfUv+hzDlejy11nzi5T4HMT70O107N4g5CrEapluc7M3NfxCFhz5Gxu8P0dfJKLs9fFT4E8DRfGly5/cDcKbiJHSAZYRN6UwKr3z7LAw8aIW8JWflXn1fMZ92qdiT04kN8ZdVzyMpUiWMXJQPrfI2EHT/OHAympzKrXnT98oIqJANE4Eq72OG9Hrb6Tauk8Bde5/v3P9d7m5Zi9tx+01PZ1JQR+1dkJeV3Am6mjKWrxIowKPol2chnARoU7y1rEZGGi+09bD5hUq7KW6z61DUIlCMYF0Oq0IMs/voQP8zqpDmvSPNJc= hsgahm@ws-ubuntu
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCyfTPnCyr0Typ7yGTcy0LEGa8IH8yESEXa4Qyr85dWrxazTnWO7iYS0Ze6L0GMMO5qZXg/ntJGhI4PYF/WbCZ5KZMRXePyQIVs5pKMvSX4yH2gPIET5c6yTg4ZSIqrZDLBXGEZxMVp/SnNx1tRzxi0plBDtguSy6LZD0C1ue+VeT4oO98EB2T01GOeQp+RlF/theZuEWSWOVfFD0qVdsHIwVlYYlEZR11IrTamabMOVzyw+/8cokA4hgsrrkSrpKQ2YW0evHK1pxZrw+i3YJuHh3hJ0h98Ymw3rpHGec59gXaYT0PQEQvZs9RCrYw8NpCTQrImXR1UVjeeY3KGgpYQXna+WAmkjA+K/JvLmHGeombVJyd3v8330FX+Ob9klgqTWFvwb8Ew4QCcfl5hDAWxvzoJKAoG/TAZd13aNYaZAVkeWB7vPFWZ0brea6sqUJzXqzPwUXa0OirnqEfxMLZoo4tFyfxuVYVK+ScxayBPYJQkhwmTAZ4bj0OfQEw/jJM= hsgahm@ws-ubuntu
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDs1L1/dSx+aSxEiIuq9k4yzP31PX2CIK2RHDfVE0tqy2wvHZ1DMy9Wk/GfCcOICw746G+JexgJyb6AXYcUQrhxqzKE4dT6e8mtJaSvZJMVIq1gkkxooCs/q65vP/QhrdV8+bhzv9lS/rRiARzpiYE6ZrzteKnRQN7hxEgp398jqcyLLYj62f6wJv4yVOKfjiyCkkB/x/ooVrXGAGTooM/YN6o5dljDYq6xRdVi2GQOhQ6IrdmcIHP+JZGkTzq/IzCEF7cPlauwXl7buICYzatNOLf9AgGnmKRHB8c3pTB6WfBfKSF1BXZlIxbUsrz/i6oWXnDdC2l9Ir/o0Uj5jI6B jaejoonjung@ex-em.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUAppqxDLltrMsYMwIxGi0FA5STA/R+H6oy7myfiJP2Lt4woCogMi3ELVKEhFkeJx4i8y9G80lynEYCHRH1kAQ/7YaJEVFrPXTvBw+OVxYdVS/gLl0rL89ky+n0dv6A9mancrvUOMacI5aN7/W+EhoLohRjRbWlsPGNnvAmO0AZnt595aMUjFkdhusGyBVunDUFSitj9TFkjxDhr6cx8Bi0FLpvdsoAvfqiw/MVKW2pMgj56AT5UCT0wvtSHSNY/C731jP/RKrxP0fnVhIkVys/XmLV/6SVEqL1XwqMTvRfi5+Q8cPsXrnPuUFHiNN4e/MGJkYi0lg7XbX8jDXv3ybdxZ7lGiUDebxjTKBCCghFae3eAwpJADEDfrzb8DHJZFwJVVdKGXvStTWTibcs14ilRPcB4SWIBx/cFCzwOBK/iw8CfEfsbVe6WQbDc4T4LrgL8cUzHPOO8CQcC4DV/O3BuoqQExu6xTmU8rhLT9kgatIdX0K5jgGbuqz7c2lelU= root@JHJUNG
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIETt3DbkOgMY40sI0+dOxa3A/6o4yxNpoUWuuaSuUR/P joonsoopark@ex-em.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUAppqxDLltrMsYMwIxGi0FA5STA/R+H6oy7myfiJP2Lt4woCogMi3ELVKEhFkeJx4i8y9G80lynEYCHRH1kAQ/7YaJEVFrPXTvBw+OVxYdVS/gLl0rL89ky+n0dv6A9mancrvUOMacI5aN7/W+EhoLohRjRbWlsPGNnvAmO0AZnt595aMUjFkdhusGyBVunDUFSitj9TFkjxDhr6cx8Bi0FLpvdsoAvfqiw/MVKW2pMgj56AT5UCT0wvtSHSNY/C731jP/RKrxP0fnVhIkVys/XmLV/6SVEqL1XwqMTvRfi5+Q8cPsXrnPuUFHiNN4e/MGJkYi0lg7XbX8jDXv3ybdxZ7lGiUDebxjTKBCCghFae3eAwpJADEDfrzb8DHJZFwJVVdKGXvStTWTibcs14ilRPcB4SWIBx/cFCzwOBK/iw8CfEfsbVe6WQbDc4T4LrgL8cUzHPOO8CQcC4DV/O3BuoqQExu6xTmU8rhLT9kgatIdX0K5jgGbuqz7c2lelU= kp-jay-bastion-datasaker
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKDxtkcfx2ITlT2Yh7ZCT79do/25YQ2vROz38m8veAuBhOw+75oZJ4nN//zOWaaMvpC3Z7NIzOR+3UeukhnLZ591q8AaHcKjV8JEJMo2pvpH1vdLcTL9baLqWrxzgRimnZUNf5n5HNr+AKoXuPp//aVSJSoeznb66r04/rJSetT0QGDC8Kj5Q+MNvdd0/3U/nu7JxW9LIEaLoeiX6mVb4PpV7kl3rI3Vut/GnWakOhbS4yNvIFdR6d8rv305/BXJOz/aWy+0j7qK+NBzbSsI/l0vVUHfeD3whYGePCpWmj73ZsMTMjIjrC8DpRQlOJlAZ0GVpQnd/ayIWi4+V8VjvFcd6vSqrhhsNoOyo0Y/6cyO6iyvKqohMK6+HF1w6aXoaGCFFSl/3gw63saNAsdZPArnwf5yZ6GfPa/9bRn2k9g5xfp97Itpo6Iqq+PuRcZOes0EiIQe2hOoYQEIHIRhf8CZ+Xf6W1+XZB+WxEzUe4GCCwgUdTB6RIr4ThDxwCBV0= exem@DESKTOP-0V46131
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDAlYSGJZpOOEPZqIa1/CXxiaUNj1wsgkp0kEyD2SX8r7ovwmSAWCS24v/IOSgsUTFRpL64vIeCtcZ8sj4Hwzd3F2h+carQP0v+leCkzPpQ7aP/BoPS27+fSCzaOZv/QJ+eIcXWHIbWkXf6MYQ35PykDeJIO61OMOlWhpNV425VSwfZoB72xZmEH+rIZjXHHs8vYtIG2sXZE22BLiVw6PEL/C4QB2khBT5ZAjX2xGEzUoSknzva/8Uu20adQBalFTIdyLV7V6CxkIPkSgfmZh/fqXfbfPsxHLPK2o2ueGbx3fcN3kAqFrqpJgjEIZmNj6qhVPtbN5TSUyIjtoPhC4JR0heqckz1qLah+8lSiUfHSblGW89QuUcedHdwHp/RiZW6HQO0cqS/QPNcgPLTiv68voBapS9rav+j0tt1RynNY+AdhCOoo4BbGW0pXqi0vaHzbbfbzxp78kx/7/KXmUHkzGSkmlXVbKqzDm5k/kRn0q4pimDun42b+MjNYu3gZz0= ose@DESKTOP-PAE6T6K
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCd3buo7h81HCkNEdxJECUPenzQzKOmCyg1qyR6iy8KTrD/DYqwBKKVe0VDah8MwUpfI6c9OGIv09I0mGEm3PJYrVvADb2Ka37PFjubdLlKuW3obRSSwl4KFsM1RmTwRCYbVRFGRYymUFUtlZevGV/i+NoV/1/qCzgVibE43MVPBA3ttGhmgF3LuYm4VaPNwe/vpiPAJ+wgeb68y7UjeKPtitUrFRmOKyxteYm/pYIc8rlN7FkNdsX/xRgbLgHc23PGaeYeBKNa/v5+KnUrkAdNCZp5WxxHFv9okAMhmdvCShEuRA+uE1W1Ms7GWFssIM5l5XOv2hW/1BO9barR6jHt packer_634e1ea5-2078-7cb6-8a2a-3f3a9c3d6194
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC45maYW137cwvdS8AE9UzNHq9AMYrkEJtoNDAOVkUXtpVQITqvBCc4B4FfR5JK2h+imnBDng5fu728YAB7q31BE3Wub8I+QWhnQgv+kH1yMWj2s329tkHvcyNWIHSBqw4z1N74Zba+7mojKioju27HdcRcN1L7tpXSCHrq5bU6++CMShpZ7a3wo20RfikFWd563Y15mE3uDqlbkcuzE0KGSNrdY6Gy9aiE3/poVQRLaCmXnUKNw9wM3UGN9DanJi6iosXrlZRkpwhV+tHh2x+BWCbyY8jj94RDJgMwoKw71tzlEp+B1k6a7g+lEo3KFP//3PQxc9fdKBdg1YzSAKGKjsqATEVclmQHVskk6wZQC/wcjFxrSOreSp6knswX9AhIvGhMtoVo9iMy9cm+F4AauzjjfszCMO484983hIYwsh321VB14Wg7NroCYMUh7krATeKmNWhK0YicYCXINVMphBAcXFhuJduPejz19ZN356t+F/LDqlCxW7kO9QfYUy0= HP@DESKTOP-CNOVAON
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDi8funYVM0eRmfplW5EdnfJOFEVDEMMw08VRn6FD9x9VuNWCEkY3iErzekBY2SRat8g6q0VXRyu7b/bhm/kD+BtI79fmz9FKxslTZCeKKN1KWfoZoXSRnvjOX1Y6NDnY2X5M+3kN40ek9ku6abN6lOtInTXJ1QOJIISa8l6vrB/j1xVVZghTYY5MBMc89cRZESGdBZWld0CtmoM+mnjh5vWCCA3VJTcDbj5LKtWllA6t58KwtGBikr8iaOpi83dQ91eXWzxTttl/LCe9bfgSxYlmvZILn0UZMu1WiWBhlIBzC6RlxorkDVRXcSRjguEt+/ys2rv6UTSkm150O4PgjgxlZPmTJt1m5y/St57LELUVbV6XGSq6+eZNTZOYBxxRkKcV0uByCBjxjsVlMmoEZoxedhSVT1Z8/AiMnjPBjXx2ease04EvtZs6rpDRd0puzcx1TKoCkyak60ymxc91X9lQg3kUl0av/G5kMKJQqW6v31GA1Vnh4K9haCVF/Ki/M= seungjinjeong@DESKTOP-Q7KGU2F
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCugeU9csUdZ2HtN0gcwmdcSN1hcXL/ot590e829MI7W1KrOqaJs0XVKCvIKwYIB5MX+fyNLDI+M0JME/o0F1bjY0qiTHw35YEUY50h6dG8FOC8TwTrx/bRz95P1r+LWsv0x0/6O1r7VBKsr2YBB+uyg+0llY5NMBUEjifX/4nXM6FmqZZUuhxItAvUVBoXih3FyXUVEnYFM4pxrunac3srsJiuTCEdbKFVcAPVJDZa05D4/YNmG3k1ar1M7nE05Q10/I36f/23afGKfpkaNRofb9/SOvkMjCdsvvxwUE9XC9nMxs0Mn8tUlRgnx86ShUcGOe8ByCC7HM7YLgLGPcHxg0FqllBEQ6GVZl7Y0QZ3+g1ulX4XFY+XVdN0RFEXUT+nUO92BdDCSHlmwttnWjbFn7x+x6EuKbgdLKETMz3BMa5/ZMmdXzKh/WvcS6W2XZOmv9rgAuns+X2BRsw35rURcm77kUZUR0zlgEXXHmj+YfU9Wro4LXX4lhtiENOwKM0= hsw@DESKTOP-SMEHJL8
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCwP5OGkrGPimjOieEyz52kVnoNGN1o0v3z69GOTgjTqi0eapPsdKlbfVCnNNcLDdxK1hmurbopGLyymA5GDm96kP3c/Z3jJciaQjqrAyU4Wil/bgtIqNVDazcgKrUFO1NBtuWe0gj+rVHbpDaahpX8kcr3EUStwnD85+q7wzNixbZUreBwy9NoP8ZODxLTcrCFqRbVvMlZQyL1nKbPRWm9/LrD5SI/xd3XUenRatLalSxePKvZsPZcsO7yR71qByUQaDy7xs3HYHv4jy1K+Tbbo7hBFEsxEkHQ8CiV1sulx4fCplnD0ciGf0uS4+XyFXTHAuZL2nHqUyPkG44cQcv2hj3hX6+geb+oZtEKVNsjQeY7X5fbKsIxMfbLOwnaFKeWFxu4K0zJv10s1Wa5Qm33Saqvih5f3K+lmuXVNzYHKxpGx62JPN5N2eMGgy1x8l2MbrJHhchP/tORPMXkTPEgIADjQFdqX0JzAPrSOkTta3o+moyMjgMoqJMvKtzV0Z0= sujung@DESKTOP-HGLVNTC
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDtmuAhVVyVJ87+t2xVtiS3bvTxxn0dmD7t4D2iSvSErIjRsXRCPLdc/yGWiezj+oVZtRPLJ2mjKToGUerdkcW8oqiQeL0+x/CjdlS2rQXvQa2HXCjB+MejwZyJ2bl7VDtIMdLianJBn7+XVc48+bIf7yait8yVH1aVWcS/AXOSo9LwX/uNW5VCL5BeXSGwXdwkuhjeJurR4WIVSBXuh1ql5Vy6BdSxcmLMihNlIL/DyuzfPLuQZbuSeaJ7eJKiHu63/SwBA1cPzj9tgI7zNvguapIHKXvoK8n5gNUXVRDGnD4J6xbzUQB3DbU8kaz7pDClxzgpkf3MnvP9QvnTyqV+aftYlb02as0PrwIxlTlW/sBxyEGdFe+JwoTctHkrSfp0lYRpyCv3eXJcdDu2l3dTJXAHlpcJuQRH2j9herURxML0w6re1iKJ8MAjOqUvh+B3A1U3x116zEGdsCNCRcfwehEir7fmGKaPvrmOiDOTlNswdL/OJ1RHKFuEZJPlUr8= Kim-Dong-Woo-exem


@@ -0,0 +1,11 @@
#!/bin/sh
rm ./authorized_keys
for FILE in pub_keys/*.pub
do
echo "$FILE"
cat "$FILE" >> authorized_keys
done
# scp authorized_keys ubuntu@bastion.dev.kr.datasaker.io:/home/ubuntu/.ssh/


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDaqqy9YVwxh37xOU0nytBPd6GEJe30e1L/K5UXDZToteNebyfQrtFogxB6MpMNaAzAk6qbyPuZA3rgP8Y+qXgRlx88cxje5P5yOCsMW2o3xD5PiJ7lluWQ9tlS5ti4B9EWurJOsGF27XKKuSHN+dx9ZIb4sDqLYzmycPNwFaEtH6GQ2vjqpPMfjmKAuYmKD4L7mdA8lXTiRS2uYDkUxwQ+6PU+axTauD9qsXuGDAnGkVHKNE0o9OCf1uoyOhy6EB2sDz5Pymr7fbRJauWNxuSJdYPKY33GdDKpioP/1nRLSLtr1nvLHVrG/5CSNO1x20WYXFEGoMTzW4T5nYSS61apHkQ/0Csv0LBeHPc9gsMPobNJpIYlvGwdODQ+fpgxyB4SAQJKtQR1YB4w5OVtXVZAMvZZKI9gQQHZ8wQ4Zk0erGxKeyLxnDrKKNHLRPyUrjkL7H2a0i8BGpdk8sxW9NVrJJGgmQQiPbJx0yvIi1n55mUq+ZVjiF5qPvxtc5D133k= baekseungchan@DESKTOP-A4G95PG


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/JSUro9KEmFrLLBbehi6oGNFKSJnPZRg699tg4yqvTP+G0YL1NPw44hOk+n7CSf69BogQ2Hahj9d2olNw1T8KVJWv76fiUv58K8h/FzB+t/yMOz6MS/KMf5qPIvk2vSidTkxYPV8AgGY+A1zF37IP5qa7WviOqH+IhOE/bDYi/k+dQ1oTENxrKF2AoDYwMtyXgJnV/EXCQW6G4pr4Y3pMX9AhLf94Qjw+MxBO0835/W85VEVfqwYktWa9CuGdzMO8vIpF33837KAHzMEc0/u4iOOkytaRf1ouESu/c66MbFt8XiiVUJYF24PvmngocScfkTKalAsK3n2cCEPWHgEwdaxYbWaaOmQHcDhgO0TkHii81wdI/YGT3elqbNGc8vgd0Ohim0ySJepWHlsAb8XuyhPl8GJ6vC+Uc94BVxqf/3dBHllayQtWHnXzuMbMDlSYJ0Y8I+pkREHD3MPhZPKOXftGnBE/YqbQUAkZijDpBzD2gX1L+9bd4Wq85KEdCjU= bypark@DESKTOP-T73PETI


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC9Go9pLADJUQtq+ptTAcSIpi+VYv5/Kik0lBuV8xEc++vNtix5kwi+XSsNShHM3MVeiE8J27rYfyNn79r5pVKMuasMRyP3mTDZtRKr7/piM8MXuGSu1jCsVrTBZX0Sf4wuOA1tSkG9QgjBMZfvE9jOSYozA1K85mVE28m2rTihPnL5zYsDKnx+xIcwUBTpkOCoHiAfAX9b5ADAfScJigSZDjFLvexJ1aapPV2Iajh8huIhWvCUhrqUv/ldUm+b1iiOT7GXdrM/cam3FnLZ0b5KI9CQb7084+4l0BlmtPkuFcIlTDm1K6YO7+Mewd+F9uQZwvxGuElBPg8NVgFLD7+nrf2VlJYYCAeChyDV5+ZD70pSTcvHpJbmLKMtRFGov73ZPJ3vld9XCGUCajaoZz5Kz+ANmSC9nl3FpxnYgvFWfS7iwyC+VkGRKUg96/crXz4D8fW/wIskt+3cVrW9Z66psH41ll979mC8xly0ITWwbQZv7rvbdWSDVKVRgbXQOSc= kdalm@DESKTOP-L4TDAAL


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCR0UgFI8H016N4QrV24U2tuSIXxrD6J4bFrodC8mo8ScyEjlC16RnOvKUL5ljosxI8r8H2D8GHfw/omcCY5ScbSHUY41tmWORyUmAqTXT8B7W6L0IfAtZzRCEqnZ4+EQiZYkWgqn5Vb9C8NzR73LbAfAMAJj+gfcx++BuVcXHubgNe0bEZpeTiZR7kFOwqtZiE8mq9QBaFHlu8ectGi27Omr9pGjgi+c9ZhTtoEr4zX/pGpuHd22AXkWz//jBegzaUmNycCiIFfpG6NprT9yPe3BZZwuYeghrrv9dmhzBVVjw0jg9CKTm3LvddsQN3fZpL5aC5TJj7rB5tBNqiI5Y93fftKZkR8Df006ecXm7VAEZUmltNkCuoPodqt5BFSNRGysft7jp43KlTc0X3QSPEJUhFaDMw7MdT1TmJxIZ/kxW9PyS2k8QqCz1q3tU7GGu2WkEeY4/Dqp19WaA6brvYJPmiqMqt7Lnfy9pGmqILQxFa+hQQgermlFl0/8974ch64MESvoeQcM2XR+asB4978U1KniVxPwOIHZlMFDayaiq0GVLoK4N+/NWgLYFMiPnAB2vHws57A7R6Ba68hkUWBJWZOSl+P4zlKW/EJjX+ym4B4T8kndKm+I8iw7CDVnPND6Q9ANLkhpsBruF2OUp7rxzY3rEc52OsUo9/ZGje8w== gurwns1540@ex-em.com


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDv9Bk/20f0xHLQN1Mnub0VwsbRw7ggubeUZ+pUVaX9BD7uUud/ITktmTArbabLJLGgWx64la6+6VuQHauzX/cpMp4dVxoaySQDGPsB+V0WnXaq0pWop5BoJaPO75lpk/Kp7NFtn9x3315Rqmis1Df1UrQehMkqunnr2jWkil6iueAckztpsnqxlb8S+uVYiM7C4HsVx8XdOT3WtfUv+hzDlejy11nzi5T4HMT70O107N4g5CrEapluc7M3NfxCFhz5Gxu8P0dfJKLs9fFT4E8DRfGly5/cDcKbiJHSAZYRN6UwKr3z7LAw8aIW8JWflXn1fMZ92qdiT04kN8ZdVzyMpUiWMXJQPrfI2EHT/OHAympzKrXnT98oIqJANE4Eq72OG9Hrb6Tauk8Bde5/v3P9d7m5Zi9tx+01PZ1JQR+1dkJeV3Am6mjKWrxIowKPol2chnARoU7y1rEZGGi+09bD5hUq7KW6z61DUIlCMYF0Oq0IMs/voQP8zqpDmvSPNJc= hsgahm@ws-ubuntu


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCyfTPnCyr0Typ7yGTcy0LEGa8IH8yESEXa4Qyr85dWrxazTnWO7iYS0Ze6L0GMMO5qZXg/ntJGhI4PYF/WbCZ5KZMRXePyQIVs5pKMvSX4yH2gPIET5c6yTg4ZSIqrZDLBXGEZxMVp/SnNx1tRzxi0plBDtguSy6LZD0C1ue+VeT4oO98EB2T01GOeQp+RlF/theZuEWSWOVfFD0qVdsHIwVlYYlEZR11IrTamabMOVzyw+/8cokA4hgsrrkSrpKQ2YW0evHK1pxZrw+i3YJuHh3hJ0h98Ymw3rpHGec59gXaYT0PQEQvZs9RCrYw8NpCTQrImXR1UVjeeY3KGgpYQXna+WAmkjA+K/JvLmHGeombVJyd3v8330FX+Ob9klgqTWFvwb8Ew4QCcfl5hDAWxvzoJKAoG/TAZd13aNYaZAVkeWB7vPFWZ0brea6sqUJzXqzPwUXa0OirnqEfxMLZoo4tFyfxuVYVK+ScxayBPYJQkhwmTAZ4bj0OfQEw/jJM= hsgahm@ws-ubuntu


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDs1L1/dSx+aSxEiIuq9k4yzP31PX2CIK2RHDfVE0tqy2wvHZ1DMy9Wk/GfCcOICw746G+JexgJyb6AXYcUQrhxqzKE4dT6e8mtJaSvZJMVIq1gkkxooCs/q65vP/QhrdV8+bhzv9lS/rRiARzpiYE6ZrzteKnRQN7hxEgp398jqcyLLYj62f6wJv4yVOKfjiyCkkB/x/ooVrXGAGTooM/YN6o5dljDYq6xRdVi2GQOhQ6IrdmcIHP+JZGkTzq/IzCEF7cPlauwXl7buICYzatNOLf9AgGnmKRHB8c3pTB6WfBfKSF1BXZlIxbUsrz/i6oWXnDdC2l9Ir/o0Uj5jI6B jaejoonjung@ex-em.com


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUAppqxDLltrMsYMwIxGi0FA5STA/R+H6oy7myfiJP2Lt4woCogMi3ELVKEhFkeJx4i8y9G80lynEYCHRH1kAQ/7YaJEVFrPXTvBw+OVxYdVS/gLl0rL89ky+n0dv6A9mancrvUOMacI5aN7/W+EhoLohRjRbWlsPGNnvAmO0AZnt595aMUjFkdhusGyBVunDUFSitj9TFkjxDhr6cx8Bi0FLpvdsoAvfqiw/MVKW2pMgj56AT5UCT0wvtSHSNY/C731jP/RKrxP0fnVhIkVys/XmLV/6SVEqL1XwqMTvRfi5+Q8cPsXrnPuUFHiNN4e/MGJkYi0lg7XbX8jDXv3ybdxZ7lGiUDebxjTKBCCghFae3eAwpJADEDfrzb8DHJZFwJVVdKGXvStTWTibcs14ilRPcB4SWIBx/cFCzwOBK/iw8CfEfsbVe6WQbDc4T4LrgL8cUzHPOO8CQcC4DV/O3BuoqQExu6xTmU8rhLT9kgatIdX0K5jgGbuqz7c2lelU= root@JHJUNG


@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIETt3DbkOgMY40sI0+dOxa3A/6o4yxNpoUWuuaSuUR/P joonsoopark@ex-em.com


@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKbI5DjRkABz65NnREzf5HKKIMPrIA4DrnDDXTrjnRH8 sujung@ex-em.com


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUAppqxDLltrMsYMwIxGi0FA5STA/R+H6oy7myfiJP2Lt4woCogMi3ELVKEhFkeJx4i8y9G80lynEYCHRH1kAQ/7YaJEVFrPXTvBw+OVxYdVS/gLl0rL89ky+n0dv6A9mancrvUOMacI5aN7/W+EhoLohRjRbWlsPGNnvAmO0AZnt595aMUjFkdhusGyBVunDUFSitj9TFkjxDhr6cx8Bi0FLpvdsoAvfqiw/MVKW2pMgj56AT5UCT0wvtSHSNY/C731jP/RKrxP0fnVhIkVys/XmLV/6SVEqL1XwqMTvRfi5+Q8cPsXrnPuUFHiNN4e/MGJkYi0lg7XbX8jDXv3ybdxZ7lGiUDebxjTKBCCghFae3eAwpJADEDfrzb8DHJZFwJVVdKGXvStTWTibcs14ilRPcB4SWIBx/cFCzwOBK/iw8CfEfsbVe6WQbDc4T4LrgL8cUzHPOO8CQcC4DV/O3BuoqQExu6xTmU8rhLT9kgatIdX0K5jgGbuqz7c2lelU= kp-jay-bastion-datasaker


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKDxtkcfx2ITlT2Yh7ZCT79do/25YQ2vROz38m8veAuBhOw+75oZJ4nN//zOWaaMvpC3Z7NIzOR+3UeukhnLZ591q8AaHcKjV8JEJMo2pvpH1vdLcTL9baLqWrxzgRimnZUNf5n5HNr+AKoXuPp//aVSJSoeznb66r04/rJSetT0QGDC8Kj5Q+MNvdd0/3U/nu7JxW9LIEaLoeiX6mVb4PpV7kl3rI3Vut/GnWakOhbS4yNvIFdR6d8rv305/BXJOz/aWy+0j7qK+NBzbSsI/l0vVUHfeD3whYGePCpWmj73ZsMTMjIjrC8DpRQlOJlAZ0GVpQnd/ayIWi4+V8VjvFcd6vSqrhhsNoOyo0Y/6cyO6iyvKqohMK6+HF1w6aXoaGCFFSl/3gw63saNAsdZPArnwf5yZ6GfPa/9bRn2k9g5xfp97Itpo6Iqq+PuRcZOes0EiIQe2hOoYQEIHIRhf8CZ+Xf6W1+XZB+WxEzUe4GCCwgUdTB6RIr4ThDxwCBV0= exem@DESKTOP-0V46131


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDAlYSGJZpOOEPZqIa1/CXxiaUNj1wsgkp0kEyD2SX8r7ovwmSAWCS24v/IOSgsUTFRpL64vIeCtcZ8sj4Hwzd3F2h+carQP0v+leCkzPpQ7aP/BoPS27+fSCzaOZv/QJ+eIcXWHIbWkXf6MYQ35PykDeJIO61OMOlWhpNV425VSwfZoB72xZmEH+rIZjXHHs8vYtIG2sXZE22BLiVw6PEL/C4QB2khBT5ZAjX2xGEzUoSknzva/8Uu20adQBalFTIdyLV7V6CxkIPkSgfmZh/fqXfbfPsxHLPK2o2ueGbx3fcN3kAqFrqpJgjEIZmNj6qhVPtbN5TSUyIjtoPhC4JR0heqckz1qLah+8lSiUfHSblGW89QuUcedHdwHp/RiZW6HQO0cqS/QPNcgPLTiv68voBapS9rav+j0tt1RynNY+AdhCOoo4BbGW0pXqi0vaHzbbfbzxp78kx/7/KXmUHkzGSkmlXVbKqzDm5k/kRn0q4pimDun42b+MjNYu3gZz0= ose@DESKTOP-PAE6T6K


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCd3buo7h81HCkNEdxJECUPenzQzKOmCyg1qyR6iy8KTrD/DYqwBKKVe0VDah8MwUpfI6c9OGIv09I0mGEm3PJYrVvADb2Ka37PFjubdLlKuW3obRSSwl4KFsM1RmTwRCYbVRFGRYymUFUtlZevGV/i+NoV/1/qCzgVibE43MVPBA3ttGhmgF3LuYm4VaPNwe/vpiPAJ+wgeb68y7UjeKPtitUrFRmOKyxteYm/pYIc8rlN7FkNdsX/xRgbLgHc23PGaeYeBKNa/v5+KnUrkAdNCZp5WxxHFv9okAMhmdvCShEuRA+uE1W1Ms7GWFssIM5l5XOv2hW/1BO9barR6jHt packer_634e1ea5-2078-7cb6-8a2a-3f3a9c3d6194


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC45maYW137cwvdS8AE9UzNHq9AMYrkEJtoNDAOVkUXtpVQITqvBCc4B4FfR5JK2h+imnBDng5fu728YAB7q31BE3Wub8I+QWhnQgv+kH1yMWj2s329tkHvcyNWIHSBqw4z1N74Zba+7mojKioju27HdcRcN1L7tpXSCHrq5bU6++CMShpZ7a3wo20RfikFWd563Y15mE3uDqlbkcuzE0KGSNrdY6Gy9aiE3/poVQRLaCmXnUKNw9wM3UGN9DanJi6iosXrlZRkpwhV+tHh2x+BWCbyY8jj94RDJgMwoKw71tzlEp+B1k6a7g+lEo3KFP//3PQxc9fdKBdg1YzSAKGKjsqATEVclmQHVskk6wZQC/wcjFxrSOreSp6knswX9AhIvGhMtoVo9iMy9cm+F4AauzjjfszCMO484983hIYwsh321VB14Wg7NroCYMUh7krATeKmNWhK0YicYCXINVMphBAcXFhuJduPejz19ZN356t+F/LDqlCxW7kO9QfYUy0= HP@DESKTOP-CNOVAON


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDi8funYVM0eRmfplW5EdnfJOFEVDEMMw08VRn6FD9x9VuNWCEkY3iErzekBY2SRat8g6q0VXRyu7b/bhm/kD+BtI79fmz9FKxslTZCeKKN1KWfoZoXSRnvjOX1Y6NDnY2X5M+3kN40ek9ku6abN6lOtInTXJ1QOJIISa8l6vrB/j1xVVZghTYY5MBMc89cRZESGdBZWld0CtmoM+mnjh5vWCCA3VJTcDbj5LKtWllA6t58KwtGBikr8iaOpi83dQ91eXWzxTttl/LCe9bfgSxYlmvZILn0UZMu1WiWBhlIBzC6RlxorkDVRXcSRjguEt+/ys2rv6UTSkm150O4PgjgxlZPmTJt1m5y/St57LELUVbV6XGSq6+eZNTZOYBxxRkKcV0uByCBjxjsVlMmoEZoxedhSVT1Z8/AiMnjPBjXx2ease04EvtZs6rpDRd0puzcx1TKoCkyak60ymxc91X9lQg3kUl0av/G5kMKJQqW6v31GA1Vnh4K9haCVF/Ki/M= seungjinjeong@DESKTOP-Q7KGU2F


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCugeU9csUdZ2HtN0gcwmdcSN1hcXL/ot590e829MI7W1KrOqaJs0XVKCvIKwYIB5MX+fyNLDI+M0JME/o0F1bjY0qiTHw35YEUY50h6dG8FOC8TwTrx/bRz95P1r+LWsv0x0/6O1r7VBKsr2YBB+uyg+0llY5NMBUEjifX/4nXM6FmqZZUuhxItAvUVBoXih3FyXUVEnYFM4pxrunac3srsJiuTCEdbKFVcAPVJDZa05D4/YNmG3k1ar1M7nE05Q10/I36f/23afGKfpkaNRofb9/SOvkMjCdsvvxwUE9XC9nMxs0Mn8tUlRgnx86ShUcGOe8ByCC7HM7YLgLGPcHxg0FqllBEQ6GVZl7Y0QZ3+g1ulX4XFY+XVdN0RFEXUT+nUO92BdDCSHlmwttnWjbFn7x+x6EuKbgdLKETMz3BMa5/ZMmdXzKh/WvcS6W2XZOmv9rgAuns+X2BRsw35rURcm77kUZUR0zlgEXXHmj+YfU9Wro4LXX4lhtiENOwKM0= hsw@DESKTOP-SMEHJL8


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCt83MqLt8ShnY0Wk/Ri8D7ITFgSDwZBZXI5u9FVDpvqgZvW1FykOWIDZlrtj8H2jM9z+lxtiHD03Py+nbC7sl92SDvDgRHSs+HJ7mrJKWJ9PYsmqYP+WXwqKdSVQn/R/r8ak0D5QKn2HLBFHN4ObxDkfiyvKHAjPAMMzH3l4dHmUS/J1qMaor3iHYvl4iYvFXU2kNx+d0x9DRj4iz6LlxhhbxU8gsmEUig8tvS32DBP3bzKeV1Dt97ul97MPOYnugetH+dQcJ7gc9X3q9L6u/23a07fWaEoKjNg1V/A+Whec2ROlcPWXdJps7oJDXD6SfrtL64Tce/OGSpmtBP1t8beSzRBh6rrHhVn78Sjf7vfx8fjHDgTJMYCmWFDUzyrpio0zSsTFeGaAi8D59Owj8OxEddrx1aNjV7iyxS1LMnMsoPZX/XHb54lGKSIZtSvTRfVNAhpX/m8pCCAarTEjpqCfoD8H2fmhRpKcAvz5fhaqCTxWMzATzJQlsq3DZMiQM= sujung@DESKTOP-KOQ0UTU


@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDtmuAhVVyVJ87+t2xVtiS3bvTxxn0dmD7t4D2iSvSErIjRsXRCPLdc/yGWiezj+oVZtRPLJ2mjKToGUerdkcW8oqiQeL0+x/CjdlS2rQXvQa2HXCjB+MejwZyJ2bl7VDtIMdLianJBn7+XVc48+bIf7yait8yVH1aVWcS/AXOSo9LwX/uNW5VCL5BeXSGwXdwkuhjeJurR4WIVSBXuh1ql5Vy6BdSxcmLMihNlIL/DyuzfPLuQZbuSeaJ7eJKiHu63/SwBA1cPzj9tgI7zNvguapIHKXvoK8n5gNUXVRDGnD4J6xbzUQB3DbU8kaz7pDClxzgpkf3MnvP9QvnTyqV+aftYlb02as0PrwIxlTlW/sBxyEGdFe+JwoTctHkrSfp0lYRpyCv3eXJcdDu2l3dTJXAHlpcJuQRH2j9herURxML0w6re1iKJ8MAjOqUvh+B3A1U3x116zEGdsCNCRcfwehEir7fmGKaPvrmOiDOTlNswdL/OJ1RHKFuEZJPlUr8= Kim-Dong-Woo-exem


@@ -0,0 +1,102 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
start_function="autoscailing_start_prod"
stop_function="autoscailing_stop_prod"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | egrep -iv '(bastion|wireguard)' |wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${stop_function}
_main ${start_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------


@@ -0,0 +1,99 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
start_function="autoscailing_start_prod"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | egrep -iv '(bastion|wireguard)' |wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${start_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------


@@ -0,0 +1,100 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
stop_function="autoscailing_stop_prod"
start_function="autoscailing_start_prod"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${stop_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------

01-old/scripts/prod/prod.sh Executable file

@@ -0,0 +1,69 @@
#!/bin/bash
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
current_path=`pwd`
line="*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-*"
#----------------------------------------------------------------------------------------------------------------
# Locate the script file and directory paths
__find_path (){
script_file="00_terraform_var_change.sh"
cd ../../
tmp_path=`find . -name ${script_file} | grep -vi old | head -n 1`
script_path=`dirname ${tmp_path}`    # directory that contains ${script_file}
_bash=`which bash`
_terraform=`which terraform`
}
#----------------------------------------------------------------------------------------------------------------
# Run the Lambda invocation script
__lambda_execute (){
lambda_file="lambda_${1}.sh"
cd "${current_path}/lambda"
#./${lambda_file}
echo ${line}
echo "lambda file : $(pwd)/${lambda_file}" #DEBUG
${_bash} ${lambda_file}
echo ${line}
}
#----------------------------------------------------------------------------------------------------------------
# Run the variable-change script, then run terraform apply
__terraform_target (){
cd ${current_path}
cd ../../
cd ${script_path}
echo "script_file : ${script_file}" #DEBUG
${_bash} ${script_file}
echo ${line}
${_terraform} init
echo ${line}
sleep 1
${_terraform} apply
}
#----------------------------------------------------------------------------------------------------------------
if [[ ${1} == '' ]];then
echo "[Usage] ${0} { start | stop | restart } "
exit
else
script_func=${1}
__find_path
if [[ ${script_func} == 'start' ]];then
__lambda_execute start
__terraform_target
elif [[ ${script_func} == 'stop' ]];then
__lambda_execute stop
elif [[ ${script_func} == 'restart' ]];then
__lambda_execute restart
__terraform_target
else
echo "[Usage] ${0} { start | stop | restart } "
exit
fi
fi
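# Usage examples (illustrative), matching the branches above:
#   ./prod.sh start     # run lambda_start.sh, then terraform init/apply
#   ./prod.sh stop      # run lambda_stop.sh only
#   ./prod.sh restart   # run lambda_restart.sh (if present), then terraform init/apply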


@@ -0,0 +1,102 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
start_function="spot_start"
stop_function="spot_stop"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | egrep -iv '(bastion|wireguard)' |wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${stop_function}
_main ${start_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------


@@ -0,0 +1,100 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
start_function="spot_start"
stop_function="spot_stop"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | egrep -iv '(bastion|wireguard)' |wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${start_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------


@@ -0,0 +1,100 @@
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
echo_line="=========================================================================="
search_tag="prod"
start_function="spot_start"
stop_function="spot_stop"
#----------------------------------------------------------------------------------------------------------------
# Query running AWS instances
_get_aws_status (){
aws_query="Reservations[].Instances[].[ InstanceId, Tags[?Key=='Name'].Value|[0] ]"
aws_filter="Name=instance-state-name,Values=running"
current_ec2_num=`aws ec2 describe-instances --query "${aws_query}" --filter ${aws_filter} --output text | grep ${search_tag} | wc -l`
}
#----------------------------------------------------------------------------------------------------------------
# Print timestamped log messages
_get_time_log (){
datetime=`date +'%Y-%m-%d %H:%M:%S'`
string=$1
echo "[${datetime}] ${string}"
}
#----------------------------------------------------------------------------------------------------------------
# Invoke the AWS Lambda, then query instances to check success/failure
_main (){
lambda_function=$1
echo ${echo_line}
if [[ ${lambda_function} == *"stop"* ]]; then
what_func='stop'
else
what_func='start'
fi
_get_time_log "[INFO] aws lambda ${lambda_function} start"
aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json > /tmp/aws_func_result
#echo "aws lambda invoke --function-name ${lambda_function} --cli-binary-format raw-in-base64-out --payload '{ "key": "value" }' response.json"
succ_flag=0
while read line
do
if [[ $line == *"200"* ]]; then
succ_flag=1
_get_time_log "[INFO] aws lambda ${lambda_function} success!"
break
else
succ_flag=0
fi
done < /tmp/aws_func_result
if [[ ${succ_flag} == 1 ]]; then
aws lambda invoke --function-name ${lambda_function} out --log-type Tail --query 'LogResult' --output text | base64 -d > /tmp/aws_func_output
if [[ ${what_func} == 'stop' ]]; then
final_ec2_num=0
else
final_ec2_num=`grep "Starting" /tmp/aws_func_output | wc -l`
fi
num=0
while true
do
_get_aws_status
num=`echo $(( $num + 1 ))`
if [[ ${current_ec2_num} == ${final_ec2_num} ]]; then
_get_time_log "[INFO] aws ${search_tag} instance number: ${final_ec2_num} check success!"
break
elif [[ ${num} == 30 ]]; then
_get_time_log "[Error] time out!"
break
else
_get_time_log "[Retry: ${num}] aws ${search_tag} instance number: ${final_ec2_num} checking..."
fi
sleep 5
done
else
echo ${echo_line}
_get_time_log "[Error] aws lambda ${lambda_function} failed!"
exit
fi
echo ${echo_line}
#---------------
}
#----------------------------------------------------------------------------------------------------------------
# Run
_main ${stop_function}
#----------------------------------------------------------------------------------------------------------------
# Remove temporary files
unlink out
unlink response.json
unlink /tmp/aws_func_output
unlink /tmp/aws_func_result
#----------------------------------------------------------------------------------------------------------------

01-old/scripts/spot/spot.sh Executable file

@@ -0,0 +1,69 @@
#!/bin/bash
if [ -z "$BASH_VERSION" ]; then exec bash "$0" "$@"; exit; fi
#----------------------------------------------------------------------------------------------------------------
# Variable declarations
current_path=`pwd`
line="*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-*"
#----------------------------------------------------------------------------------------------------------------
# Locate the script file and directory paths
__find_path (){
script_file="00_terraform_var_change.sh"
cd ../../
tmp_path=`find . -name ${script_file} | grep -vi old | head -n 1`
script_path=`dirname ${tmp_path}`    # directory that contains ${script_file}
_bash=`which bash`
_terraform=`which terraform`
}
#----------------------------------------------------------------------------------------------------------------
# Run the Lambda invocation script
__lambda_execute (){
lambda_file="lambda_${1}.sh"
cd "${current_path}/lambda"
#./${lambda_file}
echo ${line}
echo "lambda file : $(pwd)/${lambda_file}" #DEBUG
${_bash} ${lambda_file}
echo ${line}
}
#----------------------------------------------------------------------------------------------------------------
# Run the variable-change script, then run terraform apply
__terraform_target (){
cd ${current_path}
cd ../../
cd ${script_path}
echo "script_file : ${script_file}" #DEBUG
${_bash} ${script_file}
echo ${line}
${_terraform} init
echo ${line}
sleep 1
${_terraform} apply
}
#----------------------------------------------------------------------------------------------------------------
if [[ ${1} == '' ]];then
echo "[Usage] ${0} { start | stop | restart } "
exit
else
script_func=${1}
__find_path
if [[ ${script_func} == 'start' ]];then
__lambda_execute start
__terraform_target
elif [[ ${script_func} == 'stop' ]];then
__lambda_execute stop
elif [[ ${script_func} == 'restart' ]];then
__lambda_execute restart
__terraform_target
else
echo "[Usage] ${0} { start | stop | restart } "
exit
fi
fi


@@ -0,0 +1,193 @@
*As of: 2023.05.30 17:14:05*
# dsk-iac
This repository collects the files used to build the AWS environment; a usage sketch follows the directory list below.
## Directory structure
1. __docs: documentation files.
2. architecture: AWS architecture diagrams
3. kops: AWS Kubernetes cluster setup
4. packer: AWS AMI builds
5. script: scripts for Lambda invocation, SSH key distribution, etc.
6. terraform: Terraform code
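A minimal sketch of how these directories are typically combined (the file names and exact order below are illustrative assumptions, not paths taken from this repository):

```bash
# 1. Bake the node AMI (packer/)
packer build ami.pkr.hcl
# 2. Create the cluster spec and apply it (kops/)
bash create_cluster.sh                                   # wraps "kops create cluster ..."
kops update cluster --name "${KOPS_CLUSTER_NAME}" --yes
# 3. Provision the surrounding AWS resources (terraform/)
terraform init && terraform apply
# 4. Day-to-day operations: Lambda start/stop, SSH key distribution (script/)
bash prod/prod.sh start
```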
## Node list [26]
| name | ip | taints_key | instance_group | cpu | memory | zone | instance_type | os | k8s_ver | runtime_ver |
|---------------------|------------------|---------------------------------------|----------------------------|-----|--------|-----------------|---------------|--------------------|---------|--------------------|
| i-04abf3bdccadaf05c | 172.24.13.138/23 | - | k8s-prod-tmp | 8 | 32 | ap-northeast-2c | m5a.2xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-05a497c27d7f65d78 | 172.24.12.22/23 | control-plane | - | 2 | 4 | ap-northeast-2c | c5a.large | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0446ae551deac0b98 | 172.24.8.218/23 | control-plane | - | 2 | 4 | ap-northeast-2a | c5a.large | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0648e1521b484fde1 | 172.24.11.188/23 | control-plane | - | 2 | 4 | ap-northeast-2b | c5a.large | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-07aebcfff479c0332 | 172.24.12.210/23 | prod/data-druid | k8s-prod-data-druid-small | 4 | 16 | ap-northeast-2c | m6i.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0a36b91bce0b86b6f | 172.24.12.142/23 | prod/data-druid | k8s-prod-data-druid-c | 16 | 64 | ap-northeast-2c | m5a.4xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-07046cde77753a2d4 | 172.24.12.252/23 | prod/data-druid | k8s-prod-data-druid-c | 16 | 64 | ap-northeast-2c | m5a.4xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0c370e3211468e6e5 | 172.24.12.96/23 | prod/data-druid | k8s-prod-data-druid-middle | 8 | 32 | ap-northeast-2c | m5a.2xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0f2de40153ccd640e | 172.24.13.2/23 | prod/data-druid | k8s-prod-data-druid-middle | 8 | 32 | ap-northeast-2c | m5a.2xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0df4bd76188c35590 | 172.24.13.100/23 | prod/data-es | k8s-prod-data-es-c3 | 4 | 16 | ap-northeast-2c | m6i.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0fd0e6d60f02a7b14 | 172.24.13.29/23 | prod/data-es | k8s-prod-data-es-c2 | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-06e3ca9b0a909e7bd | 172.24.13.64/23 | prod/data-es | k8s-prod-data-es-c | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-092354f3890ecf207 | 172.24.13.215/23 | prod/data-kafka | k8s-prod-data-kafka-c2 | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0a6a549b5bf51efbc | 172.24.12.207/23 | prod/data-kafka | k8s-prod-data-kafka-c | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0097ed484d8513a71 | 172.24.12.66/23 | prod/data-kafka | k8s-prod-data-kafka-c3 | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0b38484102aa244e8 | 172.24.13.227/23 | prod/druid-middlemanager | k8s-prod-data-druid-large | 16 | 63 | ap-northeast-2c | m5.4xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-07fd9c87549c8ca0c | 172.24.12.112/23 | prod/druid-middlemanager | k8s-prod-data-druid-large | 16 | 64 | ap-northeast-2c | m5a.4xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-00fdae3a253e720bf | 172.24.12.227/23 | prod/process | k8s-prod-process-c | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0abc091cf1fc5084f | 172.24.12.23/23 | prod/process | k8s-prod-process-c4 | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-01da83c436c0fb834 | 172.24.13.47/23 | prod/process | k8s-prod-process-c2 | 4 | 16 | ap-northeast-2c | m6i.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-08cea9eddc5260b4d | 172.24.13.119/23 | prod/process | k8s-prod-process-c2 | 4 | 16 | ap-northeast-2c | m6i.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0d3e74b8884948aa3 | 172.24.13.32/23 | prod/process | k8s-prod-process-c | 4 | 16 | ap-northeast-2c | m5a.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0f5d94fbedf1127a7 | 172.24.12.78/23 | prod/rel-process | k8s-rel-process-c | 4 | 16 | ap-northeast-2c | m5zn.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-0746bcd96a3da0c7a | 172.24.13.226/23 | prod/rel-process | k8s-rel-process-c | 4 | 16 | ap-northeast-2c | m5zn.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-056794363777f0364 | 172.24.12.49/23 | prod/rel-process | k8s-rel-process-c | 4 | 16 | ap-northeast-2c | m5d.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
| i-001b5a37f8c2e6483 | 172.24.13.207/23 | prod/rel-process | k8s-rel-process-c | 4 | 16 | ap-northeast-2c | m5d.xlarge | Ubuntu 20.04.4 LTS | v1.25.2 | containerd://1.6.8 |
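A comparable node inventory can be pulled directly from the cluster. The sketch below assumes the standard labels set by kops and Kubernetes (`kops.k8s.io/instancegroup`, `node.kubernetes.io/instance-type`, `topology.kubernetes.io/zone`) and covers only part of the columns above; OS image, kubelet version, and container runtime come from `-o wide`:

```bash
kubectl get nodes -o wide \
  -L kops.k8s.io/instancegroup \
  -L node.kubernetes.io/instance-type \
  -L topology.kubernetes.io/zone
```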
## Resource list [91]
| name | kind | count | request_cpu | request_mem | limit_cpu | limit_mem |
|------------------------------------------------------|--------------|-------|-------------|-------------|-----------|-----------|
| dsk-base-agent | DaemonSet | 26 | 50m | 128Mi | 100m | 250Mi |
| promtail | DaemonSet | 26 | <null> | <null> | <null> | <null> |
| dsk-log-agent | DaemonSet | 21 | 250m | 512Mi | 500m | 1G |
| prometheus-prometheus-node-exporter | DaemonSet | 26 | <null> | <null> | <null> | <null> |
| aws-cloud-controller-manager | DaemonSet | 3 | 200m | <null> | <null> | <null> |
| dsk-trace-agent | DaemonSet | 26 | <null> | <null> | <null> | <null> |
| calico-node | DaemonSet | 26 | 100m | <null> | <null> | <null> |
| kops-controller | DaemonSet | 3 | 50m | 50Mi | <null> | <null> |
| ebs-csi-node | DaemonSet | 26 | <null> | <null> | <null> | <null> |
| ingress-nginx-controller | DaemonSet | 7 | 100m | 90Mi | <null> | <null> |
| es-log-collector | DaemonSet | 4 | 10m | 20Mi | 50m | 50Mi |
| dsk-kubernetes-agent | Deployment | 1 | 100m | 512Mi | 1 | 1000Mi |
| dsk-otel | Deployment | 1 | 250m | 512M | 1 | 2048M |
| dsk-postgres-agents-pg-1 | Deployment | 1 | 100m | 512Mi | 1 | 1000Mi |
| dsk-vault-agent | Deployment | 1 | <null> | <null> | <null> | <null> |
| ebs-csi-controller | Deployment | 2 | <null> | <null> | <null> | <null> |
| prod-dsk-metric-base-flat-stream | Deployment | 4 | 500m | 500Mi | 1 | 1Gi |
| calico-kube-controllers | Deployment | 1 | <null> | <null> | <null> | <null> |
| cert-manager | Deployment | 1 | <null> | <null> | <null> | <null> |
| cert-manager-cainjector | Deployment | 1 | <null> | <null> | <null> | <null> |
| cert-manager-webhook | Deployment | 1 | <null> | <null> | <null> | <null> |
| coredns | Deployment | 2 | 100m | 70Mi | <null> | 170Mi |
| coredns-autoscaler | Deployment | 1 | 20m | 10Mi | <null> | <null> |
| dns-controller | Deployment | 1 | 50m | 50Mi | <null> | <null> |
| druid-broker | Deployment | 2 | <null> | 26Gi | <null> | 26Gi |
| druid-coordinator | Deployment | 1 | <null> | 1Gi | <null> | 8Gi |
| druid-router | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-log-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-loggate | Deployment | 4 | 100m | 500Mi | 200m | 1500Mi |
| prod-dsk-metric-base-agg-stream-1m | Deployment | 3 | 500m | 1Gi | 1 | 2Gi |
| prod-dsk-metric-base-agg-stream-5m | Deployment | 3 | 500m | 1Gi | 1 | 2Gi |
| prod-dsk-metric-custom-flat-stream | Deployment | 3 | <null> | <null> | <null> | <null> |
| prod-dsk-notification-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-tagging-connector-group1 | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-tagging-connector-group2 | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-tagging-connector-group3 | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-tagging-connector-group4 | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-tagging-connector-jaeger-process | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-tagging-connector-manifest | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-trace-stream | Deployment | 3 | <null> | <null> | <null> | <null> |
| prod-dsk-ui | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-usergate-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prometheus-kube-state-metrics | Deployment | 1 | 10m | 32Mi | 100m | 64Mi |
| prometheus-prometheus-pushgateway | Deployment | 1 | 100m | 30Mi | 200m | 50Mi |
| prometheus-server | Deployment | 1 | <null> | <null> | <null> | <null> |
| vault-agent-injector | Deployment | 1 | <null> | <null> | <null> | <null> |
| kafka-akhq | Deployment | 1 | <null> | <null> | <null> | <null> |
| kafka-kafka-ui | Deployment | 1 | <null> | <null> | <null> | <null> |
| metrics-server | Deployment | 1 | 100m | 200Mi | <null> | <null> |
| mongo-dsk-mongodb-sharded-mongos | Deployment | 1 | <null> | <null> | <null> | <null> |
| mongo-manifest-mongodb-sharded-mongos | Deployment | 1 | <null> | <null> | <null> | <null> |
| nfs-provisioner-prod-nfs-subdir-external-provisioner | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-agent-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-agentmanager-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-alert-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-apm-api | Deployment | 1 | 100m | 256Mi | 250m | 512Mi |
| prod-dsk-app-sender | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-backoffice | Deployment | 0 | <null> | <null> | <null> | <null> |
| prod-dsk-chart-api | Deployment | 1 | 500m | 512Mi | 1 | 1Gi |
| prod-dsk-dashboard-api | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-database-api | Deployment | 3 | 100m | 256Mi | 250m | 512Mi |
| prod-dsk-database-plan-stream | Deployment | 1 | <null> | <null> | <null> | <null> |
| prod-dsk-datagate-dsk-datagate-manifest | Deployment | 1 | 100m | 100M | 200m | 200M |
| prod-dsk-datagate-dsk-datagate-metric | Deployment | 5 | 250m | 250M | 500m | 500M |
| prod-dsk-datagate-dsk-datagate-plan | Deployment | 1 | 100m | 100M | 250m | 250M |
| prod-dsk-datagate-dsk-datagate-trace | Deployment | 2 | 100m | 250M | 200m | 500M |
| prod-dsk-infrastructure-api | Deployment | 1 | 100m | 256Mi | 250m | 512Mi |
| prod-dsk-kafka2storage-database-plan-stream | Deployment | 1 | 300m | 500Mi | 500m | 1G |
| prod-dsk-kafka2storage-database-stat-query | Deployment | 1 | 300m | 500Mi | 500m | 1G |
| prod-dsk-kafka2storage-manifest-original | Deployment | 1 | 300m | 500Mi | 500m | 1G |
| prod-dsk-kafka2storage-trace-span-stream | Deployment | 1 | 300m | 500Mi | 500m | 1G |
| prod-dsk-krakend | Deployment | 3 | 200m | 256Mi | 500m | 512Mi |
| elasticsearch-master | Stateful_Set | 3 | 2 | 12Gi | 2 | 12Gi |
| kafka | Stateful_Set | 3 | 2 | 6000Mi | 3 | 10000Mi |
| zookeeper | Stateful_Set | 2 | 100m | 512Mi | 200m | 1000Mi |
| druid-historical | Stateful_Set | 2 | <null> | 26Gi | <null> | 26Gi |
| keycloak | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| druid-zookeeper | Stateful_Set | 3 | 250m | 256Mi | <null> | <null> |
| rabbitmq | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| mongo-dsk-mongodb-sharded-configsvr | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| prometheus-alertmanager | Stateful_Set | 1 | 10m | 32Mi | 100m | 128Mi |
| mongo-dsk-mongodb-sharded-shard0-data | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| mongo-dsk-mongodb-sharded-shard1-data | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| mongo-manifest-mongodb-sharded-configsvr | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| redis-master | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| mongo-manifest-mongodb-sharded-shard0-data | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| mongo-manifest-mongodb-sharded-shard1-data | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
| druid-middle-manager | Stateful_Set | 2 | <null> | 1Gi | <null> | 60Gi |
| postgresql | Stateful_Set | 1 | 250m | 256Mi | <null> | <null> |
| druid-postgresql | Stateful_Set | 1 | 250m | 256Mi | <null> | <null> |
| vault | Stateful_Set | 1 | <null> | <null> | <null> | <null> |
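This table is produced by the `resources_query` further down in this commit. A rough cross-check sketch, assuming `kubectl` access to the same cluster (not part of the repository; it only reports the first container of each pod template, so multi-container workloads may differ):

```bash
# Print per-Deployment availability and first-container requests/limits, roughly mirroring the table above.
kubectl get deploy -A -o custom-columns=\
NAMESPACE:.metadata.namespace,\
NAME:.metadata.name,\
AVAILABLE:.status.availableReplicas,\
REQ_CPU:.spec.template.spec.containers[0].resources.requests.cpu,\
REQ_MEM:.spec.template.spec.containers[0].resources.requests.memory,\
LIM_CPU:.spec.template.spec.containers[0].resources.limits.cpu,\
LIM_MEM:.spec.template.spec.containers[0].resources.limits.memory
# The same columns work for StatefulSets via: kubectl get sts -A -o custom-columns=...
```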
## Service List (NodePort) [33]
| name | namespace | type | node_port | age |
|-----------------------------------------|-------------------|----------|-----------|-------------------------|
| rabbitmq | dsk-middle | NodePort | 30038 | 5 mons 17 days 01:30:35 |
| kafka-broker | dsk-middle | NodePort | 30094 | 3 mons 8 days 23:00:58 |
| kafka-broker-global | dsk-middle | NodePort | 30095 | 3 mons 8 days 23:00:58 |
| keycloak | dsk-middle | NodePort | 30100 | 5 mons 15 days 01:32:43 |
| keycloak | dsk-middle | NodePort | 30101 | 5 mons 15 days 01:32:43 |
| mongo-manifest-mongodb-sharded | dsk-middle | NodePort | 30111 | 1 mon 16:25:57 |
| mongo-dsk-mongodb-sharded | dsk-middle | NodePort | 30112 | 1 mon 15:13:17 |
| redis-master | dsk-middle | NodePort | 30229 | 5 mons 17 days 01:30:22 |
| elasticsearch-master | dsk-middle | NodePort | 30433 | 27 days 03:11:32 |
| rabbitmq | dsk-middle | NodePort | 30523 | 5 mons 17 days 01:30:35 |
| rabbitmq | dsk-middle | NodePort | 30565 | 5 mons 17 days 01:30:35 |
| rabbitmq | dsk-middle | NodePort | 30655 | 5 mons 17 days 01:30:35 |
| rabbitmq | dsk-middle | NodePort | 32021 | 5 mons 17 days 01:30:35 |
| kafka-akhq | dsk-middle | NodePort | 32100 | 3 mons 8 days 23:00:58 |
| kafka-kafka-ui | dsk-middle | NodePort | 32101 | 3 mons 8 days 23:00:58 |
| elasticsearch-master | dsk-middle | NodePort | 32110 | 27 days 03:11:32 |
| prometheus-server | prometheus | NodePort | 32132 | 18 days 00:15:23 |
| prometheus-alertmanager | prometheus | NodePort | 32133 | 18 days 00:15:23 |
| prod-dsk-datagate-dsk-datagate-trace | dsk-datagate-prod | NodePort | 32300 | 4 mons 11 days 22:25:44 |
| prod-dsk-datagate-dsk-datagate-manifest | dsk-datagate-prod | NodePort | 32301 | 4 mons 23 days 23:41:29 |
| prod-dsk-datagate-dsk-datagate-metric | dsk-datagate-prod | NodePort | 32302 | 4 mons 23 days 23:41:29 |
| prod-dsk-datagate-dsk-datagate-plan | dsk-datagate-prod | NodePort | 32303 | 4 mons 23 days 23:41:29 |
| prod-dsk-loggate | dsk-datagate-prod | NodePort | 32304 | 4 mons 17 days 23:55:18 |
| prod-dsk-loggate | dsk-datagate-prod | NodePort | 32305 | 4 mons 17 days 23:55:18 |
| prod-dsk-backoffice | dsk-api-prod | NodePort | 32523 | 1 mon 24 days 05:37:14 |
| rabbitmq | dsk-middle | NodePort | 32987 | 5 mons 17 days 01:30:35 |
| postgresql | dsk-middle | NodePort | 32098 | 5 mons 17 days 01:30:58 |
| kafka-outside-0 | dsk-middle | NodePort | 32400 | 3 mons 8 days 23:00:58 |
| kafka-outside-1 | dsk-middle | NodePort | 32401 | 3 mons 8 days 23:00:58 |
| kafka-outside-2 | dsk-middle | NodePort | 32402 | 3 mons 8 days 23:00:58 |
| kafka-global-0 | dsk-middle | NodePort | 32500 | 3 mons 8 days 23:00:58 |
| kafka-global-1 | dsk-middle | NodePort | 32501 | 3 mons 8 days 23:00:58 |
| kafka-global-2 | dsk-middle | NodePort | 32502 | 3 mons 8 days 23:00:58 |
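This list comes from the `service_query` below. A quick way to reproduce it, assuming `kubectl` access to the cluster (not part of the repository):

```bash
# Keep the header line plus every service whose TYPE column (third column with -A) is NodePort.
kubectl get svc -A | awk 'NR==1 || $3=="NodePort"'
```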
## Reserved Instance Usage [11]
| availability_zone | instance_type | ri_count | ec2_count | result |
|-------------------|---------------|----------|-----------|--------|
| ap-northeast-2a | c5a.large | 1 | 1 | 0 |
| ap-northeast-2a | t3.small | 2 | 1 | -1 |
| ap-northeast-2b | c5a.large | 1 | 1 | 0 |
| ap-northeast-2c | c5a.large | 1 | 1 | 0 |
| ap-northeast-2c | m5.4xlarge | 0 | 1 | 1 |
| ap-northeast-2c | m5a.2xlarge | 1 | 3 | 2 |
| ap-northeast-2c | m5a.4xlarge | 1 | 3 | 2 |
| ap-northeast-2c | m5a.xlarge | 7 | 8 | 1 |
| ap-northeast-2c | m6i.xlarge | 4 | 4 | 0 |
| ap-northeast-2c | t3.small | 1 | 1 | 0 |
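The `result` column is `ec2_count - ri_count`: a positive value is the number of running on-demand instances of that type and zone not covered by an active reservation (for example, m5a.xlarge in ap-northeast-2c: 8 - 7 = 1). The RI side can be spot-checked with the AWS CLI, assuming credentials for the same account (not part of the repository):

```bash
# List active Reserved Instances with the same grouping keys as the ri_count column above.
aws ec2 describe-reserved-instances \
  --filters Name=state,Values=active \
  --query 'ReservedInstances[].[AvailabilityZone,InstanceType,InstanceCount]' \
  --output table
```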

View File

@@ -0,0 +1,11 @@
# dsk-iac
This repository organizes the files used to build the AWS environment.
## Directory structure
1. __docs: stores documentation files.
2. architecture: AWS architecture diagrams
3. kops: AWS Kubernetes cluster provisioning
4. packer: AWS AMI builds
5. script: scripts for running Lambda functions, distributing keys, etc. (a usage sketch follows this list)
6. terraform: Terraform code
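A minimal usage sketch, assuming the repository is cloned under `$HOME/git` and Steampipe is installed with the aws and kubernetes plugins; the script filename below is an assumption and refers to the inventory script shown in the next file:

```bash
cd "$HOME/git/dsk-iac/scripts/steampipe_iac"
bash ./update_readme.sh   # illustrative name for the steampipe README generator in this directory
```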

View File

@@ -0,0 +1,207 @@
#!/bin/bash
#------------------------------------------------------------------------------------------------------
__init (){
# pwd
# cd ${git_path}
# pwd
# Sync the repository, then start a fresh README: a timestamp header followed by the static org_README.md content.
cd /home/jhjung/git/dsk-iac/scripts/steampipe_iac/
git pull
datetime=$(date "+%Y.%m.%d %H:%M:%S")
echo -e "*As of : ${datetime}\n" > ${file}
cat ${origin} >> ${file}
}
#------------------------------------------------------------------------------------------------------
__git_push (){
# Commit and push the regenerated README.
git add ${file}
git commit -m 'steampipe schedule'
git push
}
#------------------------------------------------------------------------------------------------------
__append (){
# Count the data rows (skip the '--' separator rows and the header rows), then append the section
# title with that count plus the formatted query output to the README.
line_count=$(grep -v -- '--' ${exec_log} | grep -Ev '(name|ri_count)' | wc -l)
echo -e "\n${title} [${line_count}]\n" >> ${file}
cat ${exec_log} >> ${file}
}
#------------------------------------------------------------------------------------------------------
__query_exec (){
# Run a steampipe query, capture its ASCII table output, and normalize it into a markdown table.
steampipe query "${1}" > ${exec_log}
__log_sed
}
#------------------------------------------------------------------------------------------------------
__log_sed (){
# Steampipe draws table borders with '+'; turn them into '|' so the output renders as a markdown table,
sed -i 's/+/|/g' ${exec_log}
# strip the 'node-role.kubernetes.io/' prefix from taint keys for readability,
sed -i "s/node-role.kubernetes.io\///g" ${exec_log}
# and drop the first and last lines (the top and bottom border of the ASCII table).
sed -i '1d;$d' ${exec_log}
}
#------------------------------------------------------------------------------------------------------
node_query="""
SELECT
name,
annotations ->> 'projectcalico.org/IPv4Address' AS IP,
COALESCE(taints -> 0 ->> 'key', '-') AS Taints_key,
COALESCE(tags ->> 'kops.k8s.io/instancegroup', '-') AS Instance_group,
capacity ->> 'cpu' AS CPU,
CEIL((CAST(regexp_replace(capacity ->> 'memory', 'Ki', '') AS FLOAT) / 1024 / 1024)) AS Memory,
tags ->> 'topology.kubernetes.io/zone' AS Zone,
tags ->> 'beta.kubernetes.io/instance-type' AS Instance_type,
node_info ->> 'osImage' AS OS,
node_info ->> 'kubeletVersion' AS K8S_ver,
node_info ->> 'containerRuntimeVersion' AS Runtime_ver
FROM
kubernetes_node
ORDER BY
Taints_key
"""
resources_query="""
(SELECT
'sts' as kind,
name,
available_replicas as count,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'cpu' AS request_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'memory' AS request_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'cpu' AS limit_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'memory' AS limit_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') ->> 'name' AS c_name,
namespace
FROM
kubernetes_stateful_set
WHERE
name not like 'rel-%')
union
(SELECT
'deploy' as kind,
name,
available_replicas as count,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'cpu' AS request_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'memory' AS request_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'cpu' AS limit_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'memory' AS limit_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') ->> 'name' AS c_name,
namespace
FROM
kubernetes_deployment
WHERE
name not like 'rel-%')
union
(SELECT
'ds' as kind,
name,
number_available as count,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'cpu' AS request_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'requests' ->> 'memory' AS request_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'cpu' AS limit_cpu,
jsonb_array_elements(template -> 'spec' -> 'containers') -> 'resources' -> 'limits' ->> 'memory' AS limit_mem,
jsonb_array_elements(template -> 'spec' -> 'containers') ->> 'name' AS c_name,
namespace
FROM
kubernetes_daemonset
WHERE
name not like 'rel-%')
order by kind
"""
service_query="""
SELECT
name,
namespace,
type,
lower(p ->> 'nodePort') as Node_Port,
age(current_timestamp, creation_timestamp)
FROM
kubernetes_service,
jsonb_array_elements(ports) as p
WHERE
type='NodePort'
ORDER BY
Node_Port
"""
#name not like '%rel-%'
aws_ri_query="""
SELECT
COALESCE(a.availability_zone, b.availability_zone, '-') AS availability_zone,
COALESCE(a.instance_type, b.instance_type, c.instance_type, '-') AS instance_type,
COALESCE(c.cpu, 0) AS cpu,
COALESCE(c.memory, 0) AS memory,
COALESCE(a.ri_count, 0) AS ri_count,
COALESCE(b.ec2_count, 0) AS ec2_count,
COALESCE(b.ec2_count, 0) - COALESCE(a.ri_count, 0) AS result
FROM
(SELECT
availability_zone,
instance_type,
SUM(instance_count) AS ri_count
FROM
aws_ec2_reserved_instance
WHERE
instance_state='active'
GROUP BY
availability_zone,
instance_type
) a
FULL OUTER JOIN
(SELECT
placement_availability_zone AS availability_zone,
instance_type,
COUNT(*) AS ec2_count
FROM
aws_ec2_instance
WHERE
instance_state='running' AND
instance_lifecycle!='spot'
GROUP BY
availability_zone,
instance_type
) b
ON
a.availability_zone = b.availability_zone AND
a.instance_type = b.instance_type
INNER JOIN
(SELECT
instance_type,
(CAST(memory_info ->> 'SizeInMiB' AS FLOAT) / 1024) AS memory,
(CAST(v_cpu_info ->> 'DefaultCores' AS FLOAT) * 2) AS cpu
FROM
aws_ec2_instance_type
WHERE
instance_type in (SELECT instance_type FROM aws_ec2_instance WHERE instance_state='running')
GROUP BY
instance_type, memory, cpu
) c
ON
COALESCE(a.instance_type, b.instance_type, '-') = c.instance_type
ORDER BY availability_zone
"""
#instance_type in (SELECT instance_type FROM aws_ec2_instance WHERE instance_state='running' AND instance_lifecycle!='spot')
#------------------------------------------------------------------------------------------------------
#git_path="/home/jhjung/git/dsk-iac"
origin="org_README.md"   # static header content prepended to the generated README
exec_log="query.log"     # temporary file holding each query's output
file="../../README.md"   # the repository-root README that this script regenerates
#------------------------------------------------------------------------------------------------------
__init
title="## 노드 목록"
__query_exec "${node_query}"
__append
title="## 리소스 목록"
__query_exec "${resources_query}"
__append
title="## 서비스 목록 (NodePort)"
__query_exec "${service_query}"
__append
title="## 예약 인스턴스 사용 내역"
__query_exec "${aws_ri_query}"
__append
#------------------------------------------------------------------------------------------------------
rm ${exec_log}
__git_push
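#------------------------------------------------------------------------------------------------------
# Scheduling sketch (assumption, not part of the original script): the 'steampipe schedule' commit
# message above suggests this runs unattended, e.g. via a crontab entry like the one below;
# the path, filename, and timing are illustrative only.
#   0 9 * * * /home/jhjung/git/dsk-iac/scripts/steampipe_iac/<this_script>.sh >> /var/log/steampipe_readme.log 2>&1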