Adding an eks module on which knot can be deployed, along with instructions on how to deploy it
DimosChristatos committed Jun 19, 2024
1 parent 8101a52 commit 87229f3
Showing 19 changed files with 693 additions and 0 deletions.
124 changes: 124 additions & 0 deletions eks/.terraform.lock.hcl

Some generated files are not rendered by default.

24 changes: 24 additions & 0 deletions eks/README.md
@@ -0,0 +1,24 @@
How to run knot on AWS:
- cd into the 'eks' folder
- run 'terraform apply' to create the EKS cluster
- run 'aws eks --region us-east-1 update-kubeconfig --name tf-cluster' to connect to the remote cluster and be able to run commands against it
- (optional) run 'kubectl get pods -A' to verify that you are connected to the cluster
- export the KNOT_HOST environment variable as described in the knot README ($env:KNOT_HOST="mydns" on Windows)
- cd back into the folder with the knot helmfile
- run 'helmfile sync --concurrency 1' to deploy knot onto the cluster
- after running the helmfile, make sure that the knot ingress is up
- go to the AWS Route 53 console and connect the knot ingress to your DNS
  - *at the bottom of this README you'll find details on how to do this*
- wait for your DNS to propagate; after a while you should be able to reach knot at your DNS name


*Creating the Route 53 records*
As an example I'm using the custom DNS name "boiboiapp.com"
- Create a Route 53 hosted zone on AWS
- In that hosted zone, create records where:
  - record type = A
  - alias = on
  - type = Alias to Network Load Balancer
  - region = US East (N. Virginia)
  - Record name = *.boiboiapp.com / boiboiapp.com (create one record for each)
![alt text](image.png)
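The same records can also be expressed in Terraform instead of clicking through the console. A hedged sketch, assuming a hosted zone for "boiboiapp.com"; the NLB name and resource labels below are placeholders, not part of this commit:

```hcl
# Hypothetical sketch: alias boiboiapp.com and *.boiboiapp.com to the NLB
# fronting the knot ingress. "knot-ingress-nlb" is a placeholder name --
# replace it with the load balancer actually created for your ingress.
data "aws_lb" "knot_ingress" {
  name = "knot-ingress-nlb"
}

resource "aws_route53_zone" "main" {
  name = "boiboiapp.com"
}

resource "aws_route53_record" "apex" {
  zone_id = aws_route53_zone.main.zone_id
  name    = "boiboiapp.com"
  type    = "A"

  alias {
    name                   = data.aws_lb.knot_ingress.dns_name
    zone_id                = data.aws_lb.knot_ingress.zone_id
    evaluate_target_health = true
  }
}

resource "aws_route53_record" "wildcard" {
  zone_id = aws_route53_zone.main.zone_id
  name    = "*.boiboiapp.com"
  type    = "A"

  alias {
    name                   = data.aws_lb.knot_ingress.dns_name
    zone_id                = data.aws_lb.knot_ingress.zone_id
    evaluate_target_health = true
  }
}
```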
Binary file added eks/assets/EKS-With-Terraform.png
9 changes: 9 additions & 0 deletions eks/backend.tf
@@ -0,0 +1,9 @@
# terraform {
#   required_version = ">=0.12.0"
#   backend "s3" {
#     region  = "us-east-1"
#     profile = "default"
#     key     = "terraformstatefile"
#     bucket  = ""
#   }
# }
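Because this block ships commented out, `terraform apply` keeps state in a local terraform.tfstate file. A hedged sketch of the backend once enabled; the bucket name is a placeholder, and the bucket must already exist before `terraform init`:

```hcl
terraform {
  required_version = ">=0.12.0"

  backend "s3" {
    region  = "us-east-1"
    profile = "default"
    key     = "terraformstatefile"
    bucket  = "my-tf-state-bucket" # placeholder -- supply an existing bucket
  }
}
```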
Binary file added eks/image.png
41 changes: 41 additions & 0 deletions eks/main.tf
@@ -0,0 +1,41 @@
################################################################################
# VPC Module
################################################################################

module "vpc" {
source = "./modules/vpc"

main-region = var.main-region
profile = var.profile
}

################################################################################
# EKS Cluster Module
################################################################################

module "eks" {
source = "./modules/eks-cluster"

main-region = var.main-region
profile = var.profile
rolearn = var.rolearn

vpc_id = module.vpc.vpc_id
private_subnets = module.vpc.private_subnets
}

################################################################################
# AWS ALB Controller
################################################################################

module "aws_alb_controller" {
source = "./modules/aws-alb-controller"

main-region = var.main-region
env_name = var.env_name
cluster_name = var.cluster_name

vpc_id = module.vpc.vpc_id
oidc_provider_arn = module.eks.oidc_provider_arn
}
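The root module pulls five values from variables. A minimal, hypothetical `terraform.tfvars` sketch; every value below is a placeholder, not part of this commit:

```hcl
# terraform.tfvars -- placeholder values; adjust for your account
main-region  = "us-east-1"
profile      = "default"
rolearn      = "arn:aws:iam::123456789012:role/eks-admin" # hypothetical role ARN
env_name     = "dev"
cluster_name = "tf-cluster"
```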

81 changes: 81 additions & 0 deletions eks/modules/aws-alb-controller/main.tf
@@ -0,0 +1,81 @@
################################################################################
# Load Balancer Role
################################################################################

module "lb_role" {
source = "terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks"

role_name = "${var.env_name}_eks_lb"
attach_load_balancer_controller_policy = true

oidc_providers = {
main = {
provider_arn = var.oidc_provider_arn
namespace_service_accounts = ["kube-system:aws-load-balancer-controller"]
}
}
}

################################################################################
# Aws Load balancer Controller Service Account
################################################################################

resource "kubernetes_service_account" "service-account" {
metadata {
name = "aws-load-balancer-controller"
namespace = "kube-system"
labels = {
"app.kubernetes.io/name" = "aws-load-balancer-controller"
"app.kubernetes.io/component" = "controller"
}
annotations = {
"eks.amazonaws.com/role-arn" = module.lb_role.iam_role_arn
"eks.amazonaws.com/sts-regional-endpoints" = "true"
}
}
}

################################################################################
# Install Load Balancer Controler With Helm
################################################################################

resource "helm_release" "lb" {
name = "aws-load-balancer-controller"
repository = "https://aws.github.io/eks-charts"
chart = "aws-load-balancer-controller"
namespace = "kube-system"
depends_on = [
kubernetes_service_account.service-account
]

set {
name = "region"
value = var.main-region
}

set {
name = "vpcId"
value = var.vpc_id
}

set {
name = "image.repository"
value = "602401143452.dkr.ecr.${var.main-region}.amazonaws.com/amazon/aws-load-balancer-controller"
}

set {
name = "serviceAccount.create"
value = "false"
}

set {
name = "serviceAccount.name"
value = "aws-load-balancer-controller"
}

set {
name = "clusterName"
value = var.cluster_name
}
}
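The `kubernetes_service_account` and `helm_release` resources above presuppose that the kubernetes and helm providers are pointed at the new cluster. A minimal sketch of that wiring, assuming the "tf-cluster" name from the eks-cluster module; the data-source labels are hypothetical and not part of this commit:

```hcl
# Hypothetical provider wiring for the kubernetes and helm resources above.
data "aws_eks_cluster" "this" {
  name = "tf-cluster"
}

data "aws_eks_cluster_auth" "this" {
  name = "tf-cluster"
}

provider "kubernetes" {
  host                   = data.aws_eks_cluster.this.endpoint
  cluster_ca_certificate = base64decode(data.aws_eks_cluster.this.certificate_authority[0].data)
  token                  = data.aws_eks_cluster_auth.this.token
}

provider "helm" {
  kubernetes {
    host                   = data.aws_eks_cluster.this.endpoint
    cluster_ca_certificate = base64decode(data.aws_eks_cluster.this.certificate_authority[0].data)
    token                  = data.aws_eks_cluster_auth.this.token
  }
}
```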

29 changes: 29 additions & 0 deletions eks/modules/aws-alb-controller/variables.tf
@@ -0,0 +1,29 @@
################################################################################
# General Variables from root module
################################################################################

variable "main-region" {
type = string
}

variable "env_name" {
type = string
}

variable "cluster_name" {
type = string
}

################################################################################
# Variables from other Modules
################################################################################

variable "vpc_id" {
description = "VPC ID which Load balancers will be deployed in"
type = string
}

variable "oidc_provider_arn" {
description = "OIDC Provider ARN used for IRSA "
type = string
}
82 changes: 82 additions & 0 deletions eks/modules/eks-cluster/main.tf
@@ -0,0 +1,82 @@
################################################################################
# EKS Cluster
################################################################################

module "eks" {
source = "terraform-aws-modules/eks/aws"
version = "~> 19.0"

cluster_name = "tf-cluster"
cluster_version = "1.27"

providers = {
aws = aws.us-east-1
}

cluster_endpoint_public_access = true

create_kms_key = false
create_cloudwatch_log_group = false
cluster_encryption_config = {}

cluster_addons = {
coredns = {
most_recent = true
}
kube-proxy = {
most_recent = true
}
vpc-cni = {
most_recent = true
}
aws-ebs-csi-driver = {
most_recent = true
}
}

vpc_id = var.vpc_id
subnet_ids = var.private_subnets
control_plane_subnet_ids = var.private_subnets

# EKS Managed Node Group(s)
eks_managed_node_group_defaults = {
instance_types = ["m5.xlarge", "m5.large", "t3.medium"]
iam_role_additional_policies = {
AmazonEBSCSIDriverPolicy = "arn:aws:iam::aws:policy/service-role/AmazonEBSCSIDriverPolicy"
}
}

eks_managed_node_groups = {
blue = {
min_size = 1
max_size = 10
desired_size = 3
}
green = {
min_size = 1
max_size = 10
desired_size = 3

instance_types = ["t3.medium"]
capacity_type = "ON_DEMAND"
}
}

# aws-auth configmap
# manage_aws_auth_configmap = true
#create_aws_auth_configmap = true

aws_auth_roles = [
{
rolearn = var.rolearn
username = "skanyi"
groups = ["system:masters"]
},
]

tags = {
env = "dev"
terraform = "true"
}
}
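The root module reads `module.eks.oidc_provider_arn`, so this eks-cluster module presumably re-exports the wrapped module's output. An outputs.tf along these lines (not shown in this diff) would close the loop:

```hcl
# Re-export the cluster's OIDC provider ARN for the aws-alb-controller
# module, which uses it to set up IRSA for the load balancer controller.
output "oidc_provider_arn" {
  description = "ARN of the cluster's IAM OIDC provider"
  value       = module.eks.oidc_provider_arn
}
```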
