Skip to content

Commit

Permalink
Talos k8s with AWS CCM support
Browse files Browse the repository at this point in the history
  • Loading branch information
erikvveen committed Jan 10, 2025
1 parent c52c1e1 commit b4e94b7
Show file tree
Hide file tree
Showing 11 changed files with 121 additions and 37 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/conformance-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ jobs:
kube-proxy-replacement: "true"
socketlb: false
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand Down Expand Up @@ -88,6 +89,7 @@ jobs:
--set ipv4.enabled=${{ matrix.config.ipv4 }} \
--set ipv6.enabled=${{ matrix.config.ipv6 }} \
--set bpf.masquerade=${{ matrix.config.bpf-masquerade }} \
--set bpf.hostLegacyRouting=${{ matrix.config.bpf-hostlegacyrouting }} \
--set kubeProxyReplacement=${{ matrix.config.kube-proxy-replacement }} \
--set socketLB.enabled=${{ matrix.config.socketlb }} \
--set ipam.mode=${{ matrix.config.ipam-mode }} \
Expand Down
7 changes: 7 additions & 0 deletions .github/workflows/conformance.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ jobs:
kube-proxy-replacement: "true"
socketlb: false
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand All @@ -49,6 +50,7 @@ jobs:
kube-proxy-replacement: "true"
socketlb: false
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand All @@ -63,6 +65,7 @@ jobs:
kube-proxy-replacement: "false"
socketlb: true
bpf-masquerade: false
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand All @@ -77,6 +80,7 @@ jobs:
kube-proxy-replacement: "false"
socketlb: true
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand All @@ -90,6 +94,7 @@ jobs:
kube-proxy-replacement: "true"
socketlb: false
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'cluster-pool'
ipv4: true
ipv6: false
Expand All @@ -104,6 +109,7 @@ jobs:
kube-proxy-replacement: "true"
socketlb: false
bpf-masquerade: true
bpf-hostlegacyrouting: true
ipam-mode: 'kubernetes'
ipv4: true
ipv6: false
Expand Down Expand Up @@ -163,6 +169,7 @@ jobs:
--set ipv4.enabled=${{ matrix.config.ipv4 }} \
--set ipv6.enabled=${{ matrix.config.ipv6 }} \
--set bpf.masquerade=${{ matrix.config.bpf-masquerade }} \
--set bpf.hostLegacyRouting=${{ matrix.config.bpf-hostlegacyrouting }} \
--set kubeProxyReplacement=${{ matrix.config.kube-proxy-replacement }} \
--set socketLB.enabled=${{ matrix.config.socketlb }} \
--set ipam.mode=${{ matrix.config.ipam-mode }} \
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/terraform.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: terraform fmt
uses: dflook/terraform-fmt-check@c9309dc072b71dded0f23b29e3ffd4406e27c078
uses: dflook/terraform-fmt-check@2bf43ab3454607c0f8567abc333f8208447ec03f
with:
path: .
- name: terraform fmt
uses: dflook/terraform-fmt-check@c9309dc072b71dded0f23b29e3ffd4406e27c078
uses: dflook/terraform-fmt-check@2bf43ab3454607c0f8567abc333f8208447ec03f
with:
path: example
docs:
Expand Down
16 changes: 11 additions & 5 deletions 00-locals.tf
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,21 @@ locals {

common_config_patch = {
cluster = {

id = var.cluster_id,
clusterName = var.cluster_name,
apiServer = {
certSANs = [
module.elb_k8s_elb.elb_dns_name
]
module.elb_k8s_elb.elb_dns_name,
],
extraArgs = {
enable-admission-plugins = var.admission_plugins
}
},
controllerManager = {
extraArgs = {
allocate-node-cidrs = var.allocate_node_cidrs
cloud-provider = "external"
}
},
network = {
Expand All @@ -59,17 +64,18 @@ locals {
registerWithFQDN = true
},
certSANs = [
module.elb_k8s_elb.elb_dns_name
module.elb_k8s_elb.elb_dns_name,
],
kubelet = {
extraArgs = {
rotate-server-certificates = true
cloud-provider = "external"
}
}
}
}

# Used to configure Cilium Kube-Proxy replacement
# Used to configure Cilium Kube-Proxy replacement
config_cilium_patch = {
cluster = {
proxy = {
Expand All @@ -94,4 +100,4 @@ locals {
"kubernetes.io/cluster/${var.cluster_name}" = "owned"
}

}
}
2 changes: 1 addition & 1 deletion 00-terraform.tf
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ terraform {
}
talos = {
source = "siderolabs/talos"
version = "0.6.1"
version = "0.7.0"
}
random = {
source = "hashicorp/random"
Expand Down
33 changes: 31 additions & 2 deletions 00-variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,29 @@ variable "cluster_id" {
type = number
}

variable "iam_instance_profile_controller" {
description = "IAM instance profile to attach to the controller instances."
type = string
default = null
}

variable "iam_instance_profile_worker" {
description = "IAM instance profile to attach to the worker instances."
type = string
default = null
}

variable "metadata_options" {
  description = "Instance metadata service (IMDS) options to apply to the instances."
type = map(string)
default = {
http_endpoint = "enabled"
http_tokens = "required"
http_put_response_hop_limit = 2
instance_metadata_tags = "disabled"
}
}

variable "cluster_architecture" {
default = "amd64"
description = "Cluster architecture. Choose 'arm64' or 'amd64'. If you choose 'arm64', ensure to also override the control_plane.instance_type and worker_groups.instance_type with an ARM64-based instance type like 'm7g.large'."
Expand Down Expand Up @@ -60,7 +83,7 @@ variable "allow_workload_on_cp_nodes" {
}

variable "talos_version" {
default = "v1.8.0"
default = "v1.9.1"
description = "Talos version to use for the cluster, if not set, the newest Talos version. Check https://github.com/siderolabs/talos/releases for available releases."
type = string
validation {
Expand Down Expand Up @@ -142,4 +165,10 @@ variable "config_patch_files" {
default = []
description = "Path to talos config path files that applies to all nodes"
type = list(string)
}
}

variable "admission_plugins" {
  description = "Comma-separated list of admission plugins to enable"
type = string
default = "MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ServiceAccount"
}
40 changes: 34 additions & 6 deletions 03-talos.tf
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ module "talos_control_plane_nodes" {
subnet_id = element(data.aws_subnets.public.ids, count.index)
associate_public_ip_address = true
tags = merge(var.tags, local.cluster_required_tags)
metadata_options = var.metadata_options
iam_instance_profile = var.iam_instance_profile_controller

vpc_security_group_ids = [module.cluster_sg.security_group_id]

Expand All @@ -32,6 +34,8 @@ module "talos_worker_group" {
subnet_id = element(data.aws_subnets.public.ids, tonumber(trimprefix(each.key, "${each.value.name}.")))
associate_public_ip_address = true
tags = merge(each.value.tags, var.tags, local.cluster_required_tags)
metadata_options = var.metadata_options
iam_instance_profile = var.iam_instance_profile_worker

vpc_security_group_ids = [module.cluster_sg.security_group_id]

Expand All @@ -45,6 +49,8 @@ module "talos_worker_group" {
resource "talos_machine_secrets" "this" {}

data "talos_machine_configuration" "controlplane" {
for_each = { for index in range(var.controlplane_count) : index => index }

cluster_name = var.cluster_name
cluster_endpoint = "https://${module.elb_k8s_elb.elb_dns_name}"
machine_type = "controlplane"
Expand All @@ -55,6 +61,18 @@ data "talos_machine_configuration" "controlplane" {
local.config_patches_common,
[yamlencode(local.common_config_patch)],
[yamlencode(local.config_cilium_patch)],
[yamlencode(
{
machine = {
kubelet = {
extraArgs = {
hostname-override = module.talos_control_plane_nodes[each.key].id
}
}
}
}
)
],
[for path in var.control_plane.config_patch_files : file(path)]
)
}
Expand All @@ -72,17 +90,28 @@ data "talos_machine_configuration" "worker_group" {
local.config_patches_common,
[yamlencode(local.common_config_patch)],
[yamlencode(local.config_cilium_patch)],
[yamlencode(
{
machine = {
kubelet = {
extraArgs = {
hostname-override = module.talos_worker_group[each.key].id
}
}
}
}
)
],
[for path in each.value.config_patch_files : file(path)]
)
}

resource "talos_machine_configuration_apply" "controlplane" {
count = var.controlplane_count

for_each = { for index, instance in module.talos_control_plane_nodes : index => instance }
client_configuration = talos_machine_secrets.this.client_configuration
machine_configuration_input = data.talos_machine_configuration.controlplane.machine_configuration
endpoint = module.talos_control_plane_nodes[count.index].public_ip
node = module.talos_control_plane_nodes[count.index].private_ip
machine_configuration_input = data.talos_machine_configuration.controlplane[each.key].machine_configuration
endpoint = module.talos_control_plane_nodes[each.key].public_ip
node = module.talos_control_plane_nodes[each.key].private_ip
}

resource "talos_machine_configuration_apply" "worker_group" {
Expand All @@ -105,7 +134,6 @@ resource "talos_machine_bootstrap" "this" {
data "talos_client_configuration" "this" {
cluster_name = var.cluster_name
client_configuration = talos_machine_secrets.this.client_configuration
endpoints = module.talos_control_plane_nodes.*.public_ip
}

resource "local_file" "talosconfig" {
Expand Down
30 changes: 17 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ module "talos" {
source = "git::https://github.com/isovalent/terraform-aws-talos?ref=<RELEASE_TAG>"
// Supported Talos versions (and therefore K8s versions) can be found here: https://github.com/siderolabs/talos/releases
talos_version = "v1.5.3"
kubernetes_version = "1.27.3"
talos_version = "v1.9.1"
kubernetes_version = "1.31.4"
cluster_name = "talos-cute"
region = "eu-west-1"
tags = local.tags
Expand All @@ -46,7 +46,7 @@ module "talos" {
| <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >= 1.4.0 |
| <a name="requirement_aws"></a> [aws](#requirement\_aws) | ~> 5.0 |
| <a name="requirement_random"></a> [random](#requirement\_random) | ~> 3.5 |
| <a name="requirement_talos"></a> [talos](#requirement\_talos) | 0.6.1 |
| <a name="requirement_talos"></a> [talos](#requirement\_talos) | 0.7.0 |

### Providers

Expand All @@ -56,7 +56,7 @@ module "talos" {
| <a name="provider_local"></a> [local](#provider\_local) | n/a |
| <a name="provider_null"></a> [null](#provider\_null) | n/a |
| <a name="provider_random"></a> [random](#provider\_random) | ~> 3.5 |
| <a name="provider_talos"></a> [talos](#provider\_talos) | 0.6.1 |
| <a name="provider_talos"></a> [talos](#provider\_talos) | 0.7.0 |

### Modules

Expand All @@ -76,23 +76,24 @@ module "talos" {
| [local_file.talosconfig](https://registry.terraform.io/providers/hashicorp/local/latest/docs/resources/file) | resource |
| [null_resource.wait_for_public_subnets](https://registry.terraform.io/providers/hashicorp/null/latest/docs/resources/resource) | resource |
| [random_string.workspace_id](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/string) | resource |
| [talos_cluster_kubeconfig.this](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/resources/cluster_kubeconfig) | resource |
| [talos_machine_bootstrap.this](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/resources/machine_bootstrap) | resource |
| [talos_machine_configuration_apply.controlplane](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/resources/machine_configuration_apply) | resource |
| [talos_machine_configuration_apply.worker_group](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/resources/machine_configuration_apply) | resource |
| [talos_machine_secrets.this](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/resources/machine_secrets) | resource |
| [talos_cluster_kubeconfig.this](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/resources/cluster_kubeconfig) | resource |
| [talos_machine_bootstrap.this](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/resources/machine_bootstrap) | resource |
| [talos_machine_configuration_apply.controlplane](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/resources/machine_configuration_apply) | resource |
| [talos_machine_configuration_apply.worker_group](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/resources/machine_configuration_apply) | resource |
| [talos_machine_secrets.this](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/resources/machine_secrets) | resource |
| [aws_ami.talos](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ami) | data source |
| [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
| [aws_subnets.public](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/subnets) | data source |
| [aws_vpc.vpc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/vpc) | data source |
| [talos_client_configuration.this](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/data-sources/client_configuration) | data source |
| [talos_machine_configuration.controlplane](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/data-sources/machine_configuration) | data source |
| [talos_machine_configuration.worker_group](https://registry.terraform.io/providers/siderolabs/talos/0.6.1/docs/data-sources/machine_configuration) | data source |
| [talos_client_configuration.this](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/data-sources/client_configuration) | data source |
| [talos_machine_configuration.controlplane](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/data-sources/machine_configuration) | data source |
| [talos_machine_configuration.worker_group](https://registry.terraform.io/providers/siderolabs/talos/0.7.0/docs/data-sources/machine_configuration) | data source |

### Inputs

| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| <a name="input_admission_plugins"></a> [admission\_plugins](#input\_admission\_plugins) | Comma-separated list of admission plugins to enable | `string` | `"MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ServiceAccount"` | no |
| <a name="input_allocate_node_cidrs"></a> [allocate\_node\_cidrs](#input\_allocate\_node\_cidrs) | Whether to assign PodCIDRs to Node resources or not. Only needed in case Cilium runs in 'kubernetes' IPAM mode. | `bool` | `true` | no |
| <a name="input_allow_workload_on_cp_nodes"></a> [allow\_workload\_on\_cp\_nodes](#input\_allow\_workload\_on\_cp\_nodes) | Allow workloads on CP nodes or not. Allowing it means Talos Linux default taints are removed from CP nodes. More details here: https://www.talos.dev/v1.5/talos-guides/howto/workers-on-controlplane/ | `bool` | `false` | no |
| <a name="input_cluster_architecture"></a> [cluster\_architecture](#input\_cluster\_architecture) | Cluster architecture. Choose 'arm64' or 'amd64'. If you choose 'arm64', ensure to also override the control\_plane.instance\_type and worker\_groups.instance\_type with an ARM64-based instance type like 'm7g.large'. | `string` | `"amd64"` | no |
Expand All @@ -102,14 +103,17 @@ module "talos" {
| <a name="input_control_plane"></a> [control\_plane](#input\_control\_plane) | Info for control plane that will be created | <pre>object({<br/> instance_type = optional(string, "m5.large")<br/> config_patch_files = optional(list(string), [])<br/> tags = optional(map(string), {})<br/> })</pre> | `{}` | no |
| <a name="input_controlplane_count"></a> [controlplane\_count](#input\_controlplane\_count) | Defines how many controlplane nodes are deployed in the cluster. | `number` | `3` | no |
| <a name="input_disable_kube_proxy"></a> [disable\_kube\_proxy](#input\_disable\_kube\_proxy) | Whether to deploy Kube-Proxy or not. By default, KP shouldn't be deployed. | `bool` | `true` | no |
| <a name="input_iam_instance_profile_controller"></a> [iam\_instance\_profile\_controller](#input\_iam\_instance\_profile\_controller) | IAM instance profile to attach to the controller instances. | `string` | `null` | no |
| <a name="input_iam_instance_profile_worker"></a> [iam\_instance\_profile\_worker](#input\_iam\_instance\_profile\_worker) | IAM instance profile to attach to the worker instances. | `string` | `null` | no |
| <a name="input_kubernetes_api_allowed_cidr"></a> [kubernetes\_api\_allowed\_cidr](#input\_kubernetes\_api\_allowed\_cidr) | The CIDR from which to allow to access the Kubernetes API | `string` | `"0.0.0.0/0"` | no |
| <a name="input_kubernetes_version"></a> [kubernetes\_version](#input\_kubernetes\_version) | Kubernetes version to use for the Talos cluster, if not set, the K8s version shipped with the selected Talos version will be used. Check https://www.talos.dev/latest/introduction/support-matrix/. For example '1.29.3'. | `string` | `""` | no |
| <a name="input_metadata_options"></a> [metadata\_options](#input\_metadata\_options) | Instance metadata service (IMDS) options to apply to the instances. | `map(string)` | <pre>{<br/> "http_endpoint": "enabled",<br/> "http_put_response_hop_limit": 2,<br/> "http_tokens": "required",<br/> "instance_metadata_tags": "disabled"<br/>}</pre> | no |
| <a name="input_pod_cidr"></a> [pod\_cidr](#input\_pod\_cidr) | The CIDR to use for Pods. Only required in case allocate\_node\_cidrs is set to 'true'. Otherwise, simply configure it inside Cilium's Helm values. | `string` | `"100.64.0.0/14"` | no |
| <a name="input_region"></a> [region](#input\_region) | The region in which to create the Talos Linux cluster. | `string` | n/a | yes |
| <a name="input_service_cidr"></a> [service\_cidr](#input\_service\_cidr) | The CIDR to use for services. | `string` | `"100.68.0.0/16"` | no |
| <a name="input_tags"></a> [tags](#input\_tags) | The set of tags to place on the cluster. | `map(string)` | n/a | yes |
| <a name="input_talos_api_allowed_cidr"></a> [talos\_api\_allowed\_cidr](#input\_talos\_api\_allowed\_cidr) | The CIDR from which to allow to access the Talos API | `string` | `"0.0.0.0/0"` | no |
| <a name="input_talos_version"></a> [talos\_version](#input\_talos\_version) | Talos version to use for the cluster, if not set, the newest Talos version. Check https://github.com/siderolabs/talos/releases for available releases. | `string` | `"v1.8.0"` | no |
| <a name="input_talos_version"></a> [talos\_version](#input\_talos\_version) | Talos version to use for the cluster, if not set, the newest Talos version. Check https://github.com/siderolabs/talos/releases for available releases. | `string` | `"v1.9.1"` | no |
| <a name="input_vpc_cidr"></a> [vpc\_cidr](#input\_vpc\_cidr) | The IPv4 CIDR block for the VPC. | `string` | `"10.0.0.0/16"` | no |
| <a name="input_vpc_id"></a> [vpc\_id](#input\_vpc\_id) | ID of the VPC where to place the VMs. | `string` | n/a | yes |
| <a name="input_worker_groups"></a> [worker\_groups](#input\_worker\_groups) | List of node worker node groups to create | <pre>list(object({<br/> name = string<br/> instance_type = optional(string, "m5.large")<br/> config_patch_files = optional(list(string), [])<br/> tags = optional(map(string), {})<br/> }))</pre> | <pre>[<br/> {<br/> "name": "default"<br/> }<br/>]</pre> | no |
Expand Down
Loading

0 comments on commit b4e94b7

Please sign in to comment.