Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

when updating kubectl, add current caller identity to eksctl user mapping #14

Merged
merged 2 commits into from
Feb 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions aws/391835788720/eks_users
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
arn:aws:iam::391835788720:user/ossci
arn:aws:iam::391835788720:role/ossci_gha_terraform
arn:aws:iam::391835788720:user/tha@linuxfoundation.org
arn:aws:iam::391835788720:user/jschmidt@meta.com
arn:aws:iam::391835788720:user/baibak@meta.com
19 changes: 14 additions & 5 deletions modules/arc/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,24 @@ delete-stale-rds:
../../venv/bin/python3 ../../scripts/kubectl_delete_rds_resources.py \
--rds-state-file $(K8S_RDS_STATE_FILE)

.PHONY: update-kubectl
update-kubectl:
# Grant cluster-admin (system:masters) access on $(EKS_CLUSTER_NAME) to every
# IAM principal ARN listed, one per line, in the file $(EKS_USERS_PATH).
# Set NO_EKSCTL=true in the environment to skip the eksctl mapping step.
# NOTE(review): the two hard-coded ARNs below also appear in the per-account
# eks_users file, so they are presumably redundant for account 391835788720 —
# kept as a best-effort step (failures ignored) to preserve behavior; confirm
# before removing.
.PHONY: add-eksctl-identity-mappings
add-eksctl-identity-mappings:
	[ "$(EKS_USERS_PATH)" != "" ] || (echo "EKS_USERS_PATH not set"; exit 1)
	[ "$(EKS_CLUSTER_NAME)" != "" ] || (echo "EKS_CLUSTER_NAME not set"; exit 1)
	aws eks update-kubeconfig --region "us-east-1" --name "$(EKS_CLUSTER_NAME)"
	if [ "$${NO_EKSCTL}" != "true" ] ; then \
		eksctl create iamidentitymapping --cluster '$(EKS_CLUSTER_NAME)' --arn 'arn:aws:iam::391835788720:user/ossci' --group 'system:masters' --no-duplicate-arns --username 'admin-user1' ; \
		eksctl create iamidentitymapping --cluster '$(EKS_CLUSTER_NAME)' --arn 'arn:aws:iam::391835788720:role/ossci_gha_terraform' --group 'system:masters' --no-duplicate-arns --username 'admin-user1' ; \
		while read -r line ; do \
			[ -n "$$line" ] || continue ; \
			eksctl create iamidentitymapping --cluster '$(EKS_CLUSTER_NAME)' --arn "$$line" --group 'system:masters' --no-duplicate-arns --username 'admin-user1' || exit 1 ; \
		done < "$(EKS_USERS_PATH)" ; \
	fi

# Refresh the local kubeconfig so kubectl talks to $(EKS_CLUSTER_NAME).
# EKS_REGION may be set by the caller; defaults to us-east-1 (the previous
# hard-coded value) for backward compatibility.
.PHONY: do-update-kubectl
do-update-kubectl:
	[ "$(EKS_CLUSTER_NAME)" != "" ] || (echo "EKS_CLUSTER_NAME not set"; exit 1)
	aws eks update-kubeconfig --region "$(or $(EKS_REGION),us-east-1)" --name "$(EKS_CLUSTER_NAME)"

# Convenience aggregate: refresh the kubeconfig and (re)apply the eksctl IAM
# identity mappings. Requires EKS_CLUSTER_NAME and EKS_USERS_PATH to be set.
.PHONY: update-kubectl
update-kubectl: do-update-kubectl add-eksctl-identity-mappings

.PHONY: add-helm-repository
add-helm-repository: update-kubectl
helm repo add actions-runner-controller https://actions-runner-controller.github.io/actions-runner-controller
Expand Down
18 changes: 12 additions & 6 deletions scripts/module_makefile
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,7 @@ arc-canary: inventory/eks/canary_cluster_name inventory/eks/canary_cluster_confi
GHA_INST_ID=46578864 \
GHA_PRIVATE_KEY_VAR=GHA_PRIVATE_KEY_CANARY \
EKS_ENVIRONMENT=canary \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
PROJECTTAG=gi-ci-canary \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/canary_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
Expand All @@ -176,6 +177,7 @@ karpenter-autoscaler-canary: inventory/eks/canary_cluster_name inventory/eks/can
echo "OPS: clean-k8s-rds-state setup-karpenter-autoscaler delete-stale-rds" ; \
$(MAKE) EKS_CLUSTER_NAME=$$p \
EKS_ENVIRONMENT=canary \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
PROJECTTAG=gi-ci-canary \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/canary_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
Expand All @@ -195,6 +197,7 @@ k8s-runner-scaler-canary: inventory/eks/canary_cluster_name inventory/eks/canary
GHA_INST_ID=46578864 \
GHA_PRIVATE_KEY_VAR=GHA_PRIVATE_KEY_CANARY \
EKS_ENVIRONMENT=canary \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
PROJECTTAG=gi-ci-canary \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/canary_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
Expand All @@ -215,6 +218,7 @@ arc-vanguard: inventory/eks/vanguard_cluster_name inventory/eks/vanguard_cluster
GHA_INST_ID=38323217 \
GHA_PRIVATE_KEY_VAR=GHA_PRIVATE_KEY \
EKS_ENVIRONMENT=vanguard \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
PROJECTTAG=gi-ci-vanguard \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/vanguard_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
Expand All @@ -232,16 +236,17 @@ arc-vanguard-off: inventory/eks/vanguard_cluster_name inventory/eks/vanguard_clu
echo "==== Cluster $$p ============================================" ; \
echo "OPS: clean-k8s-rds-state setup-karpenter-autoscaler k8s-runner-scaler delete-stale-rds" ; \
$(MAKE) EKS_CLUSTER_NAME=$$p \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/vanguard_cluster_config \
EKS_ENVIRONMENT=vanguard \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
GHA_ID=343735 \
GHA_INST_ID=38323217 \
GHA_PRIVATE_KEY_VAR=GHA_PRIVATE_KEY \
EKS_ENVIRONMENT=vanguard \
MAXRUNNERS=0 \
MINRUNNERS=0 \
PROJECTTAG=gi-ci-vanguard \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/vanguard_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
RUNNERSCOPE=pytorch-org \
MINRUNNERS=0 \
MAXRUNNERS=0 \
clean-k8s-rds-state install-arc setup-karpenter-autoscaler k8s-runner-scaler delete-stale-rds || exit 1 ; \
done

Expand All @@ -258,6 +263,7 @@ arc-prod: inventory/eks/prod_cluster_name inventory/eks/prod_cluster_config $(PR
GHA_INST_ID=38323217 \
GHA_PRIVATE_KEY_VAR=GHA_PRIVATE_KEY \
EKS_ENVIRONMENT=prod \
EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users \
PROJECTTAG=gi-ci-prod \
CLUSTER_CONFIG_FILE=$(PROHOME)/aws/$(ACCOUNT)/$(REGION)/inventory/eks/prod_cluster_config \
ARC_CFG_FILE_FOLDER=$(PROHOME)/aws/$(ACCOUNT)/$(REGION) \
Expand All @@ -267,4 +273,4 @@ arc-prod: inventory/eks/prod_cluster_name inventory/eks/prod_cluster_config $(PR

# Point kubectl at cluster $(CLUSTER): refreshes the kubeconfig and re-applies
# the IAM identity mappings listed in aws/$(ACCOUNT)/eks_users.
# `&&` (not `;`) so a failed `cd` does not run make in the wrong directory.
.PHONY: eks-use-cluster
eks-use-cluster:
	cd $(PROHOME)/modules/arc && $(MAKE) EKS_USERS_PATH=$(PROHOME)/aws/$(ACCOUNT)/eks_users EKS_CLUSTER_NAME=$(CLUSTER) update-kubectl
Loading