1st lesson

Penguin Grape 2025-10-12 17:08:20 +03:00
parent f9ffc53a86
commit d5ef4add57
13 changed files with 400 additions and 0 deletions

@@ -0,0 +1,15 @@
apiVersion: batch/v1
kind: CronJob
metadata:
  name: backup-cronjob
spec:
  schedule: "*/2 * * * *"
  jobTemplate:
    spec:
      template:
        spec:
          containers:
          - name: backup
            image: docker.io/busybox:latest
            command: ["/bin/sh", "-c", "echo 'Backup complete' && sleep 5"]
          restartPolicy: Never
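A quick way to exercise this CronJob without waiting for the */2 schedule (task3.txt applies it as backup-cronjob.yaml; backup-manual below is an arbitrary name):

# trigger a one-off Job from the CronJob's template and read its output
kubectl create job backup-manual --from=cronjob/backup-cronjob
kubectl wait --for=condition=complete job/backup-manual --timeout=60s
kubectl logs -l job-name=backup-manual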

@@ -0,0 +1,12 @@
apiVersion: batch/v1
kind: Job
metadata:
  name: data-process-job
spec:
  template:
    spec:
      containers:
      - name: data-process
        image: docker.io/busybox:latest
        command: ["/bin/sh", "-c", "echo 'Data processed' && sleep 5"]
      restartPolicy: Never
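Once applied (task3.txt uses the file name data-process-job.yaml), a rough way to wait for completion and pull the output:

# block until the Job reports the Complete condition, then read the pod log via its job-name label
kubectl wait --for=condition=complete job/data-process-job --timeout=120s
kubectl logs -l job-name=data-process-job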

@@ -0,0 +1,23 @@
apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: fluentd
  labels:
    app: fluentd
spec:
  selector:
    matchLabels:
      app: fluentd
  template:
    metadata:
      labels:
        app: fluentd
    spec:
      containers:
      - name: fluentd
        image: docker.io/fluent/fluentd:latest
        resources:
          limits:
            memory: "200Mi"
          requests:
            memory: "100Mi"
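A DaemonSet runs one fluentd pod per schedulable node; in task3.txt that is three pods, presumably only on the workers, since the control-plane nodes keep their default NoSchedule taint and this manifest adds no toleration. A quick check:

# desired/current/ready counts, then the node each pod landed on
kubectl get daemonset fluentd
kubectl get pods -l app=fluentd -o wide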

@@ -0,0 +1,26 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: nginx-deployment
  labels:
    app: nginx
spec:
  replicas: 3
  selector:
    matchLabels:
      app: nginx
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxSurge: 1
      maxUnavailable: 1
  template:
    metadata:
      labels:
        app: nginx
    spec:
      containers:
      - name: nginx
        image: docker.io/nginx:1.23.3
        ports:
        - containerPort: 80

@@ -0,0 +1,26 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: nginx-deployment
  labels:
    app: nginx
spec:
  replicas: 3
  selector:
    matchLabels:
      app: nginx
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxSurge: 1
      maxUnavailable: 1
  template:
    metadata:
      labels:
        app: nginx
    spec:
      containers:
      - name: nginx
        image: docker.io/nginx:latest
        ports:
        - containerPort: 80
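The two Deployment manifests above differ only in the image tag (docker.io/nginx:1.23.3 vs docker.io/nginx:latest); task2.txt applies them as nginx-deployment.yaml and nginx-deployment-update.yaml to demonstrate a rolling update with maxSurge: 1 / maxUnavailable: 1. A sketch of the equivalent imperative route:

# change only the container image, watch the rollout, and roll back if needed
kubectl set image deployment/nginx-deployment nginx=docker.io/nginx:1.23.3
kubectl rollout status deployment/nginx-deployment
kubectl rollout undo deployment/nginx-deployment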

@@ -0,0 +1,12 @@
apiVersion: v1
kind: Pod
metadata:
  name: nginx-pod
  labels:
    app: nginx
spec:
  containers:
  - name: nginx
    image: docker.io/nginx:latest
    ports:
    - containerPort: 80
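Besides the port-forward check shown in task1.txt, a few generic ways to inspect the pod:

kubectl get pod nginx-pod -o wide        # node and pod IP
kubectl logs -f nginx-pod                # follow the nginx access/error log
kubectl exec -it nginx-pod -- /bin/sh    # shell inside the container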

@@ -0,0 +1,24 @@
apiVersion: apps/v1
kind: ReplicaSet
metadata:
  name: nginx-replicaset
  labels:
    app: nginx-rs
spec:
  replicas: 5
  selector:
    matchLabels:
      app: nginx-rs
  template:
    metadata:
      labels:
        app: nginx-rs
    spec:
      containers:
      - name: nginx
        image: docker.io/nginx:latest
        ports:
        - containerPort: 8080
        env:
        - name: NGINX_PORT
          value: "8080"
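Note that NGINX_PORT is only an environment variable and containerPort is informational: the stock nginx image keeps listening on 80 unless a config template under /etc/nginx/templates consumes that variable, which is why the Service below targets port 80. Scaling the ReplicaSet is a one-liner:

# adjust the replica count and confirm it converges
kubectl scale replicaset nginx-replicaset --replicas=3
kubectl get replicaset nginx-replicaset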

@@ -0,0 +1,12 @@
apiVersion: v1
kind: Service
metadata:
  name: nginx-rs-service
spec:
  selector:
    app: nginx-rs
  ports:
  - protocol: TCP
    port: 8080
    targetPort: 80
  type: ClusterIP
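This ClusterIP Service exposes port 8080 and forwards to port 80 on every pod labelled app: nginx-rs. A quick sanity check that it actually picked up the ReplicaSet pods (the Endpoints object should list one <podIP>:80 entry per replica):

kubectl get endpoints nginx-rs-service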

@@ -0,0 +1,12 @@
apiVersion: v1
kind: Service
metadata:
  name: redis-service
spec:
  clusterIP: None
  selector:
    app: redis
  ports:
  - protocol: TCP
    port: 6379
    targetPort: 6379
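clusterIP: None makes this a headless Service: no virtual IP is allocated, and DNS returns the backing pods directly, which is what gives the StatefulSet below stable per-pod names of the form redis-0.redis-service.default.svc.cluster.local. A throwaway pod can confirm the records (dns-test is an arbitrary name):

kubectl run dns-test --rm -it --restart=Never --image=docker.io/busybox:latest -- nslookup redis-0.redis-service.default.svc.cluster.local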

@@ -0,0 +1,33 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: redis
  labels:
    app: redis
spec:
  serviceName: redis-service
  replicas: 3
  selector:
    matchLabels:
      app: redis
  template:
    metadata:
      labels:
        app: redis
    spec:
      containers:
      - name: redis
        image: docker.io/redis:latest
        ports:
        - containerPort: 6379
        volumeMounts:
        - name: redis-data
          mountPath: /data
  volumeClaimTemplates:
  - metadata:
      name: redis-data
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 1Gi
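Pods are created in order as redis-0, redis-1, redis-2, and each gets its own PersistentVolumeClaim from volumeClaimTemplates, named <template>-<pod> (redis-data-redis-0 and so on). Assuming the cluster has a default StorageClass so the claims can bind, a rough check:

kubectl get statefulset redis
kubectl get pods -l app=redis
kubectl get pvc | grep redis-data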

kubernetes/1/task1.txt Normal file
@@ -0,0 +1,107 @@
grape@lonetrek:~/Documents/devops/devops/kubernetes$ kubectl get nodes
NAME STATUS ROLES AGE VERSION
k8s-master0 Ready control-plane 6d23h v1.34.1
k8s-master1 Ready control-plane 6d23h v1.34.1
k8s-master2 Ready control-plane 6d23h v1.34.1
k8s-worker0 Ready <none> 6d23h v1.34.1
k8s-worker1 Ready <none> 6d23h v1.34.1
k8s-worker2 Ready <none> 6d23h v1.34.1
grape@lonetrek:~/Documents/devops/devops/kubernetes$ cat nginx-pod.yaml
apiVersion: v1
kind: Pod
metadata:
  name: nginx-pod
  labels:
    app: nginx
spec:
  containers:
  - name: nginx
    image: docker.io/nginx:latest
    ports:
    - containerPort: 80
grape@lonetrek:~/Documents/devops/devops/kubernetes$ kubectl port-forward pod/nginx-pod 8080:80 &
[1] 617748
grape@lonetrek:~/Documents/devops/devops/kubernetes$ Forwarding from 127.0.0.1:8080 -> 80
curl -IL localhost:8080
Handling connection for 8080
HTTP/1.1 200 OK
Server: nginx/1.29.2
Date: Thu, 09 Oct 2025 11:50:08 GMT
Content-Type: text/html
Content-Length: 615
Last-Modified: Tue, 07 Oct 2025 17:04:07 GMT
Connection: keep-alive
ETag: "68e54807-267"
Accept-Ranges: bytes
grape@lonetrek:~/Documents/devops/devops/kubernetes$ kubectl describe pod nginx-pod
Name: nginx-pod
Namespace: default
Priority: 0
Service Account: default
Node: k8s-worker2/192.168.88.229
Start Time: Thu, 09 Oct 2025 12:34:45 +0300
Labels: app=nginx
Annotations: <none>
Status: Running
IP: 10.244.5.5
IPs:
IP: 10.244.5.5
Containers:
nginx:
Container ID: cri-o://d4afd0e4cc9d1f475663fdccc6694b7fdd7841d212feccc5c03a435ecd729733
Image: docker.io/nginx:latest
Image ID: docker.io/library/nginx@sha256:35fabd32a7582bed5da0a40f41fd4984df7ddff32f81cd6be4614d07240ec115
Port: 80/TCP
Host Port: 0/TCP
State: Running
Started: Thu, 09 Oct 2025 12:34:55 +0300
Ready: True
Restart Count: 0
Environment: <none>
Mounts:
/var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-27hx7 (ro)
Conditions:
Type Status
PodReadyToStartContainers True
Initialized True
Ready True
ContainersReady True
PodScheduled True
Volumes:
kube-api-access-27hx7:
Type: Projected (a volume that contains injected data from multiple sources)
TokenExpirationSeconds: 3607
ConfigMapName: kube-root-ca.crt
Optional: false
DownwardAPI: true
QoS Class: BestEffort
Node-Selectors: <none>
Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events: <none>
grape@lonetrek:~/Documents/devops/devops/kubernetes$ kubectl logs nginx-pod
/docker-entrypoint.sh: /docker-entrypoint.d/ is not empty, will attempt to perform configuration
/docker-entrypoint.sh: Looking for shell scripts in /docker-entrypoint.d/
/docker-entrypoint.sh: Launching /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
10-listen-on-ipv6-by-default.sh: info: Getting the checksum of /etc/nginx/conf.d/default.conf
10-listen-on-ipv6-by-default.sh: info: Enabled listen on IPv6 in /etc/nginx/conf.d/default.conf
/docker-entrypoint.sh: Sourcing /docker-entrypoint.d/15-local-resolvers.envsh
/docker-entrypoint.sh: Launching /docker-entrypoint.d/20-envsubst-on-templates.sh
/docker-entrypoint.sh: Launching /docker-entrypoint.d/30-tune-worker-processes.sh
/docker-entrypoint.sh: Configuration complete; ready for start up
2025/10/09 09:34:55 [notice] 1#1: using the "epoll" event method
2025/10/09 09:34:55 [notice] 1#1: nginx/1.29.2
2025/10/09 09:34:55 [notice] 1#1: built by gcc 14.2.0 (Debian 14.2.0-19)
2025/10/09 09:34:55 [notice] 1#1: OS: Linux 6.8.0-84-generic
2025/10/09 09:34:55 [notice] 1#1: getrlimit(RLIMIT_NOFILE): 1048576:1048576
2025/10/09 09:34:55 [notice] 1#1: start worker processes
2025/10/09 09:34:55 [notice] 1#1: start worker process 24
2025/10/09 09:34:55 [notice] 1#1: start worker process 25
2025/10/09 09:34:55 [notice] 1#1: start worker process 26
2025/10/09 09:34:55 [notice] 1#1: start worker process 27
127.0.0.1 - - [09/Oct/2025:09:35:17 +0000] "GET / HTTP/1.1" 200 615 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0" "-"
2025/10/09 09:35:17 [error] 24#24: *1 open() "/usr/share/nginx/html/favicon.ico" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: "GET /favicon.ico HTTP/1.1", host: "127.0.0.1:8080", referrer: "http://127.0.0.1:8080/"
127.0.0.1 - - [09/Oct/2025:09:35:17 +0000] "GET /favicon.ico HTTP/1.1" 404 153 "http://127.0.0.1:8080/" "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0" "-"
127.0.0.1 - - [09/Oct/2025:11:50:08 +0000] "HEAD / HTTP/1.1" 200 0 "-" "curl/8.5.0" "-"
grape@lonetrek:~/Documents/devops/devops/kubernetes$
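The port-forward started earlier is still running as background job [1]; once the check is done it can be stopped with shell job control:

kill %1    # terminate the backgrounded kubectl port-forward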

kubernetes/1/task2.txt Normal file
@@ -0,0 +1,59 @@
grape@lonetrek:~/Documents/devops/devops/kubernetes$ kubectl apply -f nginx-deployment.yaml
deployment.apps/nginx-deployment created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get pods
NAME READY STATUS RESTARTS AGE
nginx-deployment-7964487d9f-6lx6z 1/1 Running 0 8m26s
nginx-deployment-7964487d9f-8thgv 1/1 Running 0 8m26s
nginx-deployment-7964487d9f-lh2lp 1/1 Running 0 8m26s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get deployment nginx-deployment
NAME READY UP-TO-DATE AVAILABLE AGE
nginx-deployment 3/3 3 3 9m10s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f nginx-deployment-update.yaml
deployment.apps/nginx-deployment configured
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl rollout status deployment/nginx-deployment
Waiting for deployment "nginx-deployment" rollout to finish: 2 out of 3 new replicas have been updated...
Waiting for deployment "nginx-deployment" rollout to finish: 2 out of 3 new replicas have been updated...
Waiting for deployment "nginx-deployment" rollout to finish: 2 out of 3 new replicas have been updated...
Waiting for deployment "nginx-deployment" rollout to finish: 2 out of 3 new replicas have been updated...
Waiting for deployment "nginx-deployment" rollout to finish: 1 old replicas are pending termination...
Waiting for deployment "nginx-deployment" rollout to finish: 1 old replicas are pending termination...
Waiting for deployment "nginx-deployment" rollout to finish: 1 old replicas are pending termination...
Waiting for deployment "nginx-deployment" rollout to finish: 2 of 3 updated replicas are available...
deployment "nginx-deployment" successfully rolled out
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl rollout history deployment/nginx-deployment
deployment.apps/nginx-deployment
REVISION CHANGE-CAUSE
1 <none>
2 <none>
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano nginx-replicaset.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f nginx-replicaset.yaml
replicaset.apps/nginx-replicaset created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get replicaset nginx-replicaset
NAME DESIRED CURRENT READY AGE
nginx-replicaset 5 5 5 10s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get pods
NAME READY STATUS RESTARTS AGE
nginx-deployment-5fd9874647-phdtg 1/1 Running 0 16m
nginx-deployment-5fd9874647-pm58m 1/1 Running 0 16m
nginx-deployment-5fd9874647-z5cnq 1/1 Running 0 16m
nginx-replicaset-44w72 1/1 Running 0 14s
nginx-replicaset-7dpqk 1/1 Running 0 14s
nginx-replicaset-jkphz 1/1 Running 0 14s
nginx-replicaset-p2fhv 1/1 Running 0 14s
nginx-replicaset-qzsxv 1/1 Running 0 14s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano nginx-service.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f nginx-service.yaml
service/nginx-rs-service created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl port-forward service/nginx-rs-service 8080:8080
Forwarding from 127.0.0.1:8080 -> 80
Handling connection for 8080
grape@lonetrek:~$ curl -IL 127.0.0.1:8080
HTTP/1.1 200 OK
Server: nginx/1.29.2
Date: Sun, 12 Oct 2025 11:49:03 GMT
Content-Type: text/html
Content-Length: 615
Last-Modified: Tue, 07 Oct 2025 17:04:07 GMT
Connection: keep-alive
ETag: "68e54807-267"
Accept-Ranges: bytes

kubernetes/1/task3.txt Normal file
@@ -0,0 +1,39 @@
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano fluentd-daemonset.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f fluentd-daemonset.yaml
daemonset.apps/fluentd created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get pods
NAME READY STATUS RESTARTS AGE
fluentd-f4trd 0/1 ContainerCreating 0 20s
fluentd-qzjnt 0/1 ContainerCreating 0 20s
fluentd-tpmpw 0/1 ContainerCreating 0 20s
nginx-deployment-5fd9874647-phdtg 1/1 Running 0 119m
nginx-deployment-5fd9874647-pm58m 1/1 Running 0 118m
nginx-deployment-5fd9874647-z5cnq 1/1 Running 0 119m
nginx-replicaset-44w72 1/1 Running 0 102m
nginx-replicaset-7dpqk 1/1 Running 0 102m
nginx-replicaset-jkphz 1/1 Running 0 102m
nginx-replicaset-p2fhv 1/1 Running 0 102m
nginx-replicaset-qzsxv 1/1 Running 0 102m
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano redis-statefulset.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f redis-statefulset.yaml
statefulset.apps/redis created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano data-process-job.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f data-process-job.yaml
job.batch/data-process-job created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get jobs
NAME STATUS COMPLETIONS DURATION AGE
data-process-job Complete 1/1 14s 57s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl logs -l job-name=data-process-job
Data processed
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ nano backup-cronjob.yaml
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl apply -f backup-cronjob.yaml
cronjob.batch/backup-cronjob created
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get cronjobs
NAME SCHEDULE TIMEZONE SUSPEND ACTIVE LAST SCHEDULE AGE
backup-cronjob */2 * * * * <none> False 0 <none> 22s
grape@lonetrek:~/Documents/devops/devops/kubernetes/1$ kubectl get jobs
NAME STATUS COMPLETIONS DURATION AGE
backup-cronjob-29337938 Complete 1/1 14s 4m13s
backup-cronjob-29337940 Complete 1/1 11s 2m13s
backup-cronjob-29337942 Complete 1/1 12s 13s
data-process-job Complete 1/1 14s 7m34s
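Everything created in this lesson can be torn down using the object names above; note that deleting the StatefulSet leaves its PVCs behind by default, so they are removed explicitly (a hedged cleanup sketch, not part of the original session):

kubectl delete cronjob backup-cronjob
kubectl delete job data-process-job
kubectl delete daemonset fluentd
kubectl delete statefulset redis
kubectl delete service redis-service nginx-rs-service
kubectl delete replicaset nginx-replicaset
kubectl delete deployment nginx-deployment
kubectl delete pod nginx-pod
kubectl delete pvc redis-data-redis-0 redis-data-redis-1 redis-data-redis-2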