diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..0c2ad16fbd --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,13 @@ +## Goal + + +## Changes + + +## Testing + + +## Artifacts & Screenshots + + +## Checklist diff --git a/.github/workflows/ansible-deploy.yml b/.github/workflows/ansible-deploy.yml new file mode 100644 index 0000000000..afc1c1212a --- /dev/null +++ b/.github/workflows/ansible-deploy.yml @@ -0,0 +1,91 @@ +name: Ansible Deployment + +on: + push: + branches: [ main ] + paths: + - 'ansible/**' + - '.github/workflows/ansible-deploy.yml' + pull_request: + branches: [ main ] + paths: + - 'ansible/**' + +jobs: + lint: + name: Lint Ansible Code + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install ansible-lint + run: pip install ansible-lint + + - name: Run ansible-lint + run: | + cd ansible + ansible-lint playbooks/deploy.yml + + deploy: + name: Deploy Application + needs: lint + runs-on: ubuntu-latest + if: github.event_name == 'push' + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install Ansible + run: pip install ansible + + - name: Setup SSH + run: | + mkdir -p ~/.ssh + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan -H ${{ secrets.VM_HOST }} >> ~/.ssh/known_hosts + + - name: Create inventory + run: | + mkdir -p ansible/inventory + echo "[webservers]" > ansible/inventory/hosts.ini + echo "${{ secrets.VM_HOST }} ansible_user=${{ secrets.VM_USER }}" >> ansible/inventory/hosts.ini + + - name: Decrypt vault password + run: echo "${{ secrets.ANSIBLE_VAULT_PASSWORD }}" > /tmp/vault_pass + + - name: Run Ansible playbook + run: | + cd ansible + ansible-playbook 
playbooks/deploy.yml \ + -i inventory/hosts.ini \ + --vault-password-file /tmp/vault_pass + + - name: Cleanup + if: always() + run: rm -f /tmp/vault_pass + + verify: + name: Verify Deployment + needs: deploy + runs-on: ubuntu-latest + if: success() + + steps: + - name: Check application + run: | + sleep 10 + curl -f http://${{ secrets.VM_HOST }}:8000/health || exit 1 + echo "Application verified successfully" \ No newline at end of file diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml new file mode 100644 index 0000000000..dc7c04b0fb --- /dev/null +++ b/.github/workflows/python-ci.yml @@ -0,0 +1,73 @@ +name: Python CI/CD + +on: + push: + branches: [ main, lab03 ] + paths: + - 'app_python/**' + - '.github/workflows/python-ci.yml' + pull_request: + branches: [ main ] + paths: + - 'app_python/**' + - '.github/workflows/python-ci.yml' + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: 'pip' + cache-dependency-path: 'app_python/requirements.txt' + + - name: Install dependencies + run: | + cd app_python + pip install -r requirements.txt + + - name: Run linter + run: | + cd app_python + flake8 app.py + + - name: Run tests + run: | + cd app_python + pytest tests/test_app.py + + - name: Security scan with pip-audit + run: | + cd app_python + pip install pip-audit + pip-audit -r requirements.txt || echo "Security scan completed" + + build: + runs-on: ubuntu-latest + needs: test + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./app_python + push: true + tags: | + ${{ 
secrets.DOCKER_USERNAME }}/fastapi-lab-app:latest + ${{ secrets.DOCKER_USERNAME }}/fastapi-lab-app:$(date +%Y.%m.%d) \ No newline at end of file diff --git a/.gitignore b/.gitignore index 30d74d2584..748114b1fb 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ -test \ No newline at end of file +test +all.yml diff --git a/ansible/ansible.cfg b/ansible/ansible.cfg new file mode 100644 index 0000000000..dd2f3d3134 --- /dev/null +++ b/ansible/ansible.cfg @@ -0,0 +1,11 @@ +[defaults] +inventory = inventory/hosts.ini +roles_path = roles +host_key_checking = False +remote_user = ubuntu +retry_files_enabled = False + +[privilege_escalation] +become = True +become_method = sudo +become_user = root diff --git a/ansible/docs/LAB05.md b/ansible/docs/LAB05.md new file mode 100644 index 0000000000..8961459365 --- /dev/null +++ b/ansible/docs/LAB05.md @@ -0,0 +1,147 @@ +# Lab 5 — Ansible Fundamentals + +## 1. Architecture Overview + +### Ansible version used + +```bash +ansible --version +ansible [core 2.20.1] +``` + +### Target VM OS and version + +**Ubuntu 22.04 LTS** + +### Role structure explanation + +1. `common` role configures base system for server +2. `docker` role installs and configures Docker +3. `app_deploy` role deploys the actual application + +`common` role -> `docker` role -> `app_deploy` role + +### Why roles instead of monolithic playbooks? + +Using roles instead of monolithic playbooks ensures better readability as this provides clear separation of concerns and it's easier to debug and update isolated components. + +## 2. 
Roles Documentation + +### `common` role: + +**Purpose:** Configures base system for server +**Variables:** Common packages and timezone +**Handlers:** No handlers +**Dependencies:** No dependencies + +### `docker` role: + +**Purpose:** Installs and configures Docker +**Variables:** Docker version, Docker Compose version, Docker users, Docker repository URL, Docker GPG key URL +**Handlers:** Docker restart +**Dependencies:** Depends on `common` role + +### `deploy` role: + +**Purpose:** Deploys the actual application +**Variables:** Application settings, Docker settings, environment variables, health check and vault variables +**Handlers:** App container restart and reload +**Dependencies:** Depends on `docker` role + +## 3. Idempotency Demonstration + +### Terminal output from FIRST provision.yml run + +![](screenshots/first-provision.jpg) + +### Terminal output from SECOND provision.yml run + +![](screenshots/second-provision.jpg) + +### Analysis: What changed first time? What didn't change second time? + +Most of the tasks were changed first time. Packages, Docker, container weren't on the server. Then second time only 1 task was changed because I set `cache_valid_time: 3600` for cache update. Ansible checks system state and doesn't do unnecessary actions. If everything is set properly, nothing changes. + +### Explanation: What makes your roles idempotent? + +I used `state: present` that ensures packages are installed. + +## 4. Ansible Vault Usage + +### How you store credentials securely + +I use **Ansible Vault** to encrypt sensitive data. + +### Vault password management strategy + +```bash +ansible-playbook playbooks/deploy.yml --ask-vault-pass +``` + +### Example of encrypted file (show it's encrypted!) + +![](screenshots/encrypted-data.jpg) + +### Why Ansible Vault is important + +Ansible Vault is important because passwords are encrypted and it's safe in case of pushing file to git. + +## 5. 
Deployment Verification + +### Terminal output from deploy.yml run + +![](screenshots/deploy.jpg) + +### Container status: docker ps output + +![](screenshots/docker-ps.jpg) + +### Health check verification: curl outputs + +```bash +curl http://62.84.120.249:5000/health | jq +{ + "status": "healthy", + "timestamp": "2026-02-26T20:42:46.002Z", + "uptime_seconds": 10337 +} +``` + +## 6. Key Decisions + +### Why use roles instead of plain playbooks? + +- **Organization** - Roles group related tasks, variables, and handlers together +- **Readability** - Playbooks become clean and simple (just list roles) +- **Reusability** - Same role can be used in multiple playbooks +- **Maintainability** - Easier to update and debug isolated components + +### How do roles improve reusability? + +- **Parameterization** - Variables make roles adaptable to different environments +- **Encapsulation** - All dependencies are contained within the role +- **Sharing** - Roles can be shared via Ansible Galaxy +- **Composability** - Mix and match roles for different server types + +### What makes a task idempotent? + +- **State checking** - Modules check current state before making changes +- **Declarative syntax** - Describe the desired state, not how to achieve it +- **Conditionals** - Tasks run only when needed (e.g., when: container_info.exists) +- **No "latest"** - Using state: present instead of state: latest +- **Idempotent modules** - Ansible modules are designed to be idempotent + +### How do handlers improve efficiency? + +- **Run-once** - Execute only once, even if notified by multiple tasks +- **Conditional execution** - Run only when changes actually occur +- **Order control** - Execute at the end of the play, not during +- **Resource savings** - Prevent unnecessary restarts (e.g., restart Docker once, not multiple times) + +### Why is Ansible Vault necessary? 
+ +- **Security** - Encrypts sensitive data (passwords, tokens, keys) +- **Version control safe** - Can commit encrypted files to git +- **Compliance** - Meets security standards and audit requirements +- **Team collaboration** - Share code without sharing secrets +- **Multi-environment** - Different passwords for dev/staging/production diff --git a/ansible/docs/LAB06.md b/ansible/docs/LAB06.md new file mode 100644 index 0000000000..d846f50a60 --- /dev/null +++ b/ansible/docs/LAB06.md @@ -0,0 +1,128 @@ +# Lab 6 — Advanced Ansible & CI/CD + +## 1. Overview + +This project automates application deployment using Ansible and GitHub Actions. I took a basic Ansible setup and added proper structure, safety features, and CI/CD automation. + +What I Used: + +- Ansible for automation +- Docker Compose for containers +- GitHub Actions for CI/CD +- Ubuntu servers for deployment + +## 2. Blocks & Tags + +I organized each role using blocks to group related tasks: + +Common Role - Groups package tasks and user tasks separately +Docker Role - Separates installation from configuration + +Tag Strategy: + +- packages - Just install packages +- users - Just manage users +- docker_install - Only Docker installation +- docker_config - Only Docker setup +- web_app_wipe - Only cleanup operations + +Example Usage: +```bash +ansible-playbook deploy.yml --tags docker_install + +ansible-playbook deploy.yml --skip-tags common + +ansible-playbook deploy.yml --list-tags +``` + +![](screenshots/tags.jpg) + +![](screenshots/tag-docker.jpg) + +## 3. Docker Compose Migration + +I replaced the old docker run approach with Docker Compose templates. 
+ +Before: Manual container management with multiple tasks +After: Single declarative docker-compose.yml template + +The template supports: + +- Dynamic service names and ports +- Environment variables (including vault secrets) +- Health checks +- Restart policies + +I also added proper role dependencies - the web_app role now automatically pulls in the docker role, so Docker is always installed first. + +## 4. Wipe Logic + +This was tricky - needed a way to completely remove the app, but make it really hard to do by accident. + +The Solution: Double safety - requires BOTH a variable AND a tag: + +```yaml +web_app_wipe: false + +when: web_app_wipe | bool +tags: web_app_wipe +``` + +Test Scenarios: + +- Normal deploy - wipe skipped (safe) +- Wipe only - -e "web_app_wipe=true" --tags web_app_wipe removes everything +- Clean reinstall - -e "web_app_wipe=true" wipes then deploys fresh +- Safety check - tag without variable = nothing happens + +The wipe task removes containers, compose file, and app directory. Optional image/volume cleanup too. + +![](screenshots/without-wipe.jpg) + +![](screenshots/with-wipe.jpg) + +## 5. CI/CD Pipeline + +GitHub Actions automates everything on git push: + +Workflow Steps: + +- Lint - Runs ansible-lint to catch syntax errors +- Deploy - Sets up SSH, decrypts vault, runs playbook +- Verify - Checks health endpoint, confirms container is running + +## 6. What I Learned + +Blocks are great for: + +-Grouping related tasks +- Applying conditions once +- Error handling with rescue/always + +Tag + Variable combo is perfect for dangerous operations like wipe - prevents accidents but still allows automation. + +Idempotency matters - Second run of the playbook shows "ok" not "changed". Docker Compose handles this automatically. + +CI/CD secrets need careful handling - I create temp files and immediately delete them, even on failure. + +## 7. Research Answers + +Q: Rescue block failure? 
+ +A: Always block still runs, but playbook stops for that host. + +Q: Nested blocks? + +A: Yes, used them for package groups inside roles. + +Q: Tag inheritance? + +A: Tasks inherit parent block tags, can add their own. + +Q: Variable + tag why both? + +A: Double safety - tag for selective execution, variable for default-off behavior. + +Q: Self-hosted vs GitHub runner? + +A: Self-hosted is more secure (no SSH keys in GitHub), faster (same network), but needs maintenance. diff --git a/ansible/docs/image.png b/ansible/docs/image.png new file mode 100644 index 0000000000..894cf50711 Binary files /dev/null and b/ansible/docs/image.png differ diff --git a/ansible/docs/screenshots/deploy.jpg b/ansible/docs/screenshots/deploy.jpg new file mode 100644 index 0000000000..48b8752756 Binary files /dev/null and b/ansible/docs/screenshots/deploy.jpg differ diff --git a/ansible/docs/screenshots/docker-ps.jpg b/ansible/docs/screenshots/docker-ps.jpg new file mode 100644 index 0000000000..c07096b0b8 Binary files /dev/null and b/ansible/docs/screenshots/docker-ps.jpg differ diff --git a/ansible/docs/screenshots/encrypted-data.jpg b/ansible/docs/screenshots/encrypted-data.jpg new file mode 100644 index 0000000000..3c7489a05c Binary files /dev/null and b/ansible/docs/screenshots/encrypted-data.jpg differ diff --git a/ansible/docs/screenshots/first-provision.jpg b/ansible/docs/screenshots/first-provision.jpg new file mode 100644 index 0000000000..e3bc8ab6ac Binary files /dev/null and b/ansible/docs/screenshots/first-provision.jpg differ diff --git a/ansible/docs/screenshots/second-provision.jpg b/ansible/docs/screenshots/second-provision.jpg new file mode 100644 index 0000000000..2076050688 Binary files /dev/null and b/ansible/docs/screenshots/second-provision.jpg differ diff --git a/ansible/docs/screenshots/tag-docker.jpg b/ansible/docs/screenshots/tag-docker.jpg new file mode 100644 index 0000000000..89b7ca0ad5 Binary files /dev/null and b/ansible/docs/screenshots/tag-docker.jpg 
differ diff --git a/ansible/docs/screenshots/tags.jpg b/ansible/docs/screenshots/tags.jpg new file mode 100644 index 0000000000..6f6bc8dd52 Binary files /dev/null and b/ansible/docs/screenshots/tags.jpg differ diff --git a/ansible/docs/screenshots/with-wipe.jpg b/ansible/docs/screenshots/with-wipe.jpg new file mode 100644 index 0000000000..4cb5f72d88 Binary files /dev/null and b/ansible/docs/screenshots/with-wipe.jpg differ diff --git a/ansible/docs/screenshots/without-wipe.jpg b/ansible/docs/screenshots/without-wipe.jpg new file mode 100644 index 0000000000..ff812d8d7a Binary files /dev/null and b/ansible/docs/screenshots/without-wipe.jpg differ diff --git a/ansible/inventory/hosts.ini b/ansible/inventory/hosts.ini new file mode 100644 index 0000000000..786a977658 --- /dev/null +++ b/ansible/inventory/hosts.ini @@ -0,0 +1,2 @@ +[webservers] +vm ansible_host=89.169.183.195 ansible_user=ubuntu ansible_ssh_private_key_file=~/.ssh/yc-key diff --git a/ansible/playbooks/deploy.yml b/ansible/playbooks/deploy.yml new file mode 100644 index 0000000000..157c4fe04d --- /dev/null +++ b/ansible/playbooks/deploy.yml @@ -0,0 +1,45 @@ +- name: Deploy Python containerized application + hosts: webservers + become: yes + gather_facts: yes + + vars_files: + - ../group_vars/all.yml + + roles: + - name: web_app + vars: + app_environment: + ENVIRONMENT: production + LOG_LEVEL: info + APP_VERSION: "{{ docker_image_tag }}" + + pre_tasks: + - name: Verify vault variables are loaded + debug: + msg: "Vault variables loaded successfully: {{ dockerhub_username is defined}}" + when: dockerhub_username is defined + + post_tasks: + - name: Get running containers + command: docker ps + register: docker_ps + changed_when: false + + - name: Show running containers + debug: + var: docker_ps.stdout_lines + + - name: Test application main endpoint + uri: + url: "http://{{ ansible_default_ipv4.address | default('localhost') }}:{{ app_host_port }}/" + method: GET + status_code: 200 + register: 
main_endpoint + ignore_errors: yes + + - name: Show application endpoints status + debug: + msg: + - "Main endpoint: {{ main_endpoint.status | default('FAILED') }}" + - "Health endpoint: {{ health_result.status | default('unknown') }}" diff --git a/ansible/playbooks/provision.yml b/ansible/playbooks/provision.yml new file mode 100644 index 0000000000..176326f8e0 --- /dev/null +++ b/ansible/playbooks/provision.yml @@ -0,0 +1,7 @@ +- name: Provision web servers + hosts: webservers + become: yes + + roles: + - common + - docker diff --git a/ansible/playbooks/site.yml b/ansible/playbooks/site.yml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ansible/roles/common/defaults/main.yml b/ansible/roles/common/defaults/main.yml new file mode 100644 index 0000000000..7c2fe4728e --- /dev/null +++ b/ansible/roles/common/defaults/main.yml @@ -0,0 +1,10 @@ +common_packages: + - python3-pip + - python3-venv + - curl + - wget + - git + - vim + - htop + +timezone: "UTC" diff --git a/ansible/roles/common/tasks/main.yml b/ansible/roles/common/tasks/main.yml new file mode 100644 index 0000000000..05ccff75a2 --- /dev/null +++ b/ansible/roles/common/tasks/main.yml @@ -0,0 +1,67 @@ +- name: Common role tasks + block: + - name: Package installation block + block: + - name: Update apt cache + apt: + update_cache: yes + cache_valid_time: 3600 + + - name: Install common packages + apt: + name: "{{ common_packages }}" + state: present + + - name: Upgrade pip + pip: + name: pip + state: latest + + rescue: + - name: Handle apt cache update failure + debug: + msg: "Apt cache update failed, running fix..." 
+ + - name: Fix missing packages + apt: + name: apt + state: latest + update_cache: yes + when: ansible_os_family == "Debian" + + tags: + - packages + + - name: User creation block + block: + - name: Set timezone + timezone: + name: "{{ timezone }}" + when: timezone is defined + + tags: + - users + + rescue: + - name: Common role failure handler + debug: + msg: "Common role execution failed" + + - name: Log failure + copy: + content: "Common role failed at {{ ansible_date_time.iso8601 }}" + dest: /tmp/common_role_failed.log + mode: '0644' + + always: + - name: Log completion + copy: + content: "Common role completed at {{ ansible_date_time.iso8601 }}" + dest: /tmp/common_role_completed.log + mode: '0644' + - debug: + msg: "Common role execution finished" + + become: true + tags: + - common \ No newline at end of file diff --git a/ansible/roles/docker/defaults/main.yml b/ansible/roles/docker/defaults/main.yml new file mode 100644 index 0000000000..9cc896f797 --- /dev/null +++ b/ansible/roles/docker/defaults/main.yml @@ -0,0 +1,2 @@ +docker_users: + - ubuntu diff --git a/ansible/roles/docker/handlers/main.yml b/ansible/roles/docker/handlers/main.yml new file mode 100644 index 0000000000..4dbfe45fab --- /dev/null +++ b/ansible/roles/docker/handlers/main.yml @@ -0,0 +1,5 @@ +- name: restart docker + systemd: + name: docker + state: restarted + become: yes diff --git a/ansible/roles/docker/tasks/main.yml b/ansible/roles/docker/tasks/main.yml new file mode 100644 index 0000000000..8b77b5d763 --- /dev/null +++ b/ansible/roles/docker/tasks/main.yml @@ -0,0 +1,88 @@ +- name: Docker role tasks + block: + - name: Docker installation block + block: + - name: Add Docker GPG key + apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - name: Add Docker repository + apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable" + state: present + + - name: Install Docker packages + apt: + 
name: + - docker-ce + - docker-ce-cli + - containerd.io + state: present + notify: restart docker + + rescue: + - name: Wait before retry on GPG key failure + wait_for: + timeout: 10 + delegate_to: localhost + + - name: Retry Docker GPG key addition + apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - debug: + msg: "Docker GPG key retry completed" + + tags: + - docker_install + + - name: Docker configuration block + block: + - name: Ensure Docker service is running + systemd: + name: docker + state: started + enabled: yes + + - name: Add user to docker group + user: + name: "{{ item }}" + group: docker + append: yes + loop: "{{ docker_users }}" + + tags: + - docker_config + + rescue: + - name: Docker role failure handler + debug: + msg: "Docker role execution failed" + + - name: Log Docker failure + copy: + content: "Docker role failed at {{ ansible_date_time.iso8601 }}" + dest: /tmp/docker_role_failed.log + mode: '0644' + + always: + - name: Ensure Docker service is enabled + systemd: + name: docker + enabled: yes + ignore_errors: yes + + - name: Log Docker role completion + copy: + content: "Docker role completed at {{ ansible_date_time.iso8601 }}" + dest: /tmp/docker_role_completed.log + mode: '0644' + + - debug: + msg: "Docker role execution finished" + + become: true + tags: + - docker \ No newline at end of file diff --git a/ansible/roles/web_app/defaults/main.yml b/ansible/roles/web_app/defaults/main.yml new file mode 100644 index 0000000000..0eddcea781 --- /dev/null +++ b/ansible/roles/web_app/defaults/main.yml @@ -0,0 +1,17 @@ +app_name: fastapi-lab-app +app_port: 5000 +app_host_port: 5000 +app_container_name: "{{ app_name }}" +docker_image_tag: latest +restart_policy: unless-stopped + +app_environment: production +app_debug: false + +health_check_retries: 30 +health_check_delay: 2 +health_endpoint: /health + +web_app_wipe: false +web_app_wipe_images: false +web_app_wipe_volumes: false \ No newline at end of file diff --git 
a/ansible/roles/web_app/handlers/main.yml b/ansible/roles/web_app/handlers/main.yml new file mode 100644 index 0000000000..56fcc81a45 --- /dev/null +++ b/ansible/roles/web_app/handlers/main.yml @@ -0,0 +1,12 @@ +- name: restart app container + docker_container: + name: "{{ app_container_name }}" + state: started + restart: yes + become: yes + +- name: reload app container + docker_container: + name: "{{ app_container_name }}" + restart: yes + become: yes diff --git a/ansible/roles/web_app/meta/main.yml b/ansible/roles/web_app/meta/main.yml new file mode 100644 index 0000000000..74475d7449 --- /dev/null +++ b/ansible/roles/web_app/meta/main.yml @@ -0,0 +1,7 @@ +dependencies: + - role: docker + tags: + - docker + - docker_install + vars: + docker_users: "{{ web_app_users | default(['ubuntu']) }}" diff --git a/ansible/roles/web_app/tasks/main.yml b/ansible/roles/web_app/tasks/main.yml new file mode 100644 index 0000000000..bf22fb201e --- /dev/null +++ b/ansible/roles/web_app/tasks/main.yml @@ -0,0 +1,162 @@ +- name: Include wipe tasks + include_tasks: wipe.yml + tags: + - web_app_wipe + +- name: Deploy application with Docker Compose + block: + - name: Login to Docker Hub + docker_login: + username: "{{ dockerhub_username }}" + password: "{{ dockerhub_password }}" + become: yes + no_log: true + tags: + - docker_login + - compose + + - name: Create application directory + file: + path: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + state: directory + mode: '0755' + become: yes + tags: + - app_deploy + - compose + - directories + + - name: Template docker-compose.yml + template: + src: docker-compose.yml.j2 + dest: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}/docker-compose.yml" + mode: '0644' + become: yes + register: compose_file + tags: + - app_deploy + - compose + - templates + + - name: Install Docker Compose Python library + pip: + name: docker-compose + state: present + become: 
yes + tags: + - app_deploy + - compose + - dependencies + + - name: Deploy with Docker Compose + community.docker.docker_compose_v2: + project_src: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + files: + - docker-compose.yml + state: present + pull: always + recreate: always + become: yes + register: compose_result + tags: + - app_deploy + - compose + - deploy + + - name: Wait for application to start + wait_for: + port: "{{ app_host_port | default('8000') }}" + host: "localhost" + delay: 5 + timeout: 60 + state: started + become: no + tags: + - app_deploy + - health_check + + - name: Check health endpoint + uri: + url: "http://localhost:{{ app_host_port | default('8000') }}{{ health_endpoint | default('/health') }}" + method: GET + status_code: 200 + timeout: 10 + register: health_result + retries: 5 + delay: 3 + until: health_result.status == 200 + become: no + tags: + - app_deploy + - health_check + + - name: Display application info + debug: + msg: + - "Application deployed successfully with Docker Compose!" + - "Project directory: {{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + - "Container: {{ web_app_name | default('devops-app') }}" + - "Image: {{ docker_image }}:{{ docker_tag | default('latest') }}" + - "Port: {{ app_host_port | default('8000') }} -> {{ app_port | default('8000') }}" + - "Health check: {{ health_result.status }}" + - "Compose status: {{ compose_result is changed | ternary('Changed', 'OK') }}" + tags: + - app_deploy + - debug + + rescue: + - name: Handle deployment failure + debug: + msg: "Docker Compose deployment failed, attempting rollback..." 
+ tags: + - app_deploy + - rescue + + - name: Log failure + copy: + content: | + Deployment failed at {{ ansible_date_time.iso8601 }} + Error: {{ ansible_failed_result.msg | default('Unknown error') }} + dest: "/tmp/{{ web_app_name | default('devops-app') }}_deploy_failed.log" + mode: '0644' + become: yes + tags: + - app_deploy + - rescue + + - name: Attempt to rollback to previous version + community.docker.docker_compose_v2: + project_src: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + files: + - docker-compose.yml + state: present + pull: never + become: yes + ignore_errors: yes + when: compose_file.changed + tags: + - app_deploy + - rescue + - rollback + + always: + - name: Cleanup temporary files + file: + path: "/tmp/{{ web_app_name | default('devops-app') }}_*" + state: absent + become: yes + ignore_errors: yes + tags: + - app_deploy + - cleanup + + - name: Display completion message + debug: + msg: "Docker Compose deployment process completed" + tags: + - app_deploy + - debug + + tags: + - web_app + - app_deployment \ No newline at end of file diff --git a/ansible/roles/web_app/tasks/wipe.yml b/ansible/roles/web_app/tasks/wipe.yml new file mode 100644 index 0000000000..d6c7a08473 --- /dev/null +++ b/ansible/roles/web_app/tasks/wipe.yml @@ -0,0 +1,125 @@ +- name: Wipe web application + block: + - name: Display wipe start message + debug: + msg: "Starting wipe process for {{ web_app_name | default('devops-app') }}" + tags: + - web_app_wipe + - wipe_info + + - name: Stop and remove containers with Docker Compose + community.docker.docker_compose_v2: + project_src: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + files: + - docker-compose.yml + state: absent + remove_volumes: yes + remove_orphans: yes + become: yes + ignore_errors: yes + register: compose_down_result + tags: + - web_app_wipe + - wipe_containers + + - name: Remove docker-compose.yml file + file: + path: "{{ 
compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}/docker-compose.yml" + state: absent + become: yes + ignore_errors: yes + tags: + - web_app_wipe + - wipe_files + + - name: Remove application directory + file: + path: "{{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + state: absent + become: yes + ignore_errors: yes + register: dir_removed + tags: + - web_app_wipe + - wipe_directories + + - name: Optionally remove Docker images + block: + - name: Get image ID + docker_image_info: + name: "{{ docker_image }}:{{ docker_tag | default('latest') }}" + register: image_info + become: yes + ignore_errors: yes + + - name: Remove Docker image + docker_image: + name: "{{ docker_image }}:{{ docker_tag | default('latest') }}" + state: absent + become: yes + when: image_info.images | length > 0 + ignore_errors: yes + when: web_app_wipe_images | default(false) | bool + tags: + - web_app_wipe + - wipe_images + + - name: Remove any orphaned volumes + shell: | + docker volume ls -q --filter "label=com.docker.compose.project={{ web_app_name | default('devops-app') }}" | xargs -r docker volume rm + become: yes + ignore_errors: yes + when: web_app_wipe_volumes | default(false) | bool + tags: + - web_app_wipe + - wipe_volumes + + - name: Log wipe completion + copy: + content: | + Application wiped at {{ ansible_date_time.iso8601 }} + Directory removed: {{ dir_removed is changed | ternary('Yes', 'No') }} + Containers removed: {{ compose_down_result is changed | ternary('Yes', 'No') }} + dest: "/tmp/{{ web_app_name | default('devops-app') }}_wipe_{{ ansible_date_time.epoch }}.log" + mode: '0644' + become: yes + tags: + - web_app_wipe + - wipe_log + + - name: Display wipe completion message + debug: + msg: + - "Application {{ web_app_name | default('devops-app') }} wiped successfully!" 
+ - "Directory: {{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }} removed" + - "Containers stopped and removed" + - "Log saved to /tmp/{{ web_app_name | default('devops-app') }}_wipe_*.log" + tags: + - web_app_wipe + - wipe_info + + rescue: + - name: Handle wipe failure + debug: + msg: "Wipe process encountered errors, but continuing with cleanup..." + tags: + - web_app_wipe + + - name: Force remove directory if still exists + shell: "rm -rf {{ compose_project_dir | default('/opt/' + (web_app_name | default('devops-app'))) }}" + become: yes + ignore_errors: yes + when: compose_project_dir is defined + tags: + - web_app_wipe + + always: + - name: Final cleanup message + debug: + msg: "Wipe process completed for {{ web_app_name | default('devops-app') }}" + tags: + - web_app_wipe + + when: web_app_wipe | default(false) | bool + tags: + - web_app_wipe \ No newline at end of file diff --git a/ansible/roles/web_app/templates/docker-compose.yml.j2 b/ansible/roles/web_app/templates/docker-compose.yml.j2 new file mode 100644 index 0000000000..b00e921bec --- /dev/null +++ b/ansible/roles/web_app/templates/docker-compose.yml.j2 @@ -0,0 +1,37 @@ +version: '{{ docker_compose_version | default("3.8") }}' + +services: + {{ web_app_name | default('devops-app') }}: + image: {{ docker_image }}:{{ docker_tag | default('latest') }} + container_name: {{ web_app_name | default('devops-app') }} + restart: {{ restart_policy | default('unless-stopped') }} + ports: + - "{{ app_host_port | default('8000') }}:{{ app_port | default('8000') }}" + environment: + - APP_ENV={{ app_environment | default('production') }} + - APP_SECRET_KEY={{ app_secret_key | default('') }} + - APP_DEBUG={{ app_debug | default('false') }} + - DATABASE_URL={{ database_url | default('') }} + - REDIS_URL={{ redis_url | default('') }} + {% if app_extra_env is defined %} + {% for key, value in app_extra_env.items() %} + - {{ key }}={{ value }} + {% endfor %} + {% endif %} + networks: 
+ - {{ web_app_network | default('app_network') }} + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:{{ app_port | default('8000') }}{{ health_endpoint | default('/health') }}"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + +networks: + {{ web_app_network | default('app_network') }}: + driver: bridge \ No newline at end of file diff --git a/app_java/.gitattributes b/app_java/.gitattributes new file mode 100644 index 0000000000..3b41682ac5 --- /dev/null +++ b/app_java/.gitattributes @@ -0,0 +1,2 @@ +/mvnw text eol=lf +*.cmd text eol=crlf diff --git a/app_java/.gitignore b/app_java/.gitignore new file mode 100644 index 0000000000..667aaef0c8 --- /dev/null +++ b/app_java/.gitignore @@ -0,0 +1,33 @@ +HELP.md +target/ +.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ diff --git a/app_java/.mvn/wrapper/maven-wrapper.properties b/app_java/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000000..8dea6c227c --- /dev/null +++ b/app_java/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,3 @@ +wrapperVersion=3.3.4 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.12/apache-maven-3.9.12-bin.zip diff --git a/app_java/README.md b/app_java/README.md new file mode 100644 index 0000000000..79e193f811 --- /dev/null +++ b/app_java/README.md @@ -0,0 +1,58 @@ +# DevOps Info Service (Java / Spring Boot) + +## Overview + +This Java service implements a DevOps info API using Spring Boot framework. 
It provides system information, service metadata, and health check endpoints in a structured JSON format. Built as a modular platform for DevOps education. + +## Prerequisites + +- Java 21 +- Apache Maven 3.6+ + +## Installation + +Build the project: + +```bash +cd app_java +mvn install +``` + +## Running the Application + +### Default Run + +By default, the application runs on `0.0.0.0:8080`. + +```bash +java -jar ./target/*.jar +``` + +### Configuration + +The application can be configured using **environment variables**: + +| **Variable** | **Default** | **Description** | +| ------------ | ----------- | ------------------- | +| `SERVER_ADDRESS` | `0.0.0.0` | Server bind address | +| `SERVER_PORT` | `8080` | Server port | + +## API Endpoints + +### GET `/` + +Returns comprehensive JSON metadata with the following top-level sections: + +- **service** – name, version, description, framework +- **system** – hostname, platform, platform_version, architecture, cpu_count, java_version +- **runtime** – uptime_seconds, uptime_human, current_time, timezone +- **request** – client_ip, user_agent, method, path +- **endpoints** – list of available paths and their purpose + +### GET `/health` + +Returns a compact health status document: + +- **status** – string status +- **timestamp** – current UTC timestamp +- **uptime_seconds** – number of seconds the process has been running \ No newline at end of file diff --git a/app_java/docs/JAVA.md b/app_java/docs/JAVA.md new file mode 100644 index 0000000000..d7fc05fe26 --- /dev/null +++ b/app_java/docs/JAVA.md @@ -0,0 +1 @@ +I selected Java for this DevOps service because it offers the optimal balance of enterprise reliability and containerization efficiency. Java's mature ecosystem, strong type safety, and JVM optimization make it ideal for production-grade microservices. For Docker multi-stage builds, Java enables compact final images. Maven dependency management ensures consistent builds across environments.
diff --git a/app_java/docs/LAB01.md b/app_java/docs/LAB01.md new file mode 100644 index 0000000000..4f2ce527ae --- /dev/null +++ b/app_java/docs/LAB01.md @@ -0,0 +1,106 @@ +# LAB01 – DevOps Info Service (Java / Spring Boot) + +## 1. Overview + +This Java service implements a DevOps info API using Spring Boot framework. It provides system information, service metadata, and health check endpoints in a structured JSON format. The service exposes `GET /` for comprehensive service information and `GET /health` for health monitoring. + +## 2. Architecture + +- **Spring Boot Application**: Built with Spring Boot using minimal configuration and auto-configuration +- **Single Controller Design**: All endpoints are implemented in `MainApplication.java` as a combined `@SpringBootApplication` and `@RestController` +- **Layered Logic**: + - Controller layer handles HTTP requests and responses + - Business logic methods encapsulate system information gathering and uptime calculation + - Exception handling with centralized error responses +- **Request Context Integration**: Uses `HttpServletRequest` to extract client information including IP address and user agent + +## 3. Configuration & Environment + +- **No External Configuration**: The service uses default Spring Boot configurations +- **Default Port**: Runs on port 8080 +- **Environment Variables**: + - Can be configured via standard Spring Boot properties + - `SERVER_PORT` – override HTTP port (e.g., `SERVER_PORT=9090`) + - `SERVER_ADDRESS` – bind address (e.g., `SERVER_ADDRESS=0.0.0.0`) +- **Logging**: Uses SLF4J with Logback for structured logging + +## 4. Build & Run + +### Prerequisites + +- Java 21 +- Maven 3.6 +- Spring Boot 4.0.2 + +### Build and Run Commands + +```bash +cd app_java +mvn install +java -jar ./target/*.jar +``` + +## 5. 
API Endpoints Structure + +### `GET /` - Service Information + +Returns comprehensive service and system information: + +- **service**: Name, version, description, framework +- **system**: Hostname, OS details, CPU count, Java version +- **runtime**: Uptime (seconds and human-readable), current UTC time +- **request**: Client IP, user agent, HTTP method, path +- **endpoints**: Available API endpoints with descriptions + +### `GET /health` - Health Check + +Returns service health status: + +- **status**: "healthy" service status +- **timestamp**: Current UTC timestamp +- **uptime_seconds**: Service uptime in seconds + +## 6. Testing Evidence + +### Manual Verification Commands + +**Request:** + +```bash +curl http://localhost:5000/ +``` + +**Response:** + +![Main Endpoint](screenshots/01-main-endpoint.jpg) + +**Request:** + +```bash +curl http://localhost:5000/health +``` + +**Response:** + +![Health Check](screenshots/02-health-check.jpg) + +### Terminal Output + +``` +2026-01-28 14:33:24 - d.d.d.MainApplication - INFO - Starting MainApplication v0.0.1-SNAPSHOT using Java 21.0.9 with PID 1181 (/mnt/c/Users/a11al/DevOps-Core-Course/app_java/target/devops-info-service-0.0.1-SNAPSHOT.jar started by alena in /mnt/c/Users/a11al/DevOps-Core-Course/app_java) +2026-01-28 14:33:24 - d.d.d.MainApplication - INFO - No active profile set, falling back to 1 default profile: "default" +2026-01-28 14:33:31 - o.s.boot.tomcat.TomcatWebServer - INFO - Tomcat initialized with port 5000 (http) +2026-01-28 14:33:31 - o.a.catalina.core.StandardService - INFO - Starting service [Tomcat] +2026-01-28 14:33:31 - o.a.catalina.core.StandardEngine - INFO - Starting Servlet engine: [Apache Tomcat/11.0.15] +2026-01-28 14:33:31 - o.s.b.w.c.s.WebApplicationContextInitializer - INFO - Root WebApplicationContext: initialization completed in 6375 ms +2026-01-28 14:33:32 - o.s.b.a.e.web.EndpointLinksResolver - INFO - Exposing 2 endpoints beneath base path '/actuator' +2026-01-28 14:33:33 - 
o.s.boot.tomcat.TomcatWebServer - INFO - Tomcat started on port 5000 (http) with context path '/' +2026-01-28 14:33:33 - d.d.d.MainApplication - INFO - Started MainApplication in 9.527 seconds (process running for 7.858) +2026-01-28 14:33:33 - d.d.d.MainApplication - INFO - Server started successfully +2026-01-28 14:35:26 - o.a.c.c.C.[Tomcat].[localhost].[/] - INFO - Initializing Spring DispatcherServlet 'dispatcherServlet' +2026-01-28 14:35:26 - o.s.web.servlet.DispatcherServlet - INFO - Initializing Servlet 'dispatcherServlet' +2026-01-28 14:35:26 - o.s.web.servlet.DispatcherServlet - INFO - Completed initialization in 1 ms +2026-01-28 14:35:26 - d.d.d.MainApplication - INFO - GET / from 0:0:0:0:0:0:0:1 +2026-01-28 14:36:01 - d.d.d.MainApplication - INFO - Health check requested +``` + diff --git a/app_java/docs/screenshots/01-main-endpoint.jpg b/app_java/docs/screenshots/01-main-endpoint.jpg new file mode 100644 index 0000000000..da515bdc92 Binary files /dev/null and b/app_java/docs/screenshots/01-main-endpoint.jpg differ diff --git a/app_java/docs/screenshots/02-health-check.jpg b/app_java/docs/screenshots/02-health-check.jpg new file mode 100644 index 0000000000..80b400e301 Binary files /dev/null and b/app_java/docs/screenshots/02-health-check.jpg differ diff --git a/app_java/docs/screenshots/03-formatted-output.jpg b/app_java/docs/screenshots/03-formatted-output.jpg new file mode 100644 index 0000000000..f8ba6290ee Binary files /dev/null and b/app_java/docs/screenshots/03-formatted-output.jpg differ diff --git a/app_java/mvnw b/app_java/mvnw new file mode 100644 index 0000000000..bd8896bf22 --- /dev/null +++ b/app_java/mvnw @@ -0,0 +1,295 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.3.4 +# +# Optional ENV vars +# ----------------- +# JAVA_HOME - location of a JDK home dir, required when download maven via java source +# MVNW_REPOURL - repo url base for downloading maven distribution +# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output +# ---------------------------------------------------------------------------- + +set -euf +[ "${MVNW_VERBOSE-}" != debug ] || set -x + +# OS specific support. +native_path() { printf %s\\n "$1"; } +case "$(uname)" in +CYGWIN* | MINGW*) + [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" + native_path() { cygpath --path --windows "$1"; } + ;; +esac + +# set JAVACMD and JAVACCMD +set_java_home() { + # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched + if [ -n "${JAVA_HOME-}" ]; then + if [ -x "$JAVA_HOME/jre/sh/java" ]; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACCMD="$JAVA_HOME/jre/sh/javac" + else + JAVACMD="$JAVA_HOME/bin/java" + JAVACCMD="$JAVA_HOME/bin/javac" + + if [ ! -x "$JAVACMD" ] || [ ! 
-x "$JAVACCMD" ]; then + echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 + echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 + return 1 + fi + fi + else + JAVACMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v java + )" || : + JAVACCMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v javac + )" || : + + if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then + echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2 + return 1 + fi + fi +} + +# hash string like Java String::hashCode +hash_string() { + str="${1:-}" h=0 + while [ -n "$str" ]; do + char="${str%"${str#?}"}" + h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) + str="${str#?}" + done + printf %x\\n $h +} + +verbose() { :; } +[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } + +die() { + printf %s\\n "$1" >&2 + exit 1 +} + +trim() { + # MWRAPPER-139: + # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. + # Needed for removing poorly interpreted newline sequences when running in more + # exotic environments such as mingw bash on Windows. 
+ printf "%s" "${1}" | tr -d '[:space:]' +} + +scriptDir="$(dirname "$0")" +scriptName="$(basename "$0")" + +# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties +while IFS="=" read -r key value; do + case "${key-}" in + distributionUrl) distributionUrl=$(trim "${value-}") ;; + distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; + esac +done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties" +[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" + +case "${distributionUrl##*/}" in +maven-mvnd-*bin.*) + MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ + case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in + *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; + :Darwin*x86_64) distributionPlatform=darwin-amd64 ;; + :Darwin*arm64) distributionPlatform=darwin-aarch64 ;; + :Linux*x86_64*) distributionPlatform=linux-amd64 ;; + *) + echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 + distributionPlatform=linux-amd64 + ;; + esac + distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" + ;; +maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; +*) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; +esac + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" +distributionUrlName="${distributionUrl##*/}" +distributionUrlNameMain="${distributionUrlName%.*}" +distributionUrlNameMain="${distributionUrlNameMain%-bin}" +MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" +MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" + +exec_maven() { + unset MVNW_VERBOSE 
MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : + exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" +} + +if [ -d "$MAVEN_HOME" ]; then + verbose "found existing MAVEN_HOME at $MAVEN_HOME" + exec_maven "$@" +fi + +case "${distributionUrl-}" in +*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; +*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; +esac + +# prepare tmp dir +if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then + clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } + trap clean HUP INT TERM EXIT +else + die "cannot create temp dir" +fi + +mkdir -p -- "${MAVEN_HOME%/*}" + +# Download and Install Apache Maven +verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +verbose "Downloading from: $distributionUrl" +verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +# select .zip or .tar.gz +if ! command -v unzip >/dev/null; then + distributionUrl="${distributionUrl%.zip}.tar.gz" + distributionUrlName="${distributionUrl##*/}" +fi + +# verbose opt +__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' +[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v + +# normalize http auth +case "${MVNW_PASSWORD:+has-password}" in +'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; +has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; +esac + +if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then + verbose "Found wget ... using wget" + wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" +elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then + verbose "Found curl ... 
using curl" + curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" +elif set_java_home; then + verbose "Falling back to use Java to download" + javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" + targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" + cat >"$javaSource" <<-END + public class Downloader extends java.net.Authenticator + { + protected java.net.PasswordAuthentication getPasswordAuthentication() + { + return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); + } + public static void main( String[] args ) throws Exception + { + setDefault( new Downloader() ); + java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); + } + } + END + # For Cygwin/MinGW, switch paths to Windows format before running javac and java + verbose " - Compiling Downloader.java ..." + "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" + verbose " - Running Downloader.java ..." + "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" +fi + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +if [ -n "${distributionSha256Sum-}" ]; then + distributionSha256Result=false + if [ "$MVN_CMD" = mvnd.sh ]; then + echo "Checksum validation is not supported for maven-mvnd." >&2 + echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." 
>&2 + exit 1 + elif command -v sha256sum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then + distributionSha256Result=true + fi + elif command -v shasum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 + echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + fi + if [ $distributionSha256Result = false ]; then + echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 + echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2 + exit 1 + fi +fi + +# unzip and move +if command -v unzip >/dev/null; then + unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" +else + tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" +fi + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +actualDistributionDir="" + +# First try the expected directory name (for regular distributions) +if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then + if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then + actualDistributionDir="$distributionUrlNameMain" + fi +fi + +# If not found, search for any directory with the Maven executable (for snapshots) +if [ -z "$actualDistributionDir" ]; then + # enable globbing to iterate over items + set +f + for dir in "$TMP_DOWNLOAD_DIR"/*; do + if [ -d "$dir" ]; then + if [ -f "$dir/bin/$MVN_CMD" ]; then + 
actualDistributionDir="$(basename "$dir")" + break + fi + fi + done + set -f +fi + +if [ -z "$actualDistributionDir" ]; then + verbose "Contents of $TMP_DOWNLOAD_DIR:" + verbose "$(ls -la "$TMP_DOWNLOAD_DIR")" + die "Could not find Maven distribution directory in extracted archive" +fi + +verbose "Found extracted Maven distribution directory: $actualDistributionDir" +printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url" +mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" + +clean || : +exec_maven "$@" diff --git a/app_java/mvnw.cmd b/app_java/mvnw.cmd new file mode 100644 index 0000000000..92450f9327 --- /dev/null +++ b/app_java/mvnw.cmd @@ -0,0 +1,189 @@ +<# : batch portion +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. 
+@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.3.4 +@REM +@REM Optional ENV vars +@REM MVNW_REPOURL - repo url base for downloading maven distribution +@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output +@REM ---------------------------------------------------------------------------- + +@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) +@SET __MVNW_CMD__= +@SET __MVNW_ERROR__= +@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% +@SET PSModulePath= +@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( + IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) +) +@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% +@SET __MVNW_PSMODULEP_SAVE= +@SET __MVNW_ARG0_NAME__= +@SET MVNW_USERNAME= +@SET MVNW_PASSWORD= +@IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*) +@echo Cannot start maven from wrapper >&2 && exit /b 1 +@GOTO :EOF +: end batch / begin powershell #> + +$ErrorActionPreference = "Stop" +if ($env:MVNW_VERBOSE -eq "true") { + $VerbosePreference = "Continue" +} + +# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties +$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl +if (!$distributionUrl) { + Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" +} + +switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) { + "maven-mvnd-*" { + $USE_MVND = $true + $distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" + 
$MVN_CMD = "mvnd.cmd" + break + } + default { + $USE_MVND = $false + $MVN_CMD = $script -replace '^mvnw','mvn' + break + } +} + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +if ($env:MVNW_REPOURL) { + $MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" } + $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')" +} +$distributionUrlName = $distributionUrl -replace '^.*/','' +$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' + +$MAVEN_M2_PATH = "$HOME/.m2" +if ($env:MAVEN_USER_HOME) { + $MAVEN_M2_PATH = "$env:MAVEN_USER_HOME" +} + +if (-not (Test-Path -Path $MAVEN_M2_PATH)) { + New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null +} + +$MAVEN_WRAPPER_DISTS = $null +if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) { + $MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists" +} else { + $MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists" +} + +$MAVEN_HOME_PARENT = "$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain" +$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' +$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" + +if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { + Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" + Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" + exit $? +} + +if (! 
$distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { + Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" +} + +# prepare tmp dir +$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile +$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" +$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null +trap { + if ($TMP_DOWNLOAD_DIR.Exists) { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } + } +} + +New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null + +# Download and Install Apache Maven +Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +Write-Verbose "Downloading from: $distributionUrl" +Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +$webclient = New-Object System.Net.WebClient +if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { + $webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) +} +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum +if ($distributionSha256Sum) { + if ($USE_MVND) { + Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." 
+ } + Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash + if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { + Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." + } +} + +# unzip and move +Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +$actualDistributionDir = "" + +# First try the expected directory name (for regular distributions) +$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain" +$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD" +if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) { + $actualDistributionDir = $distributionUrlNameMain +} + +# If not found, search for any directory with the Maven executable (for snapshots) +if (!$actualDistributionDir) { + Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object { + $testPath = Join-Path $_.FullName "bin/$MVN_CMD" + if (Test-Path -Path $testPath -PathType Leaf) { + $actualDistributionDir = $_.Name + } + } +} + +if (!$actualDistributionDir) { + Write-Error "Could not find Maven distribution directory in extracted archive" +} + +Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir" +Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null +try { + Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null +} catch { + if (! 
(Test-Path -Path "$MAVEN_HOME" -PathType Container)) { + Write-Error "fail to move MAVEN_HOME" + } +} finally { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } +} + +Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" diff --git a/app_java/pom.xml b/app_java/pom.xml new file mode 100644 index 0000000000..f38b0310d5 --- /dev/null +++ b/app_java/pom.xml @@ -0,0 +1,68 @@ + + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 4.0.2 + + + dev.devops + devops-info-service + 0.0.1-SNAPSHOT + devops-info-service + + + + + + + + + + + + + + + + 21 + 21 + 21 + 21 + + + + + org.springframework.boot + spring-boot-starter-web + + + + org.springframework.boot + spring-boot-starter-test + test + + + + org.springframework.boot + spring-boot-starter-actuator + + + + org.springframework.boot + spring-boot-starter-validation + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + diff --git a/app_java/src/main/java/dev/devops/devops_info_service/MainApplication.java b/app_java/src/main/java/dev/devops/devops_info_service/MainApplication.java new file mode 100644 index 0000000000..317e75d9be --- /dev/null +++ b/app_java/src/main/java/dev/devops/devops_info_service/MainApplication.java @@ -0,0 +1,165 @@ +package dev.devops.devops_info_service; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.web.bind.annotation.*; +import org.springframework.http.ResponseEntity; +import org.springframework.http.HttpStatus; + +import jakarta.servlet.http.HttpServletRequest; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.net.InetAddress; +import java.net.UnknownHostException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@SpringBootApplication +@RestController 
+public class MainApplication { + + private static final Logger logger = LoggerFactory.getLogger(MainApplication.class); + private static final Instant APP_START_TIME = Instant.now(); + private static final String SERVICE_NAME = "devops-info-service"; + private static final String VERSION = "1.0.0"; + private static final String DESCRIPTION = "DevOps course info service"; + private static final String FRAMEWORK = "Spring Boot"; + + private static final DateTimeFormatter ISO_FORMATTER = DateTimeFormatter + .ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") + .withZone(ZoneOffset.UTC); + + public static void main(String[] args) { + SpringApplication.run(MainApplication.class, args); + logger.info("Server started successfully"); + } + + private Map getUptime() { + Duration uptime = Duration.between(APP_START_TIME, Instant.now()); + long seconds = uptime.getSeconds(); + long hours = seconds / 3600; + long minutes = (seconds % 3600) / 60; + + Map uptimeInfo = new LinkedHashMap<>(); + uptimeInfo.put("seconds", seconds); + uptimeInfo.put("human", String.format("%d hour%s, %d minute%s", + hours, hours != 1 ? "s" : "", + minutes, minutes != 1 ? 
"s" : "")); + + return uptimeInfo; + } + + private Map getSystemInfo() throws UnknownHostException { + Map systemInfo = new LinkedHashMap<>(); + systemInfo.put("hostname", InetAddress.getLocalHost().getHostName()); + systemInfo.put("platform", System.getProperty("os.name")); + systemInfo.put("platform_version", System.getProperty("os.version")); + systemInfo.put("architecture", System.getProperty("os.arch")); + systemInfo.put("cpu_count", Runtime.getRuntime().availableProcessors()); + systemInfo.put("java_version", System.getProperty("java.version")); + systemInfo.put("java_vendor", System.getProperty("java.vendor")); + + return systemInfo; + } + + @GetMapping("/") + public ResponseEntity> getServiceInfo(HttpServletRequest request) { + String clientIp = getClientIp(request); + logger.info("GET / from {}", clientIp); + + try { + Map serviceInfo = new LinkedHashMap<>(); + serviceInfo.put("name", SERVICE_NAME); + serviceInfo.put("version", VERSION); + serviceInfo.put("description", DESCRIPTION); + serviceInfo.put("framework", FRAMEWORK); + + Map uptime = getUptime(); + Map runtimeInfo = new LinkedHashMap<>(); + runtimeInfo.put("uptime_seconds", uptime.get("seconds")); + runtimeInfo.put("uptime_human", uptime.get("human")); + runtimeInfo.put("current_time", ISO_FORMATTER.format(Instant.now()));; + runtimeInfo.put("timezone", "UTC"); + + Map requestInfo = new LinkedHashMap<>(); + requestInfo.put("client_ip", clientIp); + requestInfo.put("user_agent", request.getHeader("User-Agent") != null ? 
+ request.getHeader("User-Agent") : "unknown"); + requestInfo.put("method", request.getMethod()); + requestInfo.put("path", request.getRequestURI()); + + List> endpoints = new ArrayList<>(); + endpoints.add(Map.of( + "path", "/", + "method", "GET", + "description", "Service information" + )); + endpoints.add(Map.of( + "path", "/health", + "method", "GET", + "description", "Health check" + )); + + Map response = new LinkedHashMap<>(); + response.put("service", serviceInfo); + response.put("system", getSystemInfo()); + response.put("runtime", runtimeInfo); + response.put("request", requestInfo); + response.put("endpoints", endpoints); + + return ResponseEntity.ok(response); + + } catch (Exception e) { + logger.error("Error getting service info", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(Map.of( + "error", "Internal Server Error", + "message", "Failed to retrieve service information" + )); + } + } + + @GetMapping("/health") + public ResponseEntity> healthCheck() { + logger.info("Health check requested"); + + Map response = new LinkedHashMap<>(); + response.put("status", "healthy"); + response.put("timestamp", ISO_FORMATTER.format(Instant.now())); + response.put("uptime_seconds", getUptime().get("seconds")); + + return ResponseEntity.ok(response); + } + + @ExceptionHandler(UnknownHostException.class) + public ResponseEntity> handleUnknownHostException(UnknownHostException e) { + logger.error("Host resolution error", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(Map.of( + "error", "System Error", + "message", "Failed to retrieve system information" + )); + } + + @ExceptionHandler(Exception.class) + public ResponseEntity> handleGenericException(Exception e) { + logger.error("Unexpected error", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(Map.of( + "error", "Internal Server Error", + "message", "An unexpected error occurred" + )); + } + + private String 
getClientIp(HttpServletRequest request) { + String xForwardedFor = request.getHeader("X-Forwarded-For"); + if (xForwardedFor != null && !xForwardedFor.isEmpty()) { + return xForwardedFor.split(",")[0].trim(); + } + return request.getRemoteAddr(); + } +} \ No newline at end of file diff --git a/app_java/src/main/resources/application.properties b/app_java/src/main/resources/application.properties new file mode 100644 index 0000000000..6d7a1db487 --- /dev/null +++ b/app_java/src/main/resources/application.properties @@ -0,0 +1,14 @@ +# Server configuration +server.port=${PORT:8080} +server.address=${HOST:0.0.0.0} + +# Spring configuration +spring.application.name=devops-info-service + +# Logging +logging.level.com.example=INFO +logging.pattern.console=%d{yyyy-MM-dd HH:mm:ss} - %logger{36} - %level - %msg%n + +# Actuator endpoints +management.endpoints.web.exposure.include=health,info +management.endpoint.health.show-details=always \ No newline at end of file diff --git a/app_java/src/test/java/dev/devops/devops_info_service/MainApplicationTests.java b/app_java/src/test/java/dev/devops/devops_info_service/MainApplicationTests.java new file mode 100644 index 0000000000..cc590a6bc7 --- /dev/null +++ b/app_java/src/test/java/dev/devops/devops_info_service/MainApplicationTests.java @@ -0,0 +1,13 @@ +package dev.devops.devops_info_service; + +import org.junit.jupiter.api.Test; +import org.springframework.boot.test.context.SpringBootTest; + +@SpringBootTest +class MainApplicationTests { + + @Test + void contextLoads() { + } + +} diff --git a/app_python/.dockerignore b/app_python/.dockerignore new file mode 100644 index 0000000000..32357bde46 --- /dev/null +++ b/app_python/.dockerignore @@ -0,0 +1,23 @@ +.git +.gitignore +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +env/ +venv/ +.venv/ +.env +.vscode/ +.idea/ + +docs/ +tests/ + +*.log +*.sqlite3 +*.db + +.DS_Store +Thumbs.db diff --git a/app_python/.gitignore b/app_python/.gitignore new file mode 100644 index 
0000000000..3b9bb78f47 --- /dev/null +++ b/app_python/.gitignore @@ -0,0 +1,13 @@ +# Python +__pycache__/ +*.py[cod] +venv/ +*.log +.pytest_cache + +# IDE +.vscode/ +.idea/ + +# OS +.DS_Store \ No newline at end of file diff --git a/app_python/Dockerfile b/app_python/Dockerfile new file mode 100644 index 0000000000..d2e88c7f54 --- /dev/null +++ b/app_python/Dockerfile @@ -0,0 +1,43 @@ +FROM python:3.13-slim AS builder + +WORKDIR /app + +RUN python -m venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +COPY requirements.txt . +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir --only-binary :all: -r requirements.txt + +FROM python:3.13-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + && rm -rf /var/lib/apt/lists/* + +RUN groupadd -r appuser && useradd -r -g appuser appuser + +WORKDIR /app + +COPY --from=builder /opt/venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +COPY app.py . +COPY requirements.txt . + +RUN chown -R appuser:appuser /app + +USER appuser + +EXPOSE 5000 + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV HOST=0.0.0.0 +ENV PORT=5000 +ENV DEBUG=False + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ +CMD curl -f http://localhost:5000/health || exit 1 + +CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "5000"] diff --git a/app_python/README.md b/app_python/README.md new file mode 100644 index 0000000000..27a5c91a20 --- /dev/null +++ b/app_python/README.md @@ -0,0 +1,157 @@ +# DevOps Info Service (Python / FastAPI) + +[![Python CI/CD](https://github.com/flowelx/DevOps-Core-Course/actions/workflows/python-ci.yml/badge.svg)](https://github.com/flowelx/DevOps-Core-Course/actions/workflows/python-ci.yml) + +## Overview + +This FastAPI application delivers runtime and system data through HTTP endpoints. 
Built as a modular platform for DevOps education, it enables practical exploration of containerization, CI/CD pipelines, monitoring solutions, and infrastructure automation concepts. + +## Prerequisites + +- Python 3.11+ +- pip (Python package manager) + +## Installation + +1. Clone the repository and navigate to the project directory: + +```bash +cd app_python +``` + +2. Create and activate a virtual environment: + +```bash +python -m venv venv +source venv/bin/activate +``` + +3. Install dependencies: + +```bash +pip install -r requirements.txt +``` + +## Running the Application + +### Default Configuration + +The application runs on `0.0.0.0:5000` with debug mode disabled by default: + +```bash +python app.py +``` + +### Custom Configuration with Environment Variables + +You can customize the server behavior using environment variables: + +```bash +# Run on localhost:8080 +PORT=8080 python app.py + +# Run on 127.0.0.1:3000 with debug/reload enabled +HOST=127.0.0.1 PORT=3000 DEBUG=true python app.py +``` + +### Testing the Endpoints + +After starting the application, test the endpoints using curl: + +```bash +curl http://localhost:8080/ +curl http://localhost:8080/health +``` + +## API Endpoints + +### GET `/` + +Returns comprehensive JSON metadata with the following top-level sections: + +- **service** – name, version, description, framework +- **system** – hostname, platform, platform_version, architecture, cpu_count, python_version +- **runtime** – uptime_seconds, uptime_human, current_time, timezone +- **request** – client_ip, user_agent, method, path +- **endpoints** – list of available paths and their purpose + +### GET `/health` + +Returns a compact health status document: + +- **status** – string status ("healthy") +- **timestamp** – current UTC timestamp +- **uptime_seconds** – number of seconds the process has been running + +## Configuration + +The application is configured via environment variables. 
All variables are optional; if not set, the defaults below are used. + +| Variable | Default | Description | +|----------|-----------|----------------------------------------------| +| `HOST` | `0.0.0.0` | IP address the server binds to | +| `PORT` | `5000` | TCP port the application listens on | +| `DEBUG` | `False` | When `true`, enables debug mode with auto-reload and detailed error messages | + +--- +--- + +# Docker Containerization + +## Prerequisites + +- Docker 25.0.0+ + +## Building the Image Locally + +To build the Docker image: + +```bash +cd app_python +docker build -t [image-name]:[tag] -f Dockerfile . +``` + +**Example:** + +```bash +docker build -t my-fastapi-app:latest -f Dockerfile . +``` + +## Running a Container + +To run application in a container: + +```bash +docker run -d -p [host-port]:[container-port] --name [container-name] [image-name]:[tag] +``` + +**Example:** + +```bash +docker run -d -p 5000:5000 --name myapp my-fastapi-app:latest +``` + +## Environment Variables in Docker + +When running in Docker, you can override these environment variables: + +```bash +docker run -d -p 5000:5000 \ + -e HOST=0.0.0.0 \ + -e PORT=5000 \ + -e DEBUG=False \ + --name myapp \ + my-fastapi-app:1.0.0 +``` + +## Pulling from Docker Hub + +To use the pre-built image from Docker Hub registry: + +```bash +# Pull latest version +docker pull flowelx/fastapi-lab-app:latest + +# Run pulled image +docker run -d -p 5000:5000 flowelx/fastapi-lab-app:latest +``` diff --git a/app_python/app.py b/app_python/app.py new file mode 100644 index 0000000000..9b5cdef281 --- /dev/null +++ b/app_python/app.py @@ -0,0 +1,160 @@ +import os +import socket +import platform +import logging +from datetime import datetime, timezone + +from fastapi import FastAPI, Request +from fastapi.responses import JSONResponse +import uvicorn + +app = FastAPI() + +HOST = os.getenv('HOST', '0.0.0.0') +PORT = int(os.getenv('PORT', '5000')) +DEBUG = os.getenv('DEBUG', 'False').lower() == 'true' + 
+logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +APP_START_TIME = datetime.now(timezone.utc) + + +def get_uptime(): + """Calculate application runtime""" + delta = datetime.now(timezone.utc) - APP_START_TIME + seconds = int(delta.total_seconds()) + + hours = seconds // 3600 + minutes = (seconds % 3600) // 60 + + return { + 'seconds': seconds, + 'human': f"{hours} hour{'s' if hours != 1 else ''}, " + f"{minutes} minute{'s' if minutes != 1 else ''}" + } + + +def format_datetime_iso(dt: datetime): + """Format datetime""" + formatted = dt.strftime('%Y-%m-%dT%H:%M:%S') + milliseconds = dt.microsecond // 1000 + + return f"{formatted}.{milliseconds:03d}Z" + + +@app.get('/') +async def get_service_info(request: Request): + """ + Root endpoint returning comprehensive service and system information. + + Returns: + dict: JSON with service, system, runtime, and request information. + """ + logger.info( + f"GET / from {request.client.host if request.client else 'unknown'}" + ) + + service_info = { + 'name': 'devops-info-request', + 'version': '1.0.0', + 'description': 'DevOps course info service', + 'framework': 'FastAPI' + } + + system_info = { + 'hostname': socket.gethostname(), + 'platform': platform.system(), + 'platform_version': platform.version(), + 'architecture': platform.machine(), + 'cpu_count': os.cpu_count() or 0, + 'python_version': platform.python_version() + } + + uptime = get_uptime() + runtime_info = { + 'uptime_seconds': uptime['seconds'], + 'uptime_human': uptime['human'], + 'current_time': format_datetime_iso(datetime.now(timezone.utc)), + 'timezone': 'UTC' + } + + client_ip = request.client.host if request.client else '127.0.0.1' + request_info = { + 'client_ip': client_ip, + 'user_agent': request.headers.get('user-agent', 'uknown'), + 'method': request.method, + 'path': request.url.path + } + + endpoints = [ + {'path': '/', 'method': 'GET', 
'description': 'Service information'}, + {'path': '/health', 'method': 'GET', 'description': 'Health check'} + ] + + response = { + 'service': service_info, + 'system': system_info, + 'runtime': runtime_info, + 'request': request_info, + 'endpoints': endpoints + } + + return response + + +@app.get('/health') +async def health_check(request: Request): + """ + Health check endpoint for service monitoring. + + Returns: + dict: Service health status with timestamp and uptime. + """ + logger.info('Health check requested') + + return { + 'status': 'healthy', + 'timestamp': format_datetime_iso(datetime.now(timezone.utc)), + 'uptime_seconds': get_uptime()['seconds'] + } + + +@app.exception_handler(404) +async def not_found_exception_handler(request: Request, exc: Exception): + """Handle 404 errors: page not found""" + + return JSONResponse( + status_code=404, + content={ + 'error': 'Not Found', + 'message': 'Endpoint does not exist' + } + ) + + +@app.exception_handler(500) +async def internal_error_handler(request: Request, exc: Exception): + """Handle 500 errors: internal server errors""" + + return JSONResponse( + status_code=500, + content={ + 'error': 'Internal Server Error', + 'message': 'An unexpected error occurred' + } + ) + + +if __name__ == '__main__': + logger.info(f'Starting server on {HOST}:{PORT}') + + uvicorn.run( + 'app:app', + host=HOST, + port=PORT, + reload=DEBUG + ) diff --git a/app_python/docs/LAB01.md b/app_python/docs/LAB01.md new file mode 100644 index 0000000000..7104b40c3f --- /dev/null +++ b/app_python/docs/LAB01.md @@ -0,0 +1,246 @@ +# Lab 1 — DevOps Info Service: Web Application Development + +## 1. Framework Selection + +**Selected Framework: FastAPI** + +My framework choice was FastAPI. I have previous experience with the framework in academic projects. It provides an automatic API documentation (via Swagger UI) and has excellent asynchronous performance. 
+ +**Comparison table:** + +| **Feature** | **FastAPI** | **Flask** | **Django** | +| ----------------- | -------------- | ---------- | ------------- | +| **Performance** | Async | Sync | Sync | +| **Documentation** | Auto-generated | Manual | Manual | +| **Complexity** | Moderate | Easy | Difficult | +| **Best For** | APIs | Small apps | Full web apps | + +## 2. Best Practices Applied + +#### 1. Clean code organization + +**Clear function names** allow developers to quickly understand a function's purpose. + +```python +def get_uptime(): +async def get_service_info(request: Request): +async def health_check(): +``` + +**Proper imports grouping** enhances code's readability. Standard library imports come first, followed by third-party imports. + +```python +import os +import socket +import platform +import logging +from datetime import datetime, timezone + +from fastapi import FastAPI, Request +from fastapi.responses import JSONResponse +import uvicorn +``` + +**Comments only where needed** help developers quickly get familiar with code. An abundance of comments decreases readability. + +```python +async def health_check(): +    """ +    Health check endpoint for service monitoring. +    Returns: +        dict: Service health status with timestamp and uptime. +    """ +    +def not_found_exception_handler(request: Request, exc: Exception): +    """Handle 404 errors: page not found""" +``` + +**Follow PEP 8** + +- Proper import grouping +- 4-space indentation +- Line length < 79 characters +- Descriptive variable names + +```python +return { +        'seconds': seconds, +        'human': f"{hours} hour{'s' if hours != 1 else ''}, " +                 f"{minutes} minute{'s' if minutes != 1 else ''}" +    } +``` + +#### 2. Error Handling + +Error handling is crucial in the creation and maintenance of web applications because developers should cover all possible outcomes so that end users won't face unexpected behavior.
+ +```python +@app.exception_handler(500) +async def internal_error_handler(request: Request, exc: Exception): +    """Handle 500 errors: internal server errors""" + +    return JSONResponse( +        status_code=500, +        content={ +            'error': 'Internal Server Error', +            'message': 'An unexpected error occurred' +        } +    ) +``` + +#### 3. Logging + +Logging is a significant part of the development process that ensures competent debugging. + +```python +logger.info(f'Starting server on {HOST}:{PORT}') + +logger.info('Health check requested') +``` + +#### 4. Dependencies + +The `requirements.txt` file lists the required versions of packages ensuring consistent environments in development, testing, and production. + +``` +fastapi==0.115.0 +uvicorn[standard]==0.32.0 +``` + +#### 5. Git Ignore + +The `.gitignore` file is used to avoid leaking of files with sensitive information and files with large amounts of unnecessary data and cache from Git tracking. + +``` +# Python +__pycache__/ +*.py[cod] +venv/ +*.log + +# IDE +.vscode/ +.idea/ + +# OS +.DS_Store +``` + +## 3.
API Documentation + +#### GET / - Service Information + +**Request:** + +```bash +curl http://localhost:5000/ | jq +``` + +**Response:** + +``` +{ + "service": { + "name": "devops-info-request", + "version": "1.0.0", + "description": "DevOps course info service", + "framework": "FastAPI" + }, + "system": { + "hostname": "Alena", + "platform": "Linux", + "platform_version": "#1 SMP Tue Nov 5 00:21:55 UTC 2024", + "architecture": "x86_64", + "cpu_count": 8, + "python_version": "3.12.3" + }, + "runtime": { + "uptime_seconds": 7, + "uptime_human": "0 hours, 0 minutes", + "current_time": "2026-01-28T11:30:27.647Z", + "timezone": "UTC" + }, + "request": { + "client_ip": "127.0.0.1", + "user_agent": "curl/8.5.0", + "method": "GET", + "path": "/" + }, + "endpoints": [ + { + "path": "/", + "method": "GET", + "description": "Service information" + }, + { + "path": "/health", + "method": "GET", + "description": "Health check" + } + ] +} +``` + +#### GET /health - Health Check + +**Request:** + +```bash +curl http://localhost:5000/health | jq +``` + +**Response:** + +``` +{ + "status": "healthy", + "timestamp": "2026-01-28T11:31:27.003Z", + "uptime_seconds": 67 +} +``` + +## 4. Testing Evidence + +#### Screenshots + +**1. Main Endpoint (`GET` /)** + +![Main Endpoint](screenshots/01-main-endpoint.jpg) + +**2. Health Check (`GET` /health)** + +![Health Check](screenshots/02-health-check.jpg) + +#### Terminal Output + +``` +2026-01-28 14:30:19,373 - __main__ - INFO - Starting server on 0.0.0.0:5000 +INFO: Started server process [1126] +INFO: Waiting for application startup. +INFO: Application startup complete. +INFO: Uvicorn running on http://0.0.0.0:5000 (Press CTRL+C to quit) +2026-01-28 14:30:27,646 - app - INFO - GET / from 127.0.0.1 +INFO: 127.0.0.1:37898 - "GET / HTTP/1.1" 200 OK +2026-01-28 14:31:27,003 - app - INFO - Health check requested +INFO: 127.0.0.1:45926 - "GET /health HTTP/1.1" 200 OK + +``` + +## 5. 
Challenges & Solutions + +**Challenge 1: First Independent API Development** + +Previously I only assisted with creating endpoints. I had never built a complete web service from scratch, so my practical experience with FastAPI was limited. + +**Solution:** Studied FastAPI documentation. + +**Challenge 2: Understanding Application Architecture** + +I was unfamiliar with the relationship between FastAPI and ASGI servers. + +**Solution:** Learned that uvicorn is the ASGI server. FastAPI defines the application logic. Uvicorn serves it to handle HTTP requests. + +--- +## GitHub Community + +Starring repositories signals interest and support, helping projects gain visibility and recognition within the developer community. Following developers provides learning opportunities through their code contributions and fosters networking for potential collaboration. diff --git a/app_python/docs/LAB02.md b/app_python/docs/LAB02.md new file mode 100644 index 0000000000..788937154a --- /dev/null +++ b/app_python/docs/LAB02.md @@ -0,0 +1,229 @@ +# Lab 2 — Docker Containerization + +## 1. Docker Best Practices Applied + +### Non-Root User Implementation + +Created dedicated user `appuser` instead of running as root. Security - limits damage if container is compromised. + +```dockerfile +RUN groupadd -r appuser && useradd -r -g appuser appuser +USER appuser +``` + +### Layer Caching Optimization + + +Ordered instructions to maximize Docker layer caching. Faster build - unchanged layers are reused from cache. + +```dockerfile +# Copy requirements first (changes less frequently) +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt +# Copy application code last (changes frequently) +COPY app.py . +``` + +### .dockerignore File + +Created `.dockerignore` to exclude unnecessary files. Smaller image size, faster builds, security (exclude secrets). + +``` +.git +__pycache__ +*.pyc +.env +.vscode/ +``` + +### Multi-Stage Build + +Used builder pattern with two stages.
Smaller final image - build tools excluded from runtime. + +```dockerfile +FROM python:3.13-slim AS builder +# ... build dependencies +FROM python:3.13-slim +COPY --from=builder /opt/venv /opt/venv +``` + +### Specific Base Image Version + +Used `python:3.13-slim` instead of `python:latest`. Reproducibility - prevents breaking changes from updates. + +### Health Check + +Added HEALTHCHECK instruction. Container orchestration - Kubernetes/Docker can monitor health. + +```dockerfile +HEALTHCHECK --interval=30s --timeout=3s \ + CMD curl -f http://localhost:5000/health || exit 1 +``` + +## 2. Image Information & Decisions + +### Base Image Choice + +**Selected:** `python:3.13-slim` +**Justification:** + +- `slim` variant: 45MB vs 350MB for full Python image +- Python 3.13: Latest stable version +- Security: Regular security updates from official Python Docker team +- Compatibility: Contains essential system libraries for Python packages + +**Alternatives considered:** + +- `python:3.13-alpine`: Even smaller (17MB) but musl libc can cause compatibility issues +- `python:3.13`: Full image (350MB) - unnecessarily large + +### Final Image Size + +Final image size is 263MB. This is acceptable for FastAPI learning project, but there is a potential for optimization. + +### Layer Structure + +**Builder Stage** + +Base python:3.13-slim + +Virtual environment creation + +Requirements copy + +Package installation + +**Runtime Stage** + +python:3.13-slim again + +curl installation + +Non-root user creation + +Copy venv from builder + +App code copy + +Permissions fix + +Configuration/env setup + +### Optimization + +- Multi-stage build +- .dockerignore +- Layer ordering +- Slim base image + +## 3. Build & Run Process + +### Complete Build Output + +```bash +docker build -t fastapi-lab-app:latest -f Dockerfile . 
+Step 22/24 : ENV DEBUG=False + ---> Running in 33fe08aa5720 + ---> Removed intermediate container 33fe08aa5720 + ---> 5f5d31117ebd +Step 23/24 : HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 CMD curl -f http://localhost:5000/health || exit 1 + ---> Running in 0dbc6a6e4352 + ---> Removed intermediate container 0dbc6a6e4352 + ---> 600a000fa952 +Step 24/24 : CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "5000"] + ---> Running in 270f7a32ec5f + ---> Removed intermediate container 270f7a32ec5f + ---> cc6bae195ed2 +Successfully built cc6bae195ed2 +Successfully tagged fastapi-lab-app:latest +... +Step 22/24 : ENV DEBUG=False + ---> Running in 33fe08aa5720 + ---> Removed intermediate container 33fe08aa5720 + ---> 5f5d31117ebd +Step 23/24 : HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 CMD curl -f http://localhost:5000/health || exit 1 + ---> Running in 0dbc6a6e4352 + ---> Removed intermediate container 0dbc6a6e4352 + ---> 600a000fa952 +Step 24/24 : CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "5000"] + ---> Running in 270f7a32ec5f + ---> Removed intermediate container 270f7a32ec5f + ---> cc6bae195ed2 +Successfully built cc6bae195ed2 +Successfully tagged fastapi-lab-app:latest +``` + +### Container Running Output + +```bash +docker run -d -p 5000:5000 --name test-app fastapi-lab-app:latest +ec54aeed630c497775741259eab75cb6fb757e2d1a2faca3d2102328639cb77a +``` + +### Endpoint Testing + +```bash +curl http://localhost:5000/ +``` + +``` +{"service":{"name":"devops-info-request" +"version":"1.0.0","description":"DevOps course info service","framework":"FastAPI"},"system":{"hostname":"4fc82f359ee1","platform":"Linux","platform_version":"#1 SMP PREEMPT_DYNAMIC Fri, 30 Jan 2026 11:42:40 +0000","architecture":"x86_64","cpu_count":8,"python_version":"3.13.11"},"runtime":{"uptime_seconds":17,"uptime_human":"0 hours, 0 
minutes","current_time":"2026-02-04T15:05:57.042Z","timezone":"UTC"},"request":{"client_ip":"172.17.0.1","user_agent":"curl/8.18.0","method":"GET","path":"/"},"endpoints":[{"path":"/","method":"GET","description":"Service information"},{"path":"/health","method":"GET","description":"Health check"}]} +``` + +```bash +curl http://localhost:5000/health +``` + +``` +{"status":"healthy","timestamp":"2026-02-04T15:06:06.033Z","uptime_seconds":26} +``` + +### Docker Hub Repository + +**URL:** https://hub.docker.com/repository/docker/flowelx/fastapi-lab-app/general + +## 4. Technical Analysis + +### Dockerfile Design Logic + +- Multi-stage build +- Layer ordering +- Virtual environment + +### Layer Order + +**Current order (optimized):** + +1. System packages (rare changes) +2. Python dependencies (occasional changes) +3. App code (frequent changes) + +If reversed: App code changes would invalidate all subsequent layers, causing full rebuilds. + +### Security Measures + +1. Non-root user (appuser) +2. Minimal base image +3. No secrets in image +4. Health checks + +### .dockerignore Benefits + +- Faster builds +- Smaller images +- Security + +## 5. Challenges & Solutions + +### Challenge: Image Size Optimization + +**Optimization Steps:** + +1. Changed from `python:3.13` to `python:3.13-slim` +2. Added multi-stage build +3. Added `.dockerignore` +4. Used `--no-cache-dir` in pip + +**What I learned:** +Docker layer caching is powerful. Proper ordering can save minutes per build. Non-root user is basic but essential. Smaller images = faster deployment. diff --git a/app_python/docs/LAB03.md b/app_python/docs/LAB03.md new file mode 100644 index 0000000000..c31a958187 --- /dev/null +++ b/app_python/docs/LAB03.md @@ -0,0 +1,184 @@ +# Lab 3 — Continuous Integration (CI/CD) + +## 1. Unit Testing + +### Framework chosen + +I chose `pytest` because of using plain `assert` statement instead of complex assertion methods. 
The framework has clear output with `-v` flag showing exactly what passed/failed. `pytest` is well-documented with many tutorials and examples. + +### Test Structure + +**Test Coverage:** + +1. `test_root_endpoint()` - Tests `GET /` endpoint + +2. `test_health_endpoint()` - Tests `GET /health` endpoint + +3. `test_404_error` - Tests error handling + +Each test is independent. Tests use FastAPI's `TestClient` (no live server needed). + +### How to Run Tests Locally + +```bash +cd app_python +pip install -r requirements.txt +pytest tests/test_app.py -v +``` + +### Terminal Output Showing All Tests Passing + +```bash +=================================================================== test session starts ==================================================================== +platform linux -- Python 3.14.2, pytest-8.0.0, pluggy-1.6.0 +rootdir: /home/flowelx/DevOps-Core-Course/app_python +plugins: anyio-4.12.1 +collected 3 items + +tests/test_app.py ... [100%] + +===================================================================== warnings summary ===================================================================== +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +venv/lib/python3.14/site-packages/starlette/_utils.py:40 +tests/test_app.py::test_404_error + /home/flowelx/DevOps-Core-Course/app_python/venv/lib/python3.14/site-packages/starlette/_utils.py:40: DeprecationWarning: 'asyncio.iscoroutinefunction' is deprecated and slated for removal in Python 3.16; use inspect.iscoroutinefunction() instead + return asyncio.iscoroutinefunction(obj) or (callable(obj) and asyncio.iscoroutinefunction(obj.__call__)) + 
+venv/lib/python3.14/site-packages/fastapi/routing.py:233 +venv/lib/python3.14/site-packages/fastapi/routing.py:233 + /home/flowelx/DevOps-Core-Course/app_python/venv/lib/python3.14/site-packages/fastapi/routing.py:233: DeprecationWarning: 'asyncio.iscoroutinefunction' is deprecated and slated for removal in Python 3.16; use inspect.iscoroutinefunction() instead + is_coroutine = asyncio.iscoroutinefunction(dependant.call) + +-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html +============================================================== 3 passed, 11 warnings in 0.27s ============================================================== +``` + +## 2. GitHub Actions CI Workflow + +### Workflow Trigger Strategy + +**Configuration:** + +```yaml +on: + push: + branches: [ main, lab03 ] + pull_request: + branches: [ main ] +``` + +CI runs on feature branch and main. It saves GitHub Actions minutes, focused on importnant branches. +Docker build only runs on push to `main`. This prevents unnecessary Docker builds for every commit. + +### Marketplace Actions Chosen + +1. `actions/checkout@v4` - Official GitHub action, reliable, well-maintained + +2. `actions/setup-python@v5` - Handles multiple Python versions, caching built-in + +3. `docker/login-action@v3` - Secure token-based login, handles credentials properly + +4. `docker/build-push-action@v5` - Single action for both operations, supports caching + +### Docker Tagging Strategy + +**Strategy:** Calendar Versioning + +**Format:** `YYYY.NN.DD` + +It is convinient for frequent updates. There is no need to track breaking changes. 
+ +### Successful Workflow Run + +**Link to Workflow Run:** https://github.com/flowelx/DevOps-Core-Course/actions/runs/21786077651/job/62857660802 + +**Screenshot of Green Checkmark:** + +![successfull ci](screenshots/successful-ci.jpg) + +## CI Best Practices & Security + +### Status Badge in README + +![status badge](screenshots/status-badge.jpg) + +### Caching Implementation + +**Python Package Caching:** + +```yaml +- uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: 'app_python/requirements.txt' +``` + +### CI Best Practices Applied + +1. Path-based Triggers + +```yaml +paths: + - 'app_python/**' + - '.github/workflows/python-ci.yml' +``` + +2. **Job Dependencies** + +```yaml +build: + needs: test +``` + +3. **Conditional Execution** + +```yaml +if: github.event_name == 'push' && github.ref == 'refs/heads/main' +``` + +4. **Security Scanning** + +```yaml +- name: Security scan with pip-audit + run: | + cd app_python + pip install pip-audit + pip-audit -r requirements.txt || echo "Security scan completed" +``` + +5. **Linting** + +```yaml +- name: Run linter + run: | + cd app_python + flake8 app.py +``` + +6. **Test Reporting** + +```yaml +pytest tests/test_app.py +``` + +### Security Scanning Results + +**Tool Used:** `pip-audit` + +I couldn't use Snyk because the site did not open with or without vpn. So I applied `pip-audit`. 
+ +**Scan Results:** + +``` +Found 2 known vulnerabilities in 1 package +Name Version ID Fix Versions +--------- ------- -------------- ------------ +starlette 0.38.6 CVE-2024-47874 0.40.0 +starlette 0.38.6 CVE-2025-54121 0.47.2 +``` diff --git a/app_python/docs/LAB04.md b/app_python/docs/LAB04.md new file mode 100644 index 0000000000..afc7dfec57 --- /dev/null +++ b/app_python/docs/LAB04.md @@ -0,0 +1,102 @@ +# Lab 4 — Infrastructure as Code (Terraform & Pulumi) + +## Task 1 — Terraform VM Creation + +### Cloud Provider Selection + +I chose **Yandex Cloud** for this lab because it's accessible in Russia without VPN issues and offers a generous free tier. + +### Terraform Version Used + +```bash +terraform version +Terraform v1.14.3 +on linux_amd64 +``` + +### Resources Created + +| Resource | Configuration | +|----------|--------------| +| **VM Name** | `terraform-vm` | +| **Zone** | `ru-central1-b` | +| **CPU** | 2 cores | +| **RAM** | 4 GB | +| **Boot Disk** | 20 GB | +| **OS Image** | Ubuntu | +| **Network** | Existing VPC (`default`) | +| **Public IP** | Enabled | + +### SSH Connection Command + +```bash +ssh ubuntu@IP +``` + +### Terminal output + +```bash +terraform plan +Plan: 2 to add, 0 to change, 0 to destroy. +``` + +```bash +terraform apply --auto-approve +yandex_vpc_security_group.vm-sg: Creation complete +yandex_compute_instance.vm: Creation complete + +Outputs: +external_ip = "158.160.16.68" +ssh_command = "ssh ubuntu@158.160.16.68" +``` + +### Proof of SSH Access to VM + +![ssh-access](screenshots/ssh_access.jpg) + +## Task 2 — Pulumi VM Creation + +### Cleanup Terraform + +```bash +terraform destroy -auto-approve +Destroy complete! Resources: 2 destroyed. 
+``` + +### Pulumi Setup + +- Language: Python +- Version: v3.130.0 + +### Execution Logs + +**pulumi preview:** + +```bash +pulumi preview ++ yandex:vpc:securityGroup vm-sg create ++ yandex:compute:instance pulumi-vm create +``` + +**pulumi up:** + +```bash +pulumi up --yes +Outputs: +external_ip : "158.160.16.69" +ssh_command : "ssh ubuntu@158.160.16.69" +``` + +## Task 3 — Comparison + +### Terraform vs Pulumi + +|Aspect | Terraform | Pulumi (Python)| +|---|---|---| +|**Ease of Learning** | Simpler HCL syntax | Requires Python knowledge| +|**Readability** | Clear, declarative | Mixed with Python code| +|**Flexibility** | Limited | Full Python| +|**Debugging** | Clear error messages | Python stack traces| +|**State** | Local tfstate file | Pulumi Cloud| + +**My Preference:** Terraform diff --git a/app_python/docs/screenshots/01-main-endpoint.jpg b/app_python/docs/screenshots/01-main-endpoint.jpg new file mode 100644 index 0000000000..22d858dec3 Binary files /dev/null and b/app_python/docs/screenshots/01-main-endpoint.jpg differ diff --git a/app_python/docs/screenshots/02-health-check.jpg b/app_python/docs/screenshots/02-health-check.jpg new file mode 100644 index 0000000000..4d2e089070 Binary files /dev/null and b/app_python/docs/screenshots/02-health-check.jpg differ diff --git a/app_python/docs/screenshots/03-formatted-output.jpg b/app_python/docs/screenshots/03-formatted-output.jpg new file mode 100644 index 0000000000..c686f487dd Binary files /dev/null and b/app_python/docs/screenshots/03-formatted-output.jpg differ diff --git a/app_python/docs/screenshots/ssh_access.jpg b/app_python/docs/screenshots/ssh_access.jpg new file mode 100644 index 0000000000..3a23d68d8a Binary files /dev/null and b/app_python/docs/screenshots/ssh_access.jpg differ diff --git a/app_python/docs/screenshots/status-badge.jpg b/app_python/docs/screenshots/status-badge.jpg new file mode 100644 index 0000000000..526a6e96ef Binary files /dev/null and 
b/app_python/docs/screenshots/status-badge.jpg differ diff --git a/app_python/docs/screenshots/successful-ci.jpg b/app_python/docs/screenshots/successful-ci.jpg new file mode 100644 index 0000000000..5320ec759e Binary files /dev/null and b/app_python/docs/screenshots/successful-ci.jpg differ diff --git a/app_python/requirements.txt b/app_python/requirements.txt new file mode 100644 index 0000000000..00365486c9 --- /dev/null +++ b/app_python/requirements.txt @@ -0,0 +1,5 @@ +fastapi==0.115.0 +uvicorn[standard]==0.32.0 +pytest==8.0.0 +httpx +flake8 \ No newline at end of file diff --git a/app_python/tests/__init__.py b/app_python/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/app_python/tests/test_app.py b/app_python/tests/test_app.py new file mode 100644 index 0000000000..0583f71165 --- /dev/null +++ b/app_python/tests/test_app.py @@ -0,0 +1,22 @@ +from fastapi.testclient import TestClient +from app import app + +client = TestClient(app) + +def test_root_endpoint(): + response = client.get("/") + assert response.status_code == 200 + + data = response.json() + assert "service" in data + assert data["service"]["name"] == "devops-info-request" + +def test_health_endpoint(): + response = client.get("/health") + assert response.status_code == 200 + data = response.json() + assert data["status"] == "healthy" + +def test_404_error(): + response = client.get("/not-exists") + assert response.status_code == 404 \ No newline at end of file diff --git a/cloud-pulumi/.gitignore b/cloud-pulumi/.gitignore new file mode 100644 index 0000000000..c47c5386a3 --- /dev/null +++ b/cloud-pulumi/.gitignore @@ -0,0 +1,7 @@ +Pulumi.*.yaml +!Pulumi.dev.yaml # если хотите хранить dev конфиг +!Pulumi.yaml +credentials.json +venv/ +__pycache__/ +*.pyc diff --git a/cloud-pulumi/Pulumi.yaml b/cloud-pulumi/Pulumi.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/cloud-pulumi/__main__.py b/cloud-pulumi/__main__.py new file mode 100644 index 
0000000000..4bbe15ad76 --- /dev/null +++ b/cloud-pulumi/__main__.py @@ -0,0 +1,131 @@ +import pulumi +from pulumi_yandex import compute, vpc +import requests + +config = pulumi.Config() + +folder_id = config.require("folder_id") +cloud_id = config.require("cloud_id") + +zone = config.get("zone", "ru-central1-b") +vm_name = config.get("vm_name", "pulumi-vm") +subnet_id = config.get("subnet_id") +network_name = config.get("network_name", "default") +image_family = config.get("image_family", "ubuntu-2204-lts") +ssh_public_key = config.require("ssh_public_key") +cpu_cores = config.get_int("cpu_cores", 2) +memory_gb = config.get_int("memory_gb", 4) +disk_size_gb = config.get_int("disk_size_gb", 20) + +try: + my_ip = requests.get('https://api.ipify.org').text.strip() + pulumi.export('my_ip', my_ip) +except: + my_ip = "0.0.0.0" # fallback + pulumi.log.warn("Не удалось определить ваш IP, используется 0.0.0.0") + +image = compute.get_image( + family=image_family, + folder_id="standard-images" +) + +network = vpc.get_network(name=network_name) + +security_group = vpc.SecurityGroup( + "vm-sg", + name="pulumi-vm-security-group", + description="Правила firewall для ВМ", + network_id=network.id, + ingress=[ + vpc.SecurityGroupIngressArgs( + protocol="TCP", + description="SSH", + port=22, + v4_cidr_blocks=[f"{my_ip}/32"], + ), + vpc.SecurityGroupIngressArgs( + protocol="TCP", + description="HTTP", + port=80, + v4_cidr_blocks=["0.0.0.0/0"], + ), + vpc.SecurityGroupIngressArgs( + protocol="TCP", + description="App Port 5000", + port=5000, + v4_cidr_blocks=["0.0.0.0/0"], + ), + ], + egress=[ + vpc.SecurityGroupEgressArgs( + protocol="ANY", + description="Outbound traffic", + v4_cidr_blocks=["0.0.0.0/0"], + from_port=0, + to_port=65535, + ) + ], +) + +if not subnet_id: + subnet = vpc.Subnet( + "vm-subnet", + name="pulumi-vm-subnet", + zone=zone, + network_id=network.id, + v4_cidr_blocks=["192.168.10.0/24"], + ) + subnet_id = subnet.id + +vm = compute.Instance( + vm_name, + 
name=vm_name, + zone=zone, + platform_id="standard-v3", + resources=compute.InstanceResourcesArgs( + cores=cpu_cores, + memory=memory_gb, + core_fraction=20, + ), + boot_disk=compute.InstanceBootDiskArgs( + initialize_params=compute.InstanceBootDiskInitializeParamsArgs( + image_id=image.id, + size=disk_size_gb, + type="network-hdd", + ), + ), + network_interfaces=[ + compute.InstanceNetworkInterfaceArgs( + subnet_id=subnet_id, + nat=True, + security_group_ids=[security_group.id], + ) + ], + metadata={ + "user-data": f"""#cloud-config +users: + - name: ubuntu + groups: sudo + shell: /bin/bash + sudo: ['ALL=(ALL) NOPASSWD:ALL'] + ssh-authorized-keys: + - {ssh_public_key} +""", + }, + labels={ + "environment": config.get("environment", "development"), + "managed_by": "pulumi", + "project": config.get("project_name", "pulumi-demo"), + }, +) + +pulumi.export("vm_id", vm.id) +pulumi.export("vm_name", vm.name) +pulumi.export("external_ip", vm.network_interfaces[0].nat_ip_address) +pulumi.export("internal_ip", vm.network_interfaces[0].ip_address) +pulumi.export("ssh_command", vm.network_interfaces[0].nat_ip_address.apply( + lambda ip: f"ssh ubuntu@{ip}" +)) +pulumi.export("security_group_id", security_group.id) +pulumi.export("zone", zone) +pulumi.export("image_id", image.id) diff --git a/cloud-pulumi/requirements.txt b/cloud-pulumi/requirements.txt new file mode 100644 index 0000000000..27334e011d --- /dev/null +++ b/cloud-pulumi/requirements.txt @@ -0,0 +1,3 @@ +pulumi>=3.0.0 +pulumi-yandex>=0.5.0 +requests>=2.28.0 diff --git a/cloud-terraform/.gitignore b/cloud-terraform/.gitignore new file mode 100644 index 0000000000..d0d71c7336 --- /dev/null +++ b/cloud-terraform/.gitignore @@ -0,0 +1,35 @@ +# .gitignore +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* +.terraform/ +terrafrom.tfvars + +# Crash log files +crash.log +crash.*.log + +# Exclude all .tfvars files, which are likely to contain sensitive data +*.tfvars 
+*.tfvars.json + +# Ignore override files +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include example tfvars files +!terraform.tfvars.example + +# Ignore CLI configuration files +.terraformrc +terraform.rc + +# Ignore secret files +*.pem +*.key +*.priv diff --git a/cloud-terraform/.terraform.lock.hcl b/cloud-terraform/.terraform.lock.hcl new file mode 100644 index 0000000000..89ff6034cb --- /dev/null +++ b/cloud-terraform/.terraform.lock.hcl @@ -0,0 +1,16 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/http" { + version = "3.5.0" + hashes = [ + "h1:8bUoPwS4hahOvzCBj6b04ObLVFXCEmEN8T/5eOHmWOM=", + ] +} + +provider "registry.terraform.io/yandex-cloud/yandex" { + version = "0.187.0" + hashes = [ + "h1:wHAYDfBUlXMx1CmVwNWCr/SA7+CWO8aNC914WXUXNRQ=", + ] +} diff --git a/cloud-terraform/main.tf b/cloud-terraform/main.tf new file mode 100644 index 0000000000..298e55d878 --- /dev/null +++ b/cloud-terraform/main.tf @@ -0,0 +1,82 @@ +terraform { + required_providers { + yandex = { + source = "yandex-cloud/yandex" + } + } + required_version = ">= 0.13" +} + +provider "yandex" { + zone = var.zone + folder_id = var.folder_id +} + +data "http" "myip" { + url = "https://api.ipify.org" +} + +data "yandex_vpc_network" "existing-network" { + name = var.network_name +} + +resource "yandex_vpc_security_group" "vm-sg" { + name = var.sg_name + network_id = var.network_id + + ingress { + protocol = "TCP" + description = "SSH" + port = 22 + v4_cidr_blocks = ["${chomp(data.http.myip.response_body)}/32"] + } + + ingress { + protocol = "TCP" + description = "HTTP" + port = 80 + v4_cidr_blocks = ["0.0.0.0/0"] + } + + ingress { + protocol = "TCP" + description = "App Port 5000" + port = 5000 + v4_cidr_blocks = ["0.0.0.0/0"] + } + + egress { + protocol = "ANY" + description = "Outgoing traffic" + v4_cidr_blocks = ["0.0.0.0/0"] + from_port = 0 + to_port = 65535 + 
} +} + +resource "yandex_compute_instance" "vm" { + name = var.vm_name + + resources { + cores = 2 + memory = 4 + } + + boot_disk { + initialize_params { + image_id = var.image_id + } + } + + network_interface { + subnet_id = var.subnet_id + nat = true + security_group_ids = [yandex_vpc_security_group.vm-sg.id] + } + + metadata = { + ssh-keys = "ubuntu:${var.ssh_public_key}" + } +} + + diff --git a/cloud-terraform/outputs.tf b/cloud-terraform/outputs.tf new file mode 100644 index 0000000000..64ab05383f --- /dev/null +++ b/cloud-terraform/outputs.tf @@ -0,0 +1,4 @@ +output "vm_ip_address" { + description = "Public IP address VM" + value = yandex_compute_instance.vm.network_interface[0].nat_ip_address +} diff --git a/cloud-terraform/variables.tf b/cloud-terraform/variables.tf new file mode 100644 index 0000000000..e572809ed3 --- /dev/null +++ b/cloud-terraform/variables.tf @@ -0,0 +1,41 @@ +variable "zone" { + type = string + default = "ru-central1-b" +} + +variable "folder_id" { + type = string +} + +variable "vm_name" { + type = string + default = "terraform" +} + +variable "subnet_id" { + type = string +} + +variable "image_id" { + type = string + default = "fd804teg9bthv0h96s8v" +} + +variable "ssh_public_key" { + type = string + sensitive = true +} + +variable "network_name" { + type = string + default = "default" +} + +variable "sg_name" { + type = string + default = "terraform-vm-security-group" +} + +variable "network_id" { + type = string +}