diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..457603f
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,59 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+# CI/CD
+.github
+
+# Documentation
+*.md
+docs/
+
+# Python
+__pycache__
+*.py[cod]
+*$py.class
+*.so
+.Python
+*.egg-info/
+dist/
+build/
+*.egg
+
+# Virtual environments
+venv/
+env/
+ENV/
+.venv
+
+# Testing
+.pytest_cache/
+.coverage
+htmlcov/
+.tox/
+tests/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# Terraform
+terraform/
+*.tfstate
+*.tfvars
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Logs
+*.log
+logs/
+
+# Temporary files
+*.tmp
+*.bak
+tmp/
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..46e18db
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,26 @@
+[flake8]
+max-line-length = 127
+exclude =
+ .git,
+ __pycache__,
+ .venv,
+ venv,
+ env,
+ build,
+ dist,
+ *.egg-info,
+ .pytest_cache,
+ .tox,
+ terraform
+
+# Error codes to ignore:
+# E203 - whitespace before ':'
+# E501 - line too long (handled by black)
+# W503 - line break before binary operator
+ignore = E203,E501,W503
+
+# Complexity
+max-complexity = 10
+
+# Enable specific checks
+select = E,W,F,C
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
new file mode 100644
index 0000000..61539aa
--- /dev/null
+++ b/.github/workflows/ci-cd.yml
@@ -0,0 +1,128 @@
+name: CI/CD
+
+on:
+ push:
+ branches: [ main, develop ]
+ pull_request:
+ branches: [ main ]
+
+jobs:
+ lint:
+ name: Code Linting
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ cache: 'pip'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install flake8 pylint black isort
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+
+ - name: Run flake8
+ run: |
+ flake8 scripts/ --count --select=E9,F63,F7,F82 --show-source --statistics
+ flake8 scripts/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+
+ - name: Check code formatting with black
+ run: black --check scripts/
+
+ - name: Check import sorting with isort
+ run: isort --check-only scripts/
+
+ test:
+ name: Run Tests
+ runs-on: ubuntu-latest
+ needs: lint
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ cache: 'pip'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install pytest pytest-cov
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+
+ - name: Run tests with coverage
+ run: |
+ pytest tests/ --cov=scripts --cov-report=xml --cov-report=term
+
+ - name: Upload coverage reports
+ uses: codecov/codecov-action@v4
+ with:
+          files: ./coverage.xml
+ flags: unittests
+ name: codecov-umbrella
+ fail_ci_if_error: false
+
+ docker:
+ name: Build Docker Image
+ runs-on: ubuntu-latest
+ needs: test
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+      - name: Build Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: false
+          # Load the image into the runner's Docker daemon so the next step can run it
+          load: true
+          tags: portfolio-app:${{ github.sha }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+      - name: Smoke test Docker image
+        run: docker run --rm portfolio-app:${{ github.sha }} --help
+
+ deploy:
+ name: Deploy
+ runs-on: ubuntu-latest
+ needs: [lint, test, docker]
+ if: github.ref == 'refs/heads/main' && github.event_name == 'push'
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: us-east-1
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v2
+ continue-on-error: true
+
+ - name: Build and push Docker image to ECR
+ if: steps.login-ecr.outcome == 'success'
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ push: true
+ tags: |
+ ${{ steps.login-ecr.outputs.registry }}/portfolio-app:latest
+ ${{ steps.login-ecr.outputs.registry }}/portfolio-app:${{ github.sha }}
+
+ - name: Deployment notification
+ run: echo "Deployment completed successfully"
diff --git a/.github/workflows/sast.yml b/.github/workflows/sast.yml
new file mode 100644
index 0000000..0d6c52a
--- /dev/null
+++ b/.github/workflows/sast.yml
@@ -0,0 +1,97 @@
+name: SAST Security Scan
+
+on:
+ push:
+ branches: [ main, develop ]
+ pull_request:
+ branches: [ main, develop ]
+ schedule:
+ # Run daily at 2 AM UTC
+ - cron: '0 2 * * *'
+ workflow_dispatch:
+
+permissions:
+ contents: read
+ security-events: write
+ actions: read
+ pull-requests: write
+
+jobs:
+ security-scan:
+ name: SDLC Code Scanner Security Scan
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Run SDLC Code Scanner
+        id: security-scan
+        uses: williambrady/portfolio-code-scanner@v1.1.1
+ with:
+ scan-path: '.'
+ output-formats: 'json,html,markdown,sarif'
+ fail-on-severity: 'HIGH'
+ verbose: 'false'
+
+ - name: Upload SARIF to GitHub Code Scanning
+ uses: github/codeql-action/upload-sarif@v3
+ if: always() && steps.security-scan.outputs.sarif-path != ''
+ with:
+ sarif_file: ${{ steps.security-scan.outputs.sarif-path }}
+ category: 'sdlc-code-scanner'
+
+ - name: Upload Scan Reports
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: security-scan-reports
+ path: .sdlc-code-scanner-reports/
+ retention-days: 30
+
+ - name: Post Scan Summary
+ if: always()
+ run: |
+ echo "### Security Scan Results" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
+ echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
+ echo "| Critical | ${{ steps.security-scan.outputs.critical-count }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| High | ${{ steps.security-scan.outputs.high-count }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| Medium | ${{ steps.security-scan.outputs.medium-count }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| Low | ${{ steps.security-scan.outputs.low-count }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| **Total** | **${{ steps.security-scan.outputs.findings-count }}** |" >> $GITHUB_STEP_SUMMARY
+
+ - name: Comment PR with Results
+ if: github.event_name == 'pull_request' && always()
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const findings = '${{ steps.security-scan.outputs.findings-count }}';
+ const critical = '${{ steps.security-scan.outputs.critical-count }}';
+ const high = '${{ steps.security-scan.outputs.high-count }}';
+ const medium = '${{ steps.security-scan.outputs.medium-count }}';
+ const low = '${{ steps.security-scan.outputs.low-count }}';
+ const status = '${{ steps.security-scan.outputs.scan-status }}';
+
+ const statusEmoji = status === 'passed' ? ':white_check_mark:' : ':x:';
+
+ const body = `## SDLC Code Scanner Security Scan ${statusEmoji}
+
+ | Severity | Count |
+ |----------|-------|
+ | Critical | ${critical} |
+ | High | ${high} |
+ | Medium | ${medium} |
+ | Low | ${low} |
+ | **Total** | **${findings}** |
+
+ *View the full report in the [Actions artifacts](${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID})*
+ `;
+
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: body
+ });
diff --git a/.github/workflows/terraform.yml b/.github/workflows/terraform.yml
new file mode 100644
index 0000000..f8b684e
--- /dev/null
+++ b/.github/workflows/terraform.yml
@@ -0,0 +1,154 @@
+name: Terraform
+
+on:
+ push:
+ branches: [ main, develop ]
+ paths:
+ - 'terraform/**'
+ - '.github/workflows/terraform.yml'
+ pull_request:
+ branches: [ main ]
+ paths:
+ - 'terraform/**'
+ - '.github/workflows/terraform.yml'
+
+env:
+ TF_VERSION: 1.6.0
+ AWS_REGION: us-east-1
+
+jobs:
+ terraform-validate:
+ name: Terraform Validate
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: terraform
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup Terraform
+ uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: ${{ env.TF_VERSION }}
+
+ - name: Terraform Format Check
+ id: fmt
+ run: terraform fmt -check -recursive
+ continue-on-error: true
+
+ - name: Terraform Init
+ id: init
+ run: terraform init -backend=false
+
+ - name: Terraform Validate
+ id: validate
+ run: terraform validate -no-color
+
+ - name: Comment PR
+ uses: actions/github-script@v7
+ if: github.event_name == 'pull_request'
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const output = `#### Terraform Format and Style 🖌\`${{ steps.fmt.outcome }}\`
+ #### Terraform Initialization ⚙️\`${{ steps.init.outcome }}\`
+ #### Terraform Validation 🤖\`${{ steps.validate.outcome }}\`
+
+ *Pusher: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`;
+
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: output
+ })
+
+ terraform-plan:
+ name: Terraform Plan
+ runs-on: ubuntu-latest
+ needs: terraform-validate
+ if: github.event_name == 'pull_request'
+ defaults:
+ run:
+ working-directory: terraform
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Setup Terraform
+ uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: ${{ env.TF_VERSION }}
+
+ - name: Terraform Init
+ run: terraform init
+
+ - name: Terraform Plan
+ id: plan
+ run: terraform plan -no-color -input=false
+ continue-on-error: true
+
+ - name: Comment PR with Plan
+ uses: actions/github-script@v7
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const output = `#### Terraform Plan 📖\`${{ steps.plan.outcome }}\`
+
+            <details><summary>Show Plan</summary>
+
+ \`\`\`terraform
+ ${{ steps.plan.outputs.stdout }}
+ \`\`\`
+
+            </details>
+
+ *Pusher: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`;
+
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: output
+ })
+
+ terraform-apply:
+ name: Terraform Apply
+ runs-on: ubuntu-latest
+ needs: terraform-validate
+ if: github.ref == 'refs/heads/main' && github.event_name == 'push'
+ defaults:
+ run:
+ working-directory: terraform
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Setup Terraform
+ uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: ${{ env.TF_VERSION }}
+
+ - name: Terraform Init
+ run: terraform init
+
+ - name: Terraform Apply
+ run: terraform apply -auto-approve -input=false
+
+ - name: Apply notification
+ run: echo "Terraform apply completed successfully"
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d7383dd
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,111 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+*.manifest
+*.spec
+pip-log.txt
+pip-delete-this-directory.txt
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.log
+.pytest_cache/
+.mypy_cache/
+.dmypy.json
+dmypy.json
+.pyre/
+.pytype/
+instance/
+.webassets-cache
+.scrapy
+docs/_build/
+.ipynb_checkpoints
+profile_default/
+ipython_config.py
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+.spyderproject
+.spyproject
+.ropeproject
+
+# Terraform
+**/.terraform/*
+*.tfstate
+*.tfstate.*
+crash.log
+crash.*.log
+*.tfvars
+*.tfvars.json
+override.tf
+override.tf.json
+*_override.tf
+*_override.tf.json
+.terraformrc
+terraform.rc
+
+# IDEs
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+.DS_Store
+
+# OS
+Thumbs.db
+.Spotlight-V100
+.Trashes
+
+# Application specific
+*.db
+*.sqlite
+*.sqlite3
+
+# AWS
+.aws/
+
+# Secrets
+secrets.yml
+secrets.yaml
+*.pem
+*.key
+credentials.json
+
+# Build artifacts
+*.tar.gz
+*.zip
+
+# Logs
+logs/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..3eda2a9
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,112 @@
+# Pre-commit hooks for linting and code quality
+# Install: pip install pre-commit && pre-commit install
+# Run manually: pre-commit run --all-files
+# Skip hooks: git commit --no-verify (use sparingly)
+
+default_language_version:
+ python: python3
+
+repos:
+ # =============================================================================
+ # General file checks
+ # =============================================================================
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: trailing-whitespace
+ args: [--markdown-linebreak-ext=md]
+ - id: end-of-file-fixer
+ - id: check-yaml
+ args: [--unsafe] # Allow custom tags in CloudFormation
+ - id: check-json
+ - id: check-added-large-files
+ args: [--maxkb=1000]
+ - id: check-merge-conflict
+ - id: detect-private-key
+ - id: check-case-conflict
+ - id: mixed-line-ending
+ args: [--fix=lf]
+
+ # =============================================================================
+ # Python linting and formatting
+ # =============================================================================
+ - repo: https://github.com/psf/black
+ rev: 24.3.0
+ hooks:
+ - id: black
+ args: [--line-length=127]
+ files: ^scripts/|^tests/
+
+ - repo: https://github.com/pycqa/isort
+ rev: 5.13.2
+ hooks:
+ - id: isort
+ args: [--profile=black, --line-length=127]
+ files: ^scripts/|^tests/
+
+ - repo: https://github.com/pycqa/flake8
+ rev: 7.0.0
+ hooks:
+ - id: flake8
+ args: [--max-line-length=127, --max-complexity=10]
+ files: ^scripts/|^tests/
+
+ - repo: https://github.com/PyCQA/bandit
+ rev: 1.7.8
+ hooks:
+ - id: bandit
+ args: [-c, pyproject.toml, -r]
+ files: ^scripts/
+ additional_dependencies: ["bandit[toml]"]
+
+ # =============================================================================
+ # Terraform linting and formatting
+ # =============================================================================
+ - repo: https://github.com/antonbabenko/pre-commit-terraform
+ rev: v1.88.0
+ hooks:
+ - id: terraform_fmt
+ files: ^terraform/
+ - id: terraform_validate
+ files: ^terraform/
+ args:
+ - --hook-config=--retry-once-with-cleanup=true
+ - id: terraform_tflint
+ files: ^terraform/
+ args:
+ - --args=--config=__GIT_WORKING_DIR__/.tflint.hcl
+
+ # =============================================================================
+ # CloudFormation linting
+ # =============================================================================
+ - repo: https://github.com/aws-cloudformation/cfn-lint
+ rev: v0.86.1
+ hooks:
+ - id: cfn-lint
+ files: ^cloudformation/.*\.(yaml|yml|json)$
+
+ # =============================================================================
+ # Secrets detection
+ # =============================================================================
+ - repo: https://github.com/gitleaks/gitleaks
+ rev: v8.18.2
+ hooks:
+ - id: gitleaks
+
+ # =============================================================================
+ # Markdown linting (optional - remove if not needed)
+ # =============================================================================
+ - repo: https://github.com/igorshubovych/markdownlint-cli
+ rev: v0.39.0
+ hooks:
+ - id: markdownlint
+ args: [--fix, --disable, MD013, MD033, MD041, --]
+
+# =============================================================================
+# CI Configuration
+# =============================================================================
+ci:
+ autofix_commit_msg: "style: auto-fix by pre-commit hooks"
+ autofix_prs: true
+ autoupdate_commit_msg: "chore: update pre-commit hooks"
+ autoupdate_schedule: monthly
diff --git a/.tflint.hcl b/.tflint.hcl
new file mode 100644
index 0000000..94b2711
--- /dev/null
+++ b/.tflint.hcl
@@ -0,0 +1,52 @@
+# TFLint configuration
+# https://github.com/terraform-linters/tflint
+
+config {
+ # Call module type: "all" checks both local and remote modules
+ call_module_type = "local"
+}
+
+# AWS plugin for AWS-specific rules
+plugin "aws" {
+ enabled = true
+ version = "0.30.0"
+ source = "github.com/terraform-linters/tflint-ruleset-aws"
+}
+
+# Terraform plugin for general Terraform rules
+plugin "terraform" {
+ enabled = true
+ preset = "recommended"
+}
+
+# =============================================================================
+# Rule configurations
+# =============================================================================
+
+# Naming conventions
+rule "terraform_naming_convention" {
+ enabled = true
+}
+
+# Require descriptions for variables and outputs
+rule "terraform_documented_variables" {
+ enabled = true
+}
+
+rule "terraform_documented_outputs" {
+ enabled = true
+}
+
+# Standard module structure
+rule "terraform_standard_module_structure" {
+ enabled = true
+}
+
+# Warn when terraform.workspace is referenced with a remote backend
+rule "terraform_workspace_remote" {
+ enabled = true
+}
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000..98e3926
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,265 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+This is a GitHub template repository for portfolio projects following SDLC (Software Development Life Cycle) best practices. The repository is currently in its initial setup phase.
+
+## Architecture
+
+The architecture below reflects this template's starting point; update this section as the project develops to cover:
+
+- Primary technology stack and frameworks
+ - Terraform
+ - CloudFormation
+ - Python
+ - Docker
+- Project organization and module structure
+  - `/` - Repository root, containing the Dockerfile entry point
+ - /terraform - Terraform configuration files
+ - /cloudformation - CloudFormation templates
+ - /scripts - Python application code
+- Key architectural patterns and design decisions
+ - Infrastructure as Code (IaC) with Terraform and/or CloudFormation
+ - Python application code for data processing and analysis
+ - Docker for containerization and portable execution
+ - CI/CD pipelines for automated testing and deployment
+ - Monitoring and logging for observability
+- Data flow and component interactions
+ - Data is processed and analyzed by the Python application
+  - Results are stored in a persistent storage solution:
+    - GitHub Actions logs
+    - AWS CloudWatch Log Group
+    - AWS S3 Bucket
+ - Monitoring and logging are implemented for observability
+
+## Development Commands
+
+### Docker Commands
+
+```bash
+# Build the Docker image
+docker build -t portfolio-app .
+
+# Run the container locally
+docker run --rm portfolio-app
+
+# Run with mounted volumes for development
+docker run --rm -v $(pwd)/scripts:/app/scripts portfolio-app
+```
+
+### Python Development
+
+```bash
+# Install dependencies
+pip install -r requirements.txt
+
+# Run the application locally
+python scripts/main.py
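+
+# Run with sample arguments
+python scripts/main.py --input "sample data" --environment dev --verbose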
+
+# Run tests
+pytest tests/
+
+# Run a single test
+pytest tests/test_name.py
+
+# Linting
+flake8 scripts/
+pylint scripts/
+
+# Formatting
+black scripts/
+isort scripts/
+```
+
+### Terraform Commands
+
+```bash
+# Initialize Terraform
+cd terraform && terraform init
+
+# Plan infrastructure changes
+terraform plan
+
+# Apply infrastructure changes
+terraform apply
+
+# Destroy infrastructure
+terraform destroy
+
+# Format Terraform files
+terraform fmt -recursive
+
+# Validate configuration
+terraform validate
+```
+
+### CloudFormation Commands
+
+```bash
+# Validate template
+aws cloudformation validate-template \
+ --template-body file://cloudformation/example-stack.yaml
+
+# Deploy stack
+aws cloudformation deploy \
+ --template-file cloudformation/example-stack.yaml \
+ --stack-name my-app-dev \
+ --parameter-overrides Environment=dev
+
+# Delete stack
+aws cloudformation delete-stack --stack-name my-app-dev
+
+# Lint templates (via cfn-lint)
+cfn-lint cloudformation/*.yaml
+```
+
+### Pre-commit Hooks
+
+```bash
+# Install hooks (run once after cloning)
+pre-commit install
+
+# Run all hooks manually
+pre-commit run --all-files
+
+# Run specific hook
+pre-commit run black --all-files
+pre-commit run terraform_fmt --all-files
+pre-commit run cfn-lint --all-files
+
+# Update hook versions
+pre-commit autoupdate
+
+# Skip hooks for a commit (use sparingly)
+git commit --no-verify -m "message"
+```
+
+### Security Scanning
+
+Security scanning is handled automatically via GitHub Actions using the [SDLC Code Scanner](https://github.com/williambrady/portfolio-code-scanner) action. The scan runs on:
+
+- Every push to `main` or `develop` branches
+- Every pull request to `main`
+- Daily at 2 AM UTC (scheduled)
+- Manual trigger via workflow_dispatch
+
+For local scanning during development, you can use the SDLC Code Scanner Docker image directly:
+
+```bash
+# Pull the scanner image
+docker pull ghcr.io/williambrady/portfolio-code-scanner:latest
+
+# Run a local scan
+docker run --rm \
+ -v $(pwd):/repo:ro \
+ -v $(pwd)/reports:/app/reports \
+ ghcr.io/williambrady/portfolio-code-scanner:latest \
+ scan-local --repo-path /repo --format json --format html
+```
+
+### CI/CD
+
+- GitHub Actions workflows are located in `.github/workflows/`
+- Workflows include:
+ - `ci-cd.yml`: Build, test, and deploy pipeline
+ - `terraform.yml`: Infrastructure validation and deployment
+  - `sast.yml`: Security scanning (SAST and lint checks)
+- Infrastructure deployment is automated through Terraform
+- Security scans run automatically on every push/PR and daily at 2 AM UTC
+
+## Security Scanner
+
+Security scanning uses the [SDLC Code Scanner](https://github.com/williambrady/portfolio-code-scanner) GitHub Action, which integrates 17+ security tools:
+
+### Scanner Capabilities
+
+**Terraform Scanning:**
+
+- tfsec: Security-focused linting
+- Checkov: Policy-as-code compliance
+- Trivy: Vulnerability and misconfiguration detection
+- TFLint: Terraform best practices
+- Terraform validate: Syntax validation
+
+**Python Scanning:**
+
+- Bandit: Code security analysis (SQL injection, weak crypto, etc.)
+- Safety: Dependency vulnerability detection
+
+**Secrets Detection:**
+
+- Gitleaks: Credentials and API key detection
+
+**CloudFormation Scanning:**
+
+- cfn-lint: Template validation
+- cfn-nag: Security analysis
+- Checkov: Policy compliance
+
+**npm Scanning:**
+
+- npm audit: Dependency vulnerabilities
+- Snyk: Advanced vulnerability scanning (optional, requires token)
+
+### Scanner Configuration
+
+The scanner can be configured via inputs in `.github/workflows/sast.yml`:
+
+- **scan-path**: Directory to scan (default: `.`)
+- **output-formats**: Report formats (json, html, markdown, sarif)
+- **fail-on-severity**: Fail threshold (CRITICAL, HIGH, MEDIUM, LOW, NONE)
+- **config-path**: Path to custom config.yaml for advanced configuration
+
+For custom rule exclusions or path exclusions, create a `config.yaml` file and reference it via the `config-path` input.
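+
+A minimal exclusion config might look like the sketch below (the keys shown are hypothetical - confirm the exact schema in the scanner documentation):
+
+```yaml
+# config.yaml - hypothetical sketch; verify key names against the scanner docs
+exclude_paths:
+  - tests/
+  - docs/
+suppressions:
+  - id: CKV_AWS_18  # Checkov: S3 access logging
+    reason: "Access logging intentionally disabled on the dev bucket"
+```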
+
+### Understanding Scan Results
+
+- **CRITICAL/HIGH**: Must fix before merging
+- **MEDIUM**: Should address soon
+- **LOW/INFO**: Nice to have fixes
+
+Scan results are:
+
+- Uploaded as workflow artifacts
+- Posted as PR comments (on pull requests)
+- Integrated with GitHub Code Scanning via SARIF
+
+See the [SDLC Code Scanner documentation](https://github.com/williambrady/portfolio-code-scanner) for comprehensive configuration options.
+
+## Conventions
+
+- **Pre-commit hooks must pass before pushing** - Run `pre-commit run --all-files` and fix any issues
+- Python code follows PEP 8 style guidelines
+- Use type hints in Python code (see the sketch at the end of this file)
+- Terraform code should be formatted with `terraform fmt`
+- CloudFormation templates should pass `cfn-lint` validation
+- All infrastructure changes must be made through IaC (Terraform or CloudFormation)
+- Docker images should be built and tested in CI before deployment
+- Security scans must pass (no CRITICAL/HIGH findings) before merging to main
+- Document any security scan rule exclusions with justification
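+
+For example (illustrative only), a new helper should follow the shape used in `scripts/main.py` - typed signatures with docstrings:
+
+```python
+def summarize(records: list[dict], *, limit: int = 10) -> dict:
+    """Return a summary of the first ``limit`` records."""
+    sample = records[:limit]
+    return {"count": len(sample), "keys": sorted({k for r in sample for k in r})}
+```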
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..cb3339a
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,38 @@
+# Use official Python runtime as base image
+FROM python:3.11-slim
+
+# Set working directory in container
+WORKDIR /app
+
+# Set environment variables
+ENV PYTHONUNBUFFERED=1 \
+ PYTHONDONTWRITEBYTECODE=1 \
+ PIP_NO_CACHE_DIR=1 \
+ PIP_DISABLE_PIP_VERSION_CHECK=1
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ gcc \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements file
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY scripts/ ./scripts/
+
+# Create non-root user for security
+RUN useradd -m -u 1000 appuser && \
+ chown -R appuser:appuser /app
+
+# Switch to non-root user
+USER appuser
+
+# Set entrypoint
+ENTRYPOINT ["python", "scripts/main.py"]
+
+# Default command (can be overridden)
+CMD ["--help"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..223467b
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,136 @@
+# PolyForm Noncommercial License 1.0.0
+
+<https://polyformproject.org/licenses/noncommercial/1.0.0>
+
+## Acceptance
+
+In order to get any license under these terms, you must agree
+to them as both strict obligations and conditions to all
+your licenses.
+
+## Copyright License
+
+The licensor grants you a copyright license for the software
+to do everything you might do with the software that would
+otherwise infringe the licensor's copyright in it for any
+permitted purpose. However, you may only distribute the
+software according to [Distribution License](#distribution-license) and make
+changes or new works based on the software according to
+[Changes and New Works License](#changes-and-new-works-license).
+
+## Distribution License
+
+The licensor grants you an additional copyright license
+to distribute copies of the software. Your license
+to distribute covers distributing the software with
+changes and new works permitted by [Changes and New Works
+License](#changes-and-new-works-license).
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of
+the software from you also gets a copy of these terms or the
+URL for them above, as well as copies of any plain-text lines
+beginning with `Required Notice:` that the licensor provided
+with the software. For example:
+
+> Required Notice: Copyright Crofton Cloud (https://crofton.cloud)
+
+## Changes and New Works License
+
+The licensor grants you an additional copyright license to
+make changes and new works based on the software for any
+permitted purpose.
+
+## Patent License
+
+The licensor grants you a patent license for the software that
+covers patent claims the licensor can license, or becomes able
+to license, that you would infringe by using the software.
+
+## Noncommercial Purposes
+
+Any noncommercial purpose is a permitted purpose.
+
+## Personal Uses
+
+Personal use for research, experiment, and testing for
+the benefit of public knowledge, personal study, private
+entertainment, hobby projects, amateur pursuits, or religious
+observance, without any anticipated commercial application,
+is use for a permitted purpose.
+
+## Noncommercial Organizations
+
+Use by any charitable organization, educational institution,
+public research organization, public safety or health
+organization, environmental protection organization, or
+government institution is use for a permitted purpose
+regardless of the source of funding or obligations resulting
+from the funding.
+
+## Fair Use
+
+You may have "fair use" rights for the software under the
+law. These terms do not limit them.
+
+## No Other Rights
+
+These terms do not allow you to sublicense or transfer any of
+your licenses to anyone else, or prevent the licensor from
+granting licenses to anyone else. These terms do not imply
+any other licenses.
+
+## Patent Defense
+
+If you make any written claim that the software infringes or
+contributes to infringement of any patent, your patent license
+for the software granted under these terms ends immediately. If
+your company makes such a claim, your patent license ends
+immediately for work on behalf of your company.
+
+## Violations
+
+The first time you are notified in writing that you have
+violated any of these terms, or done anything with the software
+not covered by your licenses, your licenses can nonetheless
+continue if you come into full compliance with these terms,
+and take practical steps to correct past violations, within
+32 days of receiving notice. Otherwise, all your licenses
+end immediately.
+
+## No Liability
+
+***As far as the law allows, the software comes as is, without
+any warranty or condition, and the licensor will not be liable
+to you for any damages arising out of these terms or the use
+or nature of the software, under any kind of legal claim.***
+
+## Definitions
+
+The **licensor** is the individual or entity offering these
+terms, and the **software** is the software the licensor makes
+available under these terms.
+
+**You** refers to the individual or entity agreeing to these
+terms.
+
+**Your company** is any legal entity, sole proprietorship,
+or other kind of organization that you work for, plus all
+organizations that have control over, are under the control of,
+or are under common control with that organization. **Control**
+means ownership of substantially all the assets of an entity,
+or the power to direct its management and policies by vote,
+contract, or otherwise. Control can be direct or indirect.
+
+**Your licenses** are all the licenses granted to you for the
+software under these terms.
+
+**Use** means anything you do with the software requiring one
+of your licenses.
+
+---
+
+Required Notice: Copyright (c) 2025 Crofton Cloud (https://crofton.cloud)
+
+For licensing inquiries, contact: licensing@crofton.cloud
diff --git a/cloudformation/README.md b/cloudformation/README.md
new file mode 100644
index 0000000..023e72c
--- /dev/null
+++ b/cloudformation/README.md
@@ -0,0 +1,77 @@
+# CloudFormation Templates
+
+This directory contains AWS CloudFormation templates for infrastructure provisioning.
+
+## When to Use CloudFormation vs Terraform
+
+| Use CloudFormation When | Use Terraform When |
+|------------------------|-------------------|
+| AWS-only infrastructure | Multi-cloud or hybrid environments |
+| Deep AWS service integration needed | Need provider ecosystem (GitHub, Datadog, etc.) |
+| Using AWS-native tools (Service Catalog, StackSets) | Team already knows HCL |
+| Compliance requires AWS-native IaC | Complex state management requirements |
+
+## Files
+
+- `example-stack.yaml` - Minimal example demonstrating CloudFormation structure
+
+## Usage
+
+### Deploy a Stack
+
+```bash
+# Deploy with default parameters
+aws cloudformation deploy \
+ --template-file cloudformation/example-stack.yaml \
+ --stack-name my-app-dev \
+ --parameter-overrides Environment=dev ProjectName=my-app
+
+# Deploy with capabilities (required if creating IAM resources)
+aws cloudformation deploy \
+ --template-file cloudformation/example-stack.yaml \
+ --stack-name my-app-dev \
+ --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM \
+ --parameter-overrides Environment=dev
+```
+
+### Validate Templates
+
+```bash
+# Validate syntax
+aws cloudformation validate-template \
+ --template-body file://cloudformation/example-stack.yaml
+
+# Lint with cfn-lint (included in security scanner)
+cfn-lint cloudformation/*.yaml
+```
+
+### Delete a Stack
+
+```bash
+aws cloudformation delete-stack --stack-name my-app-dev
+```
+
+## Extending the Template
+
+Add resources by following AWS CloudFormation resource type syntax:
+
+```yaml
+Resources:
+ MyLambdaFunction:
+ Type: AWS::Lambda::Function
+ Properties:
+ FunctionName: !Sub '${ProjectName}-${Environment}-handler'
+ Runtime: python3.11
+ Handler: index.handler
+ # ... additional properties
+```
+
+See [AWS CloudFormation Resource Types](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) for available resources.
+
+## If You Don't Need CloudFormation
+
+If your project uses Terraform exclusively:
+
+1. Delete this `cloudformation/` directory
+2. Remove CloudFormation references from the README
+3. The security scanner will automatically skip CF scanning when no templates exist
diff --git a/cloudformation/example-stack.yaml b/cloudformation/example-stack.yaml
new file mode 100644
index 0000000..065aeb7
--- /dev/null
+++ b/cloudformation/example-stack.yaml
@@ -0,0 +1,81 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Description: |
+ Example CloudFormation template demonstrating basic structure.
+ This is a minimal placeholder - extend or replace for your use case.
+
+ Common extensions:
+ - Add Lambda functions, API Gateway, DynamoDB tables
+ - Add VPC, subnets, security groups for networking
+ - Add IAM roles and policies for service permissions
+ - Add S3 buckets, CloudFront distributions for static hosting
+
+Parameters:
+ Environment:
+ Type: String
+ Default: dev
+ AllowedValues:
+ - dev
+ - staging
+ - prod
+ Description: Deployment environment
+
+ ProjectName:
+ Type: String
+ Default: portfolio-app
+ Description: Name used for resource naming and tagging
+
+Resources:
+ # Example S3 bucket with security best practices
+ ExampleBucket:
+ Type: AWS::S3::Bucket
+ Properties:
+ BucketName: !Sub '${ProjectName}-${Environment}-${AWS::AccountId}'
+ BucketEncryption:
+ ServerSideEncryptionConfiguration:
+ - ServerSideEncryptionByDefault:
+ SSEAlgorithm: AES256
+ PublicAccessBlockConfiguration:
+ BlockPublicAcls: true
+ BlockPublicPolicy: true
+ IgnorePublicAcls: true
+ RestrictPublicBuckets: true
+ VersioningConfiguration:
+ Status: Enabled
+ Tags:
+ - Key: Project
+ Value: !Ref ProjectName
+ - Key: Environment
+ Value: !Ref Environment
+ - Key: ManagedBy
+ Value: CloudFormation
+
+ # Example CloudWatch Log Group
+ ExampleLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Sub '/app/${ProjectName}/${Environment}'
+ RetentionInDays: 30
+ Tags:
+ - Key: Project
+ Value: !Ref ProjectName
+ - Key: Environment
+ Value: !Ref Environment
+
+Outputs:
+ BucketName:
+ Description: Name of the created S3 bucket
+ Value: !Ref ExampleBucket
+ Export:
+ Name: !Sub '${AWS::StackName}-BucketName'
+
+ BucketArn:
+ Description: ARN of the created S3 bucket
+ Value: !GetAtt ExampleBucket.Arn
+ Export:
+ Name: !Sub '${AWS::StackName}-BucketArn'
+
+ LogGroupName:
+ Description: Name of the CloudWatch Log Group
+ Value: !Ref ExampleLogGroup
+ Export:
+ Name: !Sub '${AWS::StackName}-LogGroupName'
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..3c8ea28
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,61 @@
+# Python project configuration
+# Used by various tools including black, isort, bandit, and mypy
+
+[project]
+name = "portfolio-template-sdlc"
+version = "0.1.0"
+description = "A template repository for portfolio projects following SDLC best practices"
+requires-python = ">=3.11"
+
+[tool.black]
+line-length = 127
+target-version = ["py311"]
+include = '\.pyi?$'
+extend-exclude = '''
+/(
+ \.git
+ | \.mypy_cache
+ | \.venv
+ | venv
+ | build
+ | dist
+)/
+'''
+
+[tool.isort]
+profile = "black"
+line_length = 127
+skip_gitignore = true
+extend_skip = [".git", ".venv", "venv"]
+
+[tool.bandit]
+exclude_dirs = ["tests", ".venv", "venv"]
+skips = ["B101"] # Skip assert warnings (used in tests)
+
+[tool.mypy]
+python_version = "3.11"
+warn_return_any = true
+warn_unused_configs = true
+disallow_untyped_defs = true
+ignore_missing_imports = true
+exclude = [
+ "^tests/",
+ "^\\.venv/",
+]
+
+# Note: pytest.ini takes precedence over this section when both files are present
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+python_files = ["test_*.py"]
+python_functions = ["test_*"]
+addopts = "-v --tb=short"
+
+[tool.coverage.run]
+source = ["scripts"]
+omit = ["tests/*", ".venv/*"]
+
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "def __repr__",
+    "raise AssertionError",
+    "raise NotImplementedError",
+    "if __name__ == .__main__.:",
+    "if TYPE_CHECKING:",
+]
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..9d4f24a
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,29 @@
+[pytest]
+# Pytest configuration
+
+# Test discovery patterns
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+
+# Test paths
+testpaths = tests
+
+# Output options
+addopts =
+ -v
+ --strict-markers
+ --tb=short
+ --cov=scripts
+ --cov-report=term-missing
+ --cov-report=html
+ --cov-report=xml
+
+# Markers
+markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
+ integration: marks tests as integration tests
+ unit: marks tests as unit tests
+
+# Coverage is configured in pyproject.toml ([tool.coverage.*] sections);
+# coverage.py does not read pytest.ini.
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..d07973e
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,32 @@
+# Core dependencies
+boto3>=1.34.0
+python-dotenv>=1.0.0
+
+# Data processing
+pandas>=2.1.0
+numpy>=1.26.0
+
+# AWS SDK
+botocore>=1.34.0
+
+# Logging
+structlog>=24.1.0
+
+# Testing
+pytest>=7.4.0
+pytest-cov>=4.1.0
+pytest-mock>=3.12.0
+
+# Code quality
+flake8>=6.1.0
+pylint>=3.0.0
+black>=23.12.0
+isort>=5.13.0
+bandit>=1.7.0
+
+# Type checking
+mypy>=1.8.0
+types-boto3>=1.0.0
+
+# Pre-commit hooks
+pre-commit>=3.6.0
diff --git a/scripts/__init__.py b/scripts/__init__.py
new file mode 100644
index 0000000..89cbf42
--- /dev/null
+++ b/scripts/__init__.py
@@ -0,0 +1,3 @@
+"""Portfolio application package."""
+
+__version__ = "0.1.0"
diff --git a/scripts/main.py b/scripts/main.py
new file mode 100644
index 0000000..29e5d3b
--- /dev/null
+++ b/scripts/main.py
@@ -0,0 +1,241 @@
+"""
+Main application entry point for portfolio data processing.
+
+This is a template application that demonstrates:
+- Argument parsing
+- Logging setup
+- AWS service integration
+- Data processing pipeline
+"""
+
+import argparse
+import logging
+import sys
+import time
+from typing import Any, Optional
+
+import boto3
+from botocore.exceptions import ClientError
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler(sys.stdout)],
+)
+
+logger = logging.getLogger(__name__)
+
+
+class DataProcessor:
+ """
+ Main data processing class.
+
+ This class handles data processing, AWS interactions,
+ and business logic for the portfolio application.
+ """
+
+ def __init__(self, environment: str = "dev"):
+ """
+ Initialize the data processor.
+
+ Args:
+ environment: The environment (dev, staging, prod)
+ """
+ self.environment = environment
+        self.s3_client: Optional[Any] = None  # lazily created boto3 S3 client
+        self.cloudwatch_client: Optional[Any] = None  # lazily created boto3 Logs client
+ logger.info(f"Initializing DataProcessor for environment: {environment}")
+
+ def setup_aws_clients(self) -> None:
+ """Initialize AWS service clients."""
+ try:
+ self.s3_client = boto3.client("s3")
+ self.cloudwatch_client = boto3.client("logs")
+ logger.info("AWS clients initialized successfully")
+ except ClientError as e:
+ logger.error(f"Failed to initialize AWS clients: {e}")
+ raise
+
+ def process_data(self, input_data: str) -> dict:
+ """
+ Process input data and return results.
+
+ Args:
+ input_data: The data to process
+
+ Returns:
+ Dictionary containing processing results
+ """
+ logger.info(f"Processing data: {input_data}")
+
+ # Example processing logic
+ result = {
+ "status": "success",
+ "input": input_data,
+ "output": f"Processed: {input_data}",
+ "environment": self.environment,
+ }
+
+ logger.info("Data processing completed successfully")
+ return result
+
+ def save_to_s3(self, bucket: str, key: str, data: str) -> bool:
+ """
+ Save data to S3 bucket.
+
+ Args:
+ bucket: S3 bucket name
+ key: Object key
+ data: Data to save
+
+ Returns:
+ True if successful, False otherwise
+ """
+ if not self.s3_client:
+ logger.error("S3 client not initialized")
+ return False
+
+ try:
+ self.s3_client.put_object(Bucket=bucket, Key=key, Body=data)
+ logger.info(f"Successfully saved data to s3://{bucket}/{key}")
+ return True
+ except ClientError as e:
+ logger.error(f"Failed to save to S3: {e}")
+ return False
+
+ def log_to_cloudwatch(self, log_group: str, log_stream: str, message: str) -> bool:
+ """
+ Send log message to CloudWatch.
+
+ Args:
+ log_group: CloudWatch log group name
+ log_stream: CloudWatch log stream name
+ message: Log message
+
+ Returns:
+ True if successful, False otherwise
+ """
+ if not self.cloudwatch_client:
+ logger.error("CloudWatch client not initialized")
+ return False
+
+ try:
+ # Create log stream if it doesn't exist
+ try:
+ self.cloudwatch_client.create_log_stream(logGroupName=log_group, logStreamName=log_stream)
+            except ClientError as e:
+                # Only swallow "already exists"; surface real failures
+                if e.response["Error"]["Code"] != "ResourceAlreadyExistsException":
+                    raise
+
+ # Send log event
+ self.cloudwatch_client.put_log_events(
+ logGroupName=log_group,
+ logStreamName=log_stream,
+ logEvents=[
+ {
+ "timestamp": int(time.time() * 1000),
+ "message": message,
+ }
+ ],
+ )
+ logger.info(f"Successfully logged to CloudWatch: {log_group}/{log_stream}")
+ return True
+ except ClientError as e:
+ logger.error(f"Failed to log to CloudWatch: {e}")
+ return False
+
+
+def parse_arguments() -> argparse.Namespace:
+ """
+ Parse command line arguments.
+
+ Returns:
+ Parsed arguments
+ """
+ parser = argparse.ArgumentParser(
+ description="Portfolio Data Processing Application",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ parser.add_argument(
+ "--environment",
+ "-e",
+ choices=["dev", "staging", "prod"],
+ default="dev",
+ help="Environment to run in (default: dev)",
+ )
+
+ parser.add_argument(
+ "--input",
+ "-i",
+ type=str,
+ help="Input data to process",
+ )
+
+ parser.add_argument(
+ "--s3-bucket",
+ type=str,
+ help="S3 bucket for data storage",
+ )
+
+ parser.add_argument(
+ "--verbose",
+ "-v",
+ action="store_true",
+ help="Enable verbose logging",
+ )
+
+ return parser.parse_args()
+
+
+def main() -> int:
+ """
+ Main application entry point.
+
+ Returns:
+ Exit code (0 for success, 1 for failure)
+ """
+ args = parse_arguments()
+
+ # Set logging level
+ if args.verbose:
+ logging.getLogger().setLevel(logging.DEBUG)
+
+ logger.info("Starting Portfolio Data Processing Application")
+ logger.info(f"Environment: {args.environment}")
+
+ try:
+ # Initialize processor
+ processor = DataProcessor(environment=args.environment)
+
+ # Setup AWS clients if S3 bucket is specified
+ if args.s3_bucket:
+ processor.setup_aws_clients()
+
+ # Process data if input provided
+ if args.input:
+ result = processor.process_data(args.input)
+ logger.info(f"Processing result: {result}")
+
+ # Save to S3 if bucket specified
+ if args.s3_bucket:
+ processor.save_to_s3(
+ bucket=args.s3_bucket,
+ key=f"results/{args.environment}/output.txt",
+ data=str(result),
+ )
+ else:
+ logger.warning("No input data provided. Use --input to specify data to process.")
+
+ logger.info("Application completed successfully")
+ return 0
+
+ except Exception as e:
+ logger.error(f"Application failed: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/terraform/.terraform.lock.hcl b/terraform/.terraform.lock.hcl
new file mode 100644
index 0000000..39a6fbd
--- /dev/null
+++ b/terraform/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.100.0"
+ constraints = "~> 5.0"
+ hashes = [
+ "h1:edXOJWE4ORX8Fm+dpVpICzMZJat4AX0VRCAy/xkcOc0=",
+ "zh:054b8dd49f0549c9a7cc27d159e45327b7b65cf404da5e5a20da154b90b8a644",
+ "zh:0b97bf8d5e03d15d83cc40b0530a1f84b459354939ba6f135a0086c20ebbe6b2",
+ "zh:1589a2266af699cbd5d80737a0fe02e54ec9cf2ca54e7e00ac51c7359056f274",
+ "zh:6330766f1d85f01ae6ea90d1b214b8b74cc8c1badc4696b165b36ddd4cc15f7b",
+ "zh:7c8c2e30d8e55291b86fcb64bdf6c25489d538688545eb48fd74ad622e5d3862",
+ "zh:99b1003bd9bd32ee323544da897148f46a527f622dc3971af63ea3e251596342",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:9f8b909d3ec50ade83c8062290378b1ec553edef6a447c56dadc01a99f4eaa93",
+ "zh:aaef921ff9aabaf8b1869a86d692ebd24fbd4e12c21205034bb679b9caf883a2",
+ "zh:ac882313207aba00dd5a76dbd572a0ddc818bb9cbf5c9d61b28fe30efaec951e",
+ "zh:bb64e8aff37becab373a1a0cc1080990785304141af42ed6aa3dd4913b000421",
+ "zh:dfe495f6621df5540d9c92ad40b8067376350b005c637ea6efac5dc15028add4",
+ "zh:f0ddf0eaf052766cfe09dea8200a946519f653c384ab4336e2a4a64fdd6310e9",
+ "zh:f1b7e684f4c7ae1eed272b6de7d2049bb87a0275cb04dbb7cda6636f600699c9",
+ "zh:ff461571e3f233699bf690db319dfe46aec75e58726636a0d97dd9ac6e32fb70",
+ ]
+}
diff --git a/terraform/README.md b/terraform/README.md
new file mode 100644
index 0000000..82a9445
--- /dev/null
+++ b/terraform/README.md
@@ -0,0 +1,131 @@
+# Terraform Infrastructure
+
+This directory contains Terraform configurations for provisioning AWS infrastructure for the portfolio application.
+
+## Resources Provisioned
+
+- **CloudWatch Log Group**: For application logging
+- **S3 Bucket**: For data storage and log archival with lifecycle policies
+- **IAM Role**: For application permissions with policies for CloudWatch and S3 access
+
+## Prerequisites
+
+- Terraform 1.0 or higher
+- AWS CLI configured with appropriate credentials
+- AWS account with permissions to create resources
+
+## Usage
+
+### Initialize Terraform
+
+```bash
+terraform init
+```
+
+### Review Planned Changes
+
+```bash
+terraform plan
+```
+
+### Apply Configuration
+
+```bash
+terraform apply
+```
+
+### Destroy Infrastructure
+
+```bash
+terraform destroy
+```
+
+## Configuration
+
+### Variables
+
+Key variables can be customized in `variables.tf` or passed via command line:
+
+- `aws_region`: AWS region (default: us-east-1)
+- `project_name`: Project name (default: portfolio-app)
+- `environment`: Environment (dev/staging/prod, default: dev)
+- `log_retention_days`: CloudWatch log retention (default: 30)
+
+### Example: Override Variables
+
+```bash
+terraform apply -var="environment=prod" -var="project_name=my-app"
+```
+
+Or create a `terraform.tfvars` file:
+
+```hcl
+environment = "prod"
+project_name = "my-app"
+log_retention_days = 90
+```
+
+## State Management
+
+For production use, configure remote state storage:
+
+1. Create an S3 bucket for state storage
+2. Create a DynamoDB table for state locking
+3. Uncomment and configure the backend block in `main.tf`
+
+Example backend configuration:
+
+```hcl
+terraform {
+ backend "s3" {
+ bucket = "my-terraform-state-bucket"
+ key = "portfolio/terraform.tfstate"
+ region = "us-east-1"
+ encrypt = true
+ dynamodb_table = "terraform-state-lock"
+ }
+}
+```
+
+## Outputs
+
+After applying, Terraform will output:
+
+- CloudWatch Log Group name and ARN
+- S3 Bucket name and ARN
+- IAM Role name and ARN
+
+Use these outputs in your application configuration.
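+
+For example:
+
+```bash
+# Read a single output value (plain text, script-friendly)
+terraform output -raw s3_bucket_name
+
+# Dump all outputs as JSON for downstream tooling
+terraform output -json > outputs.json
+
+# Pass the bucket name straight to the application container
+docker run --rm portfolio-app --s3-bucket "$(terraform output -raw s3_bucket_name)"
+```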
+
+## Security Best Practices
+
+- Store `terraform.tfvars` in `.gitignore` (already configured)
+- Use AWS IAM roles instead of access keys when possible
+- Enable S3 bucket encryption (already enabled)
+- Enable S3 versioning (already enabled)
+- Block public access to S3 buckets (already configured)
+- Use Terraform remote state with encryption
+- Review IAM policies for least privilege access
+
+## Cost Considerations
+
+The resources provisioned incur minimal costs:
+
+- CloudWatch Logs: Charged per GB ingested and stored
+- S3: Charged per GB stored, with lifecycle policies to reduce costs
+- IAM: No charge
+
+Estimate costs using the AWS Pricing Calculator before deploying to production.
diff --git a/terraform/main.tf b/terraform/main.tf
new file mode 100644
index 0000000..65025c3
--- /dev/null
+++ b/terraform/main.tf
@@ -0,0 +1,183 @@
+terraform {
+ required_version = ">= 1.0"
+
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.0"
+ }
+ }
+
+ # Configure backend for state management
+ # Uncomment and configure after creating S3 bucket and DynamoDB table
+ # backend "s3" {
+ # bucket = "your-terraform-state-bucket"
+ # key = "portfolio/terraform.tfstate"
+ # region = "us-east-1"
+ # encrypt = true
+ # dynamodb_table = "terraform-state-lock"
+ # }
+}
+
+provider "aws" {
+ region = var.aws_region
+
+ default_tags {
+ tags = {
+ Project = var.project_name
+ Environment = var.environment
+ ManagedBy = "Terraform"
+ }
+ }
+}
+
+# CloudWatch Log Group for application logs
+resource "aws_cloudwatch_log_group" "app_logs" {
+ name = "/aws/portfolio/${var.project_name}"
+ retention_in_days = var.log_retention_days
+
+ tags = {
+ Name = "${var.project_name}-logs"
+ }
+}
+
+# Current AWS account ID, used to keep the globally unique S3 bucket name collision-free
+data "aws_caller_identity" "current" {}
+
+# S3 Bucket for data storage and long-term log archival
+resource "aws_s3_bucket" "data_bucket" {
+  bucket = "${var.project_name}-data-${var.environment}-${data.aws_caller_identity.current.account_id}"
+
+ tags = {
+ Name = "${var.project_name}-data"
+ }
+}
+
+# Enable versioning for S3 bucket
+resource "aws_s3_bucket_versioning" "data_bucket_versioning" {
+ bucket = aws_s3_bucket.data_bucket.id
+
+ versioning_configuration {
+ status = "Enabled"
+ }
+}
+
+# Enable encryption for S3 bucket
+resource "aws_s3_bucket_server_side_encryption_configuration" "data_bucket_encryption" {
+ bucket = aws_s3_bucket.data_bucket.id
+
+ rule {
+ apply_server_side_encryption_by_default {
+ sse_algorithm = "AES256"
+ }
+ }
+}
+
+# Block public access to S3 bucket
+resource "aws_s3_bucket_public_access_block" "data_bucket_public_access" {
+ bucket = aws_s3_bucket.data_bucket.id
+
+ block_public_acls = true
+ block_public_policy = true
+ ignore_public_acls = true
+ restrict_public_buckets = true
+}
+
+# S3 Bucket lifecycle policy
+resource "aws_s3_bucket_lifecycle_configuration" "data_bucket_lifecycle" {
+ bucket = aws_s3_bucket.data_bucket.id
+
+ rule {
+ id = "archive-old-logs"
+    status = "Enabled"
+
+    # An empty filter applies this rule to every object in the bucket
+    filter {}
+
+ transition {
+ days = 30
+ storage_class = "STANDARD_IA"
+ }
+
+ transition {
+ days = 90
+ storage_class = "GLACIER"
+ }
+
+ expiration {
+ days = 365
+ }
+ }
+}
+
+# IAM Role for application (if running on EC2/ECS)
+resource "aws_iam_role" "app_role" {
+ name = "${var.project_name}-app-role"
+
+ assume_role_policy = jsonencode({
+ Version = "2012-10-17"
+ Statement = [
+ {
+ Action = "sts:AssumeRole"
+ Effect = "Allow"
+ Principal = {
+ Service = [
+ "ec2.amazonaws.com",
+ "ecs-tasks.amazonaws.com"
+ ]
+ }
+ }
+ ]
+ })
+
+ tags = {
+ Name = "${var.project_name}-app-role"
+ }
+}
+
+# IAM Policy for CloudWatch Logs
+resource "aws_iam_role_policy" "cloudwatch_logs_policy" {
+ name = "${var.project_name}-cloudwatch-logs"
+ role = aws_iam_role.app_role.id
+
+ policy = jsonencode({
+ Version = "2012-10-17"
+ Statement = [
+ {
+ Effect = "Allow"
+ Action = [
+ "logs:CreateLogGroup",
+ "logs:CreateLogStream",
+ "logs:PutLogEvents",
+ "logs:DescribeLogStreams"
+ ]
+ Resource = "${aws_cloudwatch_log_group.app_logs.arn}:*"
+ }
+ ]
+ })
+}
+
+# IAM Policy for S3 access
+resource "aws_iam_role_policy" "s3_access_policy" {
+ name = "${var.project_name}-s3-access"
+ role = aws_iam_role.app_role.id
+
+ policy = jsonencode({
+ Version = "2012-10-17"
+ Statement = [
+ {
+ Effect = "Allow"
+ Action = [
+ "s3:GetObject",
+ "s3:PutObject",
+ "s3:DeleteObject",
+ "s3:ListBucket"
+ ]
+ Resource = [
+ aws_s3_bucket.data_bucket.arn,
+ "${aws_s3_bucket.data_bucket.arn}/*"
+ ]
+ }
+ ]
+ })
+}
diff --git a/terraform/outputs.tf b/terraform/outputs.tf
new file mode 100644
index 0000000..0cf1153
--- /dev/null
+++ b/terraform/outputs.tf
@@ -0,0 +1,29 @@
+output "cloudwatch_log_group_name" {
+ description = "Name of the CloudWatch Log Group"
+ value = aws_cloudwatch_log_group.app_logs.name
+}
+
+output "cloudwatch_log_group_arn" {
+ description = "ARN of the CloudWatch Log Group"
+ value = aws_cloudwatch_log_group.app_logs.arn
+}
+
+output "s3_bucket_name" {
+ description = "Name of the S3 data bucket"
+ value = aws_s3_bucket.data_bucket.id
+}
+
+output "s3_bucket_arn" {
+ description = "ARN of the S3 data bucket"
+ value = aws_s3_bucket.data_bucket.arn
+}
+
+output "app_role_arn" {
+ description = "ARN of the IAM role for the application"
+ value = aws_iam_role.app_role.arn
+}
+
+output "app_role_name" {
+ description = "Name of the IAM role for the application"
+ value = aws_iam_role.app_role.name
+}
diff --git a/terraform/variables.tf b/terraform/variables.tf
new file mode 100644
index 0000000..c28ddfc
--- /dev/null
+++ b/terraform/variables.tf
@@ -0,0 +1,33 @@
+variable "aws_region" {
+ description = "AWS region for resources"
+ type = string
+ default = "us-east-1"
+}
+
+variable "project_name" {
+ description = "Name of the project"
+ type = string
+ default = "portfolio-app"
+}
+
+variable "environment" {
+ description = "Environment (dev, staging, prod)"
+ type = string
+ default = "dev"
+
+ validation {
+ condition = contains(["dev", "staging", "prod"], var.environment)
+ error_message = "Environment must be dev, staging, or prod."
+ }
+}
+
+variable "log_retention_days" {
+ description = "Number of days to retain CloudWatch logs"
+ type = number
+ default = 30
+
+ validation {
+ condition = contains([1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, 3653], var.log_retention_days)
+ error_message = "Log retention days must be a valid CloudWatch Logs retention period."
+ }
+}
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..17b8100
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+"""Test package for portfolio application."""
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..8f4e7ee
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,25 @@
+"""
+Pytest configuration and fixtures.
+"""
+
+import pytest
+
+
+@pytest.fixture
+def sample_data():
+ """Provide sample data for tests."""
+ return {
+ "test_input": "sample input data",
+ "test_bucket": "test-bucket",
+ "test_key": "test/path/to/file.txt",
+ }
+
+
+@pytest.fixture
+def mock_aws_credentials(monkeypatch):
+ """Mock AWS credentials for testing."""
+ monkeypatch.setenv("AWS_ACCESS_KEY_ID", "testing")
+ monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "testing")
+ monkeypatch.setenv("AWS_SECURITY_TOKEN", "testing")
+ monkeypatch.setenv("AWS_SESSION_TOKEN", "testing")
+ monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-1")
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..ed6886e
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,164 @@
+"""
+Unit tests for the main application module.
+"""
+
+import os
+import sys
+from unittest.mock import MagicMock, patch
+
+import pytest
+from botocore.exceptions import ClientError
+
+# Add scripts directory to path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
+
+from main import DataProcessor, parse_arguments # noqa: E402
+
+
+class TestDataProcessor:
+ """Test suite for DataProcessor class."""
+
+ def test_initialization(self):
+ """Test DataProcessor initialization."""
+ processor = DataProcessor(environment="dev")
+ assert processor.environment == "dev"
+ assert processor.s3_client is None
+ assert processor.cloudwatch_client is None
+
+ def test_initialization_with_environment(self):
+ """Test DataProcessor initialization with different environments."""
+ for env in ["dev", "staging", "prod"]:
+ processor = DataProcessor(environment=env)
+ assert processor.environment == env
+
+ @patch("boto3.client")
+ def test_setup_aws_clients_success(self, mock_boto_client):
+ """Test successful AWS client setup."""
+ processor = DataProcessor()
+ processor.setup_aws_clients()
+
+ assert mock_boto_client.call_count == 2
+ assert processor.s3_client is not None
+ assert processor.cloudwatch_client is not None
+
+ @patch("boto3.client")
+ def test_setup_aws_clients_failure(self, mock_boto_client):
+ """Test AWS client setup failure."""
+ mock_boto_client.side_effect = ClientError(
+ {"Error": {"Code": "AccessDenied", "Message": "Access denied"}},
+ "GetObject",
+ )
+
+ processor = DataProcessor()
+ with pytest.raises(ClientError):
+ processor.setup_aws_clients()
+
+ def test_process_data(self):
+ """Test data processing."""
+ processor = DataProcessor(environment="dev")
+ result = processor.process_data("test_input")
+
+ assert result["status"] == "success"
+ assert result["input"] == "test_input"
+ assert result["output"] == "Processed: test_input"
+ assert result["environment"] == "dev"
+
+ def test_save_to_s3_without_client(self):
+ """Test S3 save without initialized client."""
+ processor = DataProcessor()
+ result = processor.save_to_s3("test-bucket", "test-key", "test-data")
+ assert result is False
+
+ @patch("boto3.client")
+ def test_save_to_s3_success(self, mock_boto_client):
+ """Test successful S3 save."""
+ mock_s3 = MagicMock()
+ mock_boto_client.return_value = mock_s3
+
+ processor = DataProcessor()
+ processor.s3_client = mock_s3
+
+ result = processor.save_to_s3("test-bucket", "test-key", "test-data")
+
+ assert result is True
+ mock_s3.put_object.assert_called_once_with(Bucket="test-bucket", Key="test-key", Body="test-data")
+
+ @patch("boto3.client")
+ def test_save_to_s3_failure(self, mock_boto_client):
+ """Test S3 save failure."""
+ mock_s3 = MagicMock()
+ mock_s3.put_object.side_effect = ClientError(
+ {"Error": {"Code": "NoSuchBucket", "Message": "Bucket not found"}},
+ "PutObject",
+ )
+ mock_boto_client.return_value = mock_s3
+
+ processor = DataProcessor()
+ processor.s3_client = mock_s3
+
+ result = processor.save_to_s3("test-bucket", "test-key", "test-data")
+ assert result is False
+
+ def test_log_to_cloudwatch_without_client(self):
+ """Test CloudWatch logging without initialized client."""
+ processor = DataProcessor()
+ result = processor.log_to_cloudwatch("log-group", "log-stream", "message")
+ assert result is False
+
+
+class TestArgumentParsing:
+ """Test suite for command line argument parsing."""
+
+ @patch("sys.argv", ["main.py"])
+ def test_default_arguments(self):
+ """Test default argument values."""
+ args = parse_arguments()
+ assert args.environment == "dev"
+ assert args.input is None
+ assert args.s3_bucket is None
+ assert args.verbose is False
+
+ @patch("sys.argv", ["main.py", "--environment", "prod"])
+ def test_environment_argument(self):
+ """Test environment argument."""
+ args = parse_arguments()
+ assert args.environment == "prod"
+
+ @patch("sys.argv", ["main.py", "--input", "test_data"])
+ def test_input_argument(self):
+ """Test input argument."""
+ args = parse_arguments()
+ assert args.input == "test_data"
+
+ @patch("sys.argv", ["main.py", "--s3-bucket", "my-bucket"])
+ def test_s3_bucket_argument(self):
+ """Test S3 bucket argument."""
+ args = parse_arguments()
+ assert args.s3_bucket == "my-bucket"
+
+ @patch("sys.argv", ["main.py", "--verbose"])
+ def test_verbose_argument(self):
+ """Test verbose argument."""
+ args = parse_arguments()
+ assert args.verbose is True
+
+ @patch(
+ "sys.argv",
+ [
+ "main.py",
+ "--environment",
+ "staging",
+ "--input",
+ "data",
+ "--s3-bucket",
+ "bucket",
+ "--verbose",
+ ],
+ )
+ def test_all_arguments(self):
+ """Test all arguments together."""
+ args = parse_arguments()
+ assert args.environment == "staging"
+ assert args.input == "data"
+ assert args.s3_bucket == "bucket"
+ assert args.verbose is True