Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -129,4 +129,6 @@ dmypy.json
.pyre/

# Custom
*.json
output/
.DS_Store

29 changes: 28 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1 +1,28 @@
# aws-compliance-auditor
# aws-compliance-auditor

## Overview

Command-line utility for auditing AWS resources for compliance with security and data-resilience policies.

## Requirements
- `poetry`
- `stsauth`

## Architecture Diagram

<link to lucid chart(s)>

## Environment Variables
`AWS_PROFILE`: AWS Profile variable which is configured by `stsauth`

## Workflow

## Usage

1. Execute `poetry install` to install project dependencies
2. Assume AWS Profile by using `stsauth`
3. Execute `python main.py` in command line

## Contributing

<how to contribute>
75 changes: 75 additions & 0 deletions dynamodb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
import boto3
import os
import logging
import utils

# Logging
# Log level is configurable via the LOG_LEVEL environment variable (default INFO).
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
logging.basicConfig(
    format="[%(asctime)s] [%(levelname)-8s] - %(message)s",
    level=getattr(logging, LOG_LEVEL),
    datefmt="%Y-%m-%d %H:%M:%S",
)
# NOTE: a second logging.basicConfig(level=logging.INFO) call was removed here;
# basicConfig is a no-op once the root logger has handlers, so it did nothing.
logger = logging.getLogger()

# Establish boto3 client (uses credentials/region from the active AWS profile,
# e.g. the one assumed via stsauth).
dynamo_client = boto3.client('dynamodb')


def describeTables():
    """Return the full DescribeTable 'Table' payload for every DynamoDB table.

    Uses the ListTables paginator: a single list_tables() call returns at
    most 100 table names, so the previous implementation silently truncated
    the audit in accounts with more tables than that.

    Returns:
        list[dict]: one DescribeTable 'Table' dict per table in the
        account/region of the client.
    """
    table_names = []
    for page in dynamo_client.get_paginator('list_tables').paginate():
        table_names.extend(page['TableNames'])

    return [dynamo_client.describe_table(TableName=name)['Table']
            for name in table_names]


def hasBackupsEnabled(backup_response: dict) -> bool:
    """Return True when a table has both continuous backups and PITR enabled.

    Args:
        backup_response: raw response from DynamoDB describe_continuous_backups.

    Returns:
        bool: True only if ContinuousBackupsStatus and
        PointInTimeRecoveryStatus are both "ENABLED".
    """
    description = backup_response['ContinuousBackupsDescription']
    pitr = description['PointInTimeRecoveryDescription']

    return (
        description['ContinuousBackupsStatus'] == "ENABLED"
        and pitr['PointInTimeRecoveryStatus'] == "ENABLED"
    )


def auditDynamo():
    """Audit every DynamoDB table for continuous-backup / PITR compliance.

    Returns:
        dict: keyed by table name, each value shaped as
        {"backup_data": {...}, "backup_is_compliant": bool, "tags": {...}}.
        "backup_data" holds the PITR restore window (formatted timestamps)
        when backups are enabled, otherwise an empty dict.
    """
    backup_data = {}

    for table in describeTables():
        table_name = table['TableName']
        tags = dynamo_client.list_tags_of_resource(ResourceArn=table['TableArn'])['Tags']

        continuous_backups_response = dynamo_client.describe_continuous_backups(TableName=table_name)
        backups_enabled = hasBackupsEnabled(continuous_backups_response)

        pitr_data = {}
        if backups_enabled:
            # The restore window is only reported when PITR is active.
            pitr = continuous_backups_response['ContinuousBackupsDescription'][
                'PointInTimeRecoveryDescription']
            pitr_data = {
                "earliest_restore_point":
                    pitr['EarliestRestorableDateTime'].strftime("%m/%d/%Y, %H:%M:%S"),
                "latest_restore_point":
                    pitr['LatestRestorableDateTime'].strftime("%m/%d/%Y, %H:%M:%S"),
            }

        # Single construction point replaces the previous duplicated
        # if/else blocks that differed only in two values.
        backup_data[table_name] = {
            "backup_data": pitr_data,
            "backup_is_compliant": backups_enabled,
            "tags": utils.flattenTags(tags)
        }

    return backup_data
51 changes: 51 additions & 0 deletions efs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import boto3
import os
import logging

import utils

# Log level is configurable via the LOG_LEVEL environment variable (default INFO).
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")

logging.basicConfig(
    format="[%(asctime)s] [%(levelname)-8s] - %(message)s",
    level=getattr(logging, LOG_LEVEL),
    datefmt="%Y-%m-%d %H:%M:%S",
)
# NOTE: a second logging.basicConfig(level=logging.INFO) call was removed here;
# basicConfig is a no-op once the root logger has handlers, so it did nothing.
logger = logging.getLogger()

# Lazily created boto3 EFS client; see getEFSClient().
efs_client = None


def getEFSClient():
    """Return the shared boto3 EFS client, creating it on first call.

    Deferring client creation until first use means importing this module
    does not require AWS credentials to be configured.
    """
    global efs_client

    efs_client = efs_client if efs_client is not None else boto3.client('efs')
    return efs_client


def auditEFS():
    """Audit every EFS file system for AWS Backup policy compliance.

    Returns:
        dict: keyed by file-system name (falling back to the file-system ID
        when no Name tag is set), each value shaped as
        {"backup_data": status, "backup_is_compliant": bool, "tags": {...}}.
    """
    backup_data = {}

    file_systems = getEFSClient().describe_file_systems()['FileSystems']

    for fs in file_systems:
        fs_id = fs['FileSystemId']
        # 'Name' is empty for untagged file systems; fall back to the ID so
        # multiple unnamed file systems don't collide under an "" key.
        fs_name = fs.get('Name') or fs_id
        tags = utils.flattenTags(fs.get('Tags', []))
        try:
            policy = getEFSClient().describe_backup_policy(FileSystemId=fs_id)
            status = policy['BackupPolicy']['Status']
            backup_data[fs_name] = {
                "backup_data": status,
                "backup_is_compliant": status == "ENABLED",
                "tags": tags
            }
        except Exception:
            # describe_backup_policy raises when no backup policy exists.
            # The previous bare `except:` also swallowed KeyboardInterrupt/
            # SystemExit and hid real failures; catch Exception and log it.
            logger.warning("No backup policy found for EFS file system %s", fs_id)
            backup_data[fs_name] = {
                "backup_data": "NONE",
                "backup_is_compliant": False,
                "tags": tags
            }

    return backup_data
7 changes: 7 additions & 0 deletions inspector.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import boto3

# TODO: call Inspector to get a list of tainted ECR images

# Don't make changes, just get points of contact for vulnerable images
# Build Markdown table that can be sent to relevant personnel
# Ask teams to open backlog issues
4 changes: 0 additions & 4 deletions inspector/inspector.py

This file was deleted.

52 changes: 38 additions & 14 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import redshift.redshift
import rds.rds as rds
import redshift.redshift as rs
import os

import json
import efs
import rds
import redshift as rs
import dynamodb as ddb
import utils
import logging
import os
import functools

LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")

Expand All @@ -16,24 +18,46 @@
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()


# TODO: only return DBs that DO NOT have auto-backup enabled; check SBX instances that Simon made
# TODO: add click options (ex: if --backup, only gauge backups; if --rds, only audit RDS resources, etc)

def main():
    """Run the backup-compliance audit across DynamoDB, RDS, Redshift and EFS,
    then write a Markdown report to output/<account>_audit_results.md.

    Requires AWS credentials (AWS_PROFILE assumed via stsauth) and an
    existing ./output directory.
    """
    logger.info("Auditing DynamoDB resources")
    dynamo_status = ddb.auditDynamo()
    logger.info("Auditing RDS resources")
    rds_status = rds.auditRDS()
    logger.info("Auditing Redshift resources")
    redshift_status = rs.auditRedshift()
    logger.info("Auditing EFS resources")
    efs_status = efs.auditEFS()

    aggregate_backup_status = {
        "rds": rds_status,
        "redshift": redshift_status,
        "efs": efs_status,
        "dynamo": dynamo_status
    }

    logger.info("Building Markdown tables")

    compliant_markdown, noncompliant_markdown = utils.buildMarkdown(aggregate_backup_status)

    # Write output — non-compliant resources first so they are seen first.
    account = utils.getShortAccountName(os.getenv("AWS_PROFILE", "NULL_PROFILE"))

    final_markdown = (
        f"# HQR {account} BACKUP COMPLIANCE REPORT\n"
        + noncompliant_markdown + "\n\n" + compliant_markdown
    )

    # `with` guarantees the report is flushed and closed even on error;
    # the previous code never closed the handle.
    with open(f'output/{account}_audit_results.md', 'w') as output_file:
        output_file.write(final_markdown)

    logger.info("Audit complete")


if __name__ == '__main__':
    main()


17 changes: 16 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ authors = ["Luke Daigle <ddaigle@bellese.io>"]
python = "^3.8"
boto3 = "^1.22.11"
click = "^8.1.3"
tabulate = "^0.8.9"

[tool.poetry.dev-dependencies]
pytest = "^5.2"
Expand Down
Loading