Merged
20 commits
62be394
refactor: switch from singular utils.py to apps_processing, aws_s3 and …
bdebek-splunk Jan 16, 2025
caff4c7
docs: update README to include new modules directory and its purpose
bdebek-splunk Jan 16, 2025
51fe80b
fix: update validate_data method signature to return a dictionary and…
bdebek-splunk Jan 16, 2025
93b87d2
refactor: update deployment workflow, deleted AwsS3Connector, added …
bdebek-splunk Jan 20, 2025
bdd94cc
fix: update deployment report to use add_data method for successful a…
bdebek-splunk Jan 20, 2025
05dbeb9
fix: added report_generator.py to the architecture tree
bdebek-splunk Jan 20, 2025
e69d682
style: clean up code formatting and improve type hinting in deploymen…
bdebek-splunk Jan 20, 2025
e88b721
refactor: removed hardcoded endpoint from instance url defined in dep…
bdebek-splunk Jan 20, 2025
08c3f0e
refactor: reworked splunkcloud.py and deploy.py modules
bdebek-splunk Jan 20, 2025
c8d4813
fix: remove redundant status print statement in AppInspectService
bdebek-splunk Jan 20, 2025
ec1c621
docs:updated README
edro15 Jan 21, 2025
b2faa37
chore:updated deployment config files to support experience key
edro15 Jan 21, 2025
2b21c85
ci:added deps to workflow
edro15 Jan 21, 2025
89c53cf
feat+refactor:added yml validation against schema
edro15 Jan 21, 2025
d864a9f
docs:edited README
edro15 Jan 21, 2025
a57470e
style:removed whitespace
edro15 Jan 21, 2025
efecedb
docs:updated README
edro15 Jan 21, 2025
060e145
fix: add break condition for successful app validation in AppInspectS…
bdebek-splunk Jan 21, 2025
4c0f6d7
chore: remove unused AWS credentials from deployment workflows and sc…
bdebek-splunk Jan 21, 2025
401e7d7
fix: revert deletion of AWS credentials in github workflow files
bdebek-splunk Jan 21, 2025
5 changes: 5 additions & 0 deletions README.md
@@ -17,6 +17,10 @@ Assumptions:
│   ├── collections.conf
│   └── logging.conf
├── deploy.py
├── modules
│   ├── apps_processing.py
│   ├── aws_s3.py
│   └── splunkcloud.py
└── environments
    ├── prod
    │   ├── es
@@ -38,6 +42,7 @@ Assumptions:
* deployment instructions for each environment (`deployment.yml`; see the example below)
* specific app configurations (e.g. `uat/es/app1`)
* `deploy.py` Used by the automation to perform the deployment
* `modules/` Contains the helper classes used by the deployment automation

This repository follows the same structure. Please navigate it to verify its content.
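
For reference, a minimal `deployment.yml` consistent with what `deploy.py` and `AppFilesProcessor.validate_data` read might look like the sketch below; the app names, bucket, object key, URL, and version are placeholders rather than values from this repository. Note that `deploy.py` resolves its single argument to `environments/<environment>/deployment.yml`, so an environment such as `prod/es` selects `environments/prod/es/deployment.yml`.

```yaml
apps:                                   # private apps packaged as <app>.tgz
  app1:
    s3-bucket: my-apps-bucket           # placeholder S3 bucket
    source: packages/app1.tgz           # placeholder S3 object key
target:
  url: https://example.splunkcloud.com  # placeholder Splunk Cloud stack URL
splunkbase-apps:                        # apps installed directly from Splunkbase
  some_splunkbase_app:
    version: "1.2.3"                    # placeholder version
```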

58 changes: 36 additions & 22 deletions deploy.py
@@ -2,58 +2,71 @@
import json
import os

import yaml

from utils import *
from modules.splunkcloud import SplunkCloudConnector
from modules.aws_s3 import AwsS3Connector
from modules.apps_processing import AppFilesProcessor

# FOR LOCAL TESTING
# from dotenv import load_dotenv
# load_dotenv(dotenv_path="local.env")

SPLUNK_USERNAME = os.getenv("SPLUNK_USERNAME")
SPLUNK_PASSWORD = os.getenv("SPLUNK_PASSWORD")
SPLUNK_TOKEN = os.getenv("SPLUNK_TOKEN")

AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")


def main():
    if len(sys.argv) != 2:
        print("Usage: python deploy.py <environment>")
        sys.exit(1)

    yaml_file_path = "environments/" + sys.argv[1] + "/deployment.yml"

    # Initiate deployment report
    deployment_report = {}

    try:
        data = read_yaml(yaml_file_path)
    except FileNotFoundError:
        print(f"Error: The file '{yaml_file_path}' was not found.")
    except yaml.YAMLError as e:
        print(f"Error parsing YAML file: {e}")
        sys.exit(1)
    # Initiate AppFilesProcessor object
    app_processor = AppFilesProcessor(yaml_file_path)

    ### 1. Validate data and retrieve all apps listed in deployment.yml from S3 ###
    private_apps, splunkbase_apps = validate_data(data)
    data, private_apps, splunkbase_apps = app_processor.validate_data()
    # List all apps in the yaml file and their S3 buckets
    if private_apps:
        apps = data.get("apps", {}).keys()
        s3_buckets = [data["apps"][app]["s3-bucket"] for app in apps]
        app_directories = [data["apps"][app]["source"] for app in apps]
        target_url = data["target"]["url"]
    # Download all apps from S3

    # Initiate AwsS3Connector object
    s3_connector = AwsS3Connector(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    # Check for private apps
    if private_apps:
        print("Found private apps in deployment.yml, starting deployment...")
        # Loop through all apps
        for app, bucket, directory in zip(apps, s3_buckets, app_directories):
            object_name = directory
            file_name = f"{app}.tgz"
            # Download app from S3
            download_file_from_s3(bucket, object_name, file_name)
            s3_connector.download_file_from_s3(bucket, object_name, file_name)

            ### 2. Upload local configuration ###
            # Check if the configuration exists for the app
            path = os.path.join("environments", sys.argv[1], app)
            print(path)
            if os.path.exists(path):
                unpack_merge_conf_and_meta_repack(app, path)
                app_processor.unpack_merge_conf_and_meta_repack(app, path)
            else:
                print(f"No configuration found for app {app}. Skipping.")

            ### 3. Validate app for Splunk Cloud ###
            report, token = cloud_validate_app(app)
            # Initiate SplunkCloudConnector object
            cloud_connector = SplunkCloudConnector(
                SPLUNK_USERNAME, SPLUNK_PASSWORD, SPLUNK_TOKEN, target_url
            )
            report, token = cloud_connector.cloud_validate_app(app)
            if report is None:
                print(f"App {app} failed validation.")
                deployment_report[app] = {"validation": "failed"}
@@ -66,7 +79,7 @@ def main():
                and result["failure"] == 0
                and result["manual_check"] == 0
            ):
                distribution_status = distribute_app(app, target_url, token)
                distribution_status = cloud_connector.distribute_app(app, token)
                if distribution_status == 200:
                    print(f"App {app} successfully distributed.\n")
                    deployment_report[app]["distribution"] = "success"
@@ -90,17 +103,18 @@ def main():
        for splunkbase_app in splunkbase_apps_dict:
            app = splunkbase_apps_dict[splunkbase_app]
            app_name = splunkbase_app
            version = app['version']
            app_id = get_app_id(app_name)
            token = os.getenv("SPLUNK_TOKEN")
            license = get_license_url(app_name)
            install_status = install_splunkbase_app(app_name, app_id, version, target_url, token, license)
            version = app["version"]
            app_id = cloud_connector.get_app_id(app_name)
            license = cloud_connector.get_license_url(app_name)
            install_status = cloud_connector.install_splunkbase_app(
                app_name, app_id, version, license
            )
            print(f"App {app_name} installation status: {install_status}")
            deployment_report[app_name] = {
                "splunkbase_installation": install_status,
                "version": version,
                "app_id": app_id,
            }
            }
    else:
        print("No Splunkbase apps found in deployment.yml, skipping...")
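
For orientation, the `deployment_report` assembled by `main()` could end up shaped like this sketch; app names and values are hypothetical, and the success-path keys for private apps are partly elided in this diff:

```python
# Hypothetical final report; keys mirror the assignments in main() above.
deployment_report = {
    "app1": {"validation": "failed"},  # private app rejected by AppInspect
    "app2": {"validation": "success", "distribution": "success"},  # assumed success-path shape
    "some_splunkbase_app": {
        "splunkbase_installation": 200,  # assumed HTTP-style status code
        "version": "1.2.3",
        "app_id": 1234,
    },
}
```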

209 changes: 209 additions & 0 deletions modules/apps_processing.py
@@ -0,0 +1,209 @@
import sys
import os
import yaml
import shutil
import configparser
import tarfile
from io import StringIO


class AppFilesProcessor:
    """Class for handling local app files and configurations."""

    def __init__(self, yml_path):
        self.yml_path = yml_path

    def _read_yaml(self) -> dict:
        """Read and return the contents of a YAML file."""
        with open(self.yml_path, "r") as file:
            return yaml.safe_load(file)

    def validate_data(self) -> tuple:
        """
        Validate the data in the YAML file.
        Return the parsed data plus boolean flags for private and Splunkbase apps
        present in the environment configuration.
        validate_data() -> (dict, bool, bool)
        """
        try:
            data = self._read_yaml()
        except FileNotFoundError:
            print(f"Error: The file '{self.yml_path}' was not found.")
            sys.exit(1)
        except yaml.YAMLError as e:
            print(f"Error parsing YAML file: {e}")
            sys.exit(1)

        if "apps" not in data:
            print("Error: The 'apps' key is missing in the deployment.yml file.")
            sys.exit(1)
        if "target" not in data:
            print("Error: The 'target' key is missing in the deployment.yml file.")
            sys.exit(1)
        if "url" not in data["target"]:
            print("Error: The 'url' key is missing in the 'target' section.")
            sys.exit(1)
        if "splunkbase-apps" not in data:
            print("Error: The 'splunkbase-apps' key is missing.")
            sys.exit(1)

        app_dict = data.get("apps", {})
        splunkbase_dict = data.get("splunkbase-apps", {})

        private_apps = bool(app_dict)
        splunkbase_apps = bool(splunkbase_dict)

        return data, private_apps, splunkbase_apps

    def _preprocess_empty_headers(self, file_path: str) -> list:
        """
        Preprocess the file to handle empty section headers by replacing `[]` with a valid section name.
        """
        valid_lines = []
        with open(file_path, "r") as file:
            for line in file:
                # Replace empty section headers with a placeholder
                if line.strip() == "[]":
                    valid_lines.append("[DEFAULT]\n")  # Or any placeholder section name
                else:
                    valid_lines.append(line)
        return valid_lines
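
    # Note on the `[]` rewrite above (illustrative sketch of a Splunk .meta
    # file; the access/export values are only examples):
    #
    #     []
    #     access = read : [ * ], write : [ admin ]
    #     export = system
    #
    # Splunk metadata files keep their global settings in an unnamed stanza
    # written as `[]`, which configparser rejects as a section header. The
    # stanza is therefore parsed under a `[DEFAULT]` placeholder and written
    # back out as `[]` by _replace_default_with_empty_header() below.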

    def _replace_default_with_empty_header(self, file_path: str) -> None:
        """
        Replace '[DEFAULT]' header with '[]' in the specified file.
        """
        with open(file_path, "r") as file:
            lines = file.readlines()

        with open(file_path, "w") as file:
            for line in lines:
                # Replace '[DEFAULT]' with '[]'
                if line.strip() == "[DEFAULT]":
                    file.write("[]\n")
                else:
                    file.write(line)

    def merge_or_copy_conf(self, source_path: str, dest_path: str) -> None:
        """Copy a .conf file into dest_path, or merge it into an existing one."""
        # Get the filename from the source path
        filename = os.path.basename(source_path)
        dest_file = os.path.join(dest_path, filename)

        # Check if the file exists in the destination directory
        if not os.path.exists(dest_file):
            # If the file doesn't exist, copy it
            shutil.copy(source_path, dest_path)
            print(f"Copied {filename} to {dest_path}")
        else:
            # If the file exists, merge the configurations
            print(f"Merging {filename} with existing file in {dest_path}")

            # Read the source file
            source_config = configparser.ConfigParser()
            source_config.read(source_path)

            # Read the destination file
            dest_config = configparser.ConfigParser()
            dest_config.read(dest_file)

            # Merge source into destination
            for section in source_config.sections():
                if not dest_config.has_section(section):
                    dest_config.add_section(section)
                for option, value in source_config.items(section):
                    dest_config.set(section, option, value)

            # Write the merged configuration back to the destination file
            with open(dest_file, "w") as file:
                dest_config.write(file)
            print(f"Merged configuration saved to {dest_file}")

    def merge_or_copy_meta(self, local_meta_file: str, default_dir: str) -> None:
        """Merge local.meta with default.meta"""
        filename = os.path.basename(local_meta_file)
        dest_file = os.path.join(default_dir, "default.meta")

        # Check if the file exists in the destination directory
        if not os.path.exists(dest_file):
            # If the file doesn't exist, copy it
            shutil.copy(local_meta_file, dest_file)
            print(f"Copied {filename} to {dest_file}")
        else:
            # If the file exists, merge the configurations
            print(f"Merging {filename} with existing file in {dest_file}")

            # Preprocess the default file
            default_preprocessed_lines = self._preprocess_empty_headers(dest_file)
            default_preprocessed_content = StringIO("".join(default_preprocessed_lines))

            # Read the default.meta file
            default_meta = configparser.ConfigParser()
            default_meta.read_file(default_preprocessed_content)

            # Preprocess the local file
            local_preprocessed_lines = self._preprocess_empty_headers(local_meta_file)
            local_preprocessed_content = StringIO("".join(local_preprocessed_lines))

            # Read the local.meta file
            local_meta = configparser.ConfigParser()
            local_meta.read_file(local_preprocessed_content)

            # Merge local.meta into default.meta
            for section in local_meta.sections():
                if not default_meta.has_section(section):
                    default_meta.add_section(section)
                for option, value in local_meta.items(section):
                    if default_meta.has_option(section, option):
                        # Merge logic: Option exists in both, report conflicts
                        default_value = default_meta.get(section, option)
                        if value != default_value:
                            print(
                                f"Conflict detected: {section} {option} - {default_value} -> {value}"
                            )
                    # Overwrite (or add) the option in default.meta
                    default_meta.set(section, option, value)

            # Write the merged configuration back to the output file
            with open(dest_file, "w") as file:
                default_meta.write(file)

            # Replace '[DEFAULT]' with '[]' in the output file
            self._replace_default_with_empty_header(dest_file)

            print(f"Merged metadata saved to {dest_file}")

    def unpack_merge_conf_and_meta_repack(self, app: str, path: str) -> None:
        """Unpack the app, load environment configuration files and repack the app."""
        temp_dir = "temp_unpack"
        os.makedirs(temp_dir, exist_ok=True)

        # Unpack the tar.gz file
        with tarfile.open(f"{app}.tgz", "r:gz") as tar:
            tar.extractall(path=temp_dir)
        # Base directory of the unpacked app
        base_default_dir = f"{temp_dir}/{app}"
        # Load the environment configuration files
        app_dir = path
        # Copy all .conf files in app_dir to temp_dir of unpacked app
        for file in os.listdir(app_dir):
            if file.endswith(".conf"):
                default_dir = base_default_dir + "/default"
                os.makedirs(default_dir, exist_ok=True)
                source_path = os.path.join(app_dir, file)
                self.merge_or_copy_conf(source_path, default_dir)
        # Copy all metadata files in app_dir to temp_dir of unpacked app
        for file in os.listdir(app_dir):
            if file.endswith(".meta"):
                default_dir = base_default_dir + "/metadata"
                os.makedirs(default_dir, exist_ok=True)
                source_path = os.path.join(app_dir, file)
                self.merge_or_copy_meta(source_path, default_dir)
        # Repack the app and place it in the root directory
        with tarfile.open(f"{app}.tgz", "w:gz") as tar:
            for root, _, files in os.walk(f"{temp_dir}/{app}"):
                for file in files:
                    full_path = os.path.join(root, file)
                    arcname = os.path.relpath(full_path, temp_dir)
                    tar.add(full_path, arcname=arcname)
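
A rough usage sketch under the same assumptions as `deploy.py` above; the environment path and app name are hypothetical placeholders:

```python
# Hypothetical driver, mirroring how deploy.py uses AppFilesProcessor.
from modules.apps_processing import AppFilesProcessor

processor = AppFilesProcessor("environments/prod/es/deployment.yml")
data, private_apps, splunkbase_apps = processor.validate_data()

if private_apps:
    for app in data.get("apps", {}):
        # After <app>.tgz has been downloaded from S3, fold the environment's
        # .conf/.meta overrides into the package and repack it in place.
        processor.unpack_merge_conf_and_meta_repack(
            app, f"environments/prod/es/{app}"
        )
```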
24 changes: 24 additions & 0 deletions modules/aws_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import boto3


class AwsS3Connector:
    """Class to connect to AWS S3 and download files."""

    def __init__(self, aws_access_key_id, aws_secret_access_key):
        self.aws_access_key_id = aws_access_key_id
        self.aws_secret_access_key = aws_secret_access_key

    def download_file_from_s3(
        self, bucket_name: str, object_name: str, file_name: str
    ) -> None:
        """Download a file from an S3 bucket."""
        s3 = boto3.client(
            "s3",
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
        )
        try:
            s3.download_file(bucket_name, object_name, file_name)
            print(f"Downloaded {object_name} from {bucket_name} to {file_name}")
        except Exception as e:
            print(f"Error downloading {object_name} from {bucket_name}: {e}")
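
A minimal usage sketch, assuming credentials come from the same environment variables `deploy.py` reads; the bucket and object key are placeholders:

```python
import os

from modules.aws_s3 import AwsS3Connector

s3 = AwsS3Connector(
    os.getenv("AWS_ACCESS_KEY_ID"), os.getenv("AWS_SECRET_ACCESS_KEY")
)
# deploy.py takes bucket ('s3-bucket') and key ('source') from deployment.yml
# and names the local file <app>.tgz; these values are placeholders.
s3.download_file_from_s3("my-apps-bucket", "packages/app1.tgz", "app1.tgz")
```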