#!/usr/bin/env python3
|
|
import os
|
|
import datetime
|
|
from pathlib import Path
|
|
from google.cloud import storage
|
|
|
|
# =============================================================================
# CONFIGURATION CONSTANTS
# =============================================================================

# Stage toggles: switch individual steps of the release pipeline on or off.
CREATE_BACKUP = False
UPLOAD_FIRMWARE = True
UPLOAD_MANIFEST = True
UPLOAD_DATA = True

# The name of your Google Cloud Storage bucket.
GCS_BUCKET_NAME = 'boothifier'  # <-- Change this

# The destination directory (prefix) inside your bucket.
# For example, 'release' or 'prod'. Use an empty string ('') to use the bucket root.
DESTINATION_DIR = 'latest'  # <-- Change this (or leave '' for bucket root)
BACKUPS_DIR = 'backups'

# Directory containing this script; all local paths below resolve against it.
LOCAL_ROOT_PATH = Path(__file__).parent.resolve()

# Path to your Google Cloud credentials JSON file.
GOOGLE_APPLICATION_CREDENTIALS_PATH = str(LOCAL_ROOT_PATH / 'loyal-column-439819-e3-8cddff2ee2c2.json')

# Local paths to the firmware image and the release manifest.
LOCAL_FIRMWARE_PATH = str(LOCAL_ROOT_PATH / 'firmware.bin')  # <-- Change this if needed
LOCAL_MANIFEST_PATH = str(LOCAL_ROOT_PATH / 'update.json')  # <-- Change this if needed

# Local path to the data directory.
LOCAL_DATA_DIRECTORY = 'data'  # <-- Change this if needed

# =============================================================================
# SET UP GOOGLE CLOUD CREDENTIALS
# =============================================================================
# The google-cloud-storage client locates the service-account key through
# this environment variable.
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = GOOGLE_APPLICATION_CREDENTIALS_PATH

# =============================================================================
# HELPER FUNCTIONS
# =============================================================================
def backup_existing_files(bucket, destination_prefix, backups_prefix, backup_folder):
    """
    Copy every blob under destination_prefix into a backup folder.

    Each blob is copied (not moved) to:
        <backups_prefix>/<backup_folder>/<relative_path>

    Blobs that already live under backups_prefix are skipped, so running a
    backup against the bucket root ('' prefix) does not recursively back up
    earlier backups.

    Args:
        bucket: google.cloud.storage.Bucket to read from and copy within.
        destination_prefix: prefix whose blobs are backed up ('' = bucket root).
        backups_prefix: prefix that holds backup folders ('' = bucket root).
        backup_folder: name of the folder created for this backup run.
    """
    # List with a trailing '/' so that a prefix of 'latest' does not also
    # match sibling prefixes such as 'latest-v2/...'.
    list_prefix = f"{destination_prefix}/" if destination_prefix else None
    backups_root = f"{backups_prefix}/" if backups_prefix else None

    for blob in bucket.list_blobs(prefix=list_prefix):
        # Never back up files already inside the backups area (only possible
        # when destination_prefix is the bucket root).
        if backups_root and blob.name.startswith(backups_root):
            continue

        # Compute the file's relative path by removing the destination_prefix.
        if destination_prefix:
            relative_path = blob.name[len(destination_prefix):].lstrip('/')
        else:
            relative_path = blob.name

        # Construct the new blob name in the backup folder under backups_prefix.
        if backups_prefix:
            new_blob_name = f"{backups_prefix}/{backup_folder}/{relative_path}"
        else:
            new_blob_name = f"{backup_folder}/{relative_path}"

        print(f"Backing up: copying '{blob.name}' to '{new_blob_name}'")
        bucket.copy_blob(blob, bucket, new_blob_name)
def upload_file(bucket, local_path, destination_blob_name):
    """
    Upload one local file to the bucket under destination_blob_name.

    Any existing blob with the same name is overwritten.
    """
    print(f"Uploading file '{local_path}' to '{destination_blob_name}'")
    target = bucket.blob(destination_blob_name)
    # Disable caching so device clients always fetch the freshest release.
    target.cache_control = 'private, max-age=0, no-transform'
    target.upload_from_filename(local_path)
def upload_directory(bucket, local_directory, destination_prefix):
    """
    Recursively upload every file in local_directory to the bucket.

    The local directory tree is mirrored under destination_prefix, or at the
    bucket root when destination_prefix is empty.
    """
    for dirpath, _dirnames, filenames in os.walk(local_directory):
        for filename in filenames:
            source_path = os.path.join(dirpath, filename)
            # Path of the file relative to the base directory, preserved
            # in the destination blob name.
            relative = os.path.relpath(source_path, local_directory)
            if destination_prefix:
                blob_name = f"{destination_prefix}/{relative}"
            else:
                blob_name = relative
            upload_file(bucket, source_path, blob_name)
def _remote_path(prefix, name):
    """Join name onto prefix with '/', or return name alone when prefix is ''."""
    return f"{prefix}/{name}" if prefix else name


def main():
    """Optionally back up, then upload firmware, manifest and data to GCS."""
    # Initialize the Google Cloud Storage client (credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable set above).
    client = storage.Client()
    bucket = client.bucket(GCS_BUCKET_NAME)

    # Normalize the prefixes by stripping any surrounding slashes.
    destination_prefix = DESTINATION_DIR.strip('/')
    backups_prefix = BACKUPS_DIR.strip('/')

    if CREATE_BACKUP:
        # Create a backup folder name with a timestamp.
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_folder = f"backup_{timestamp}"
        print(f"Creating backup folder '{backup_folder}' inside '{destination_prefix}' and copying existing files...")
        backup_existing_files(bucket, destination_prefix, backups_prefix, backup_folder)

    if UPLOAD_FIRMWARE:
        print("Uploading firmware...")
        upload_file(bucket, LOCAL_FIRMWARE_PATH, _remote_path(destination_prefix, "firmware.bin"))

    if UPLOAD_MANIFEST:
        print("Uploading manifest...")
        upload_file(bucket, LOCAL_MANIFEST_PATH, _remote_path(destination_prefix, "update.json"))

    if UPLOAD_DATA:
        print("Uploading data directory...")
        upload_directory(bucket, LOCAL_DATA_DIRECTORY, _remote_path(destination_prefix, "data"))

    print("Upload complete.")


if __name__ == '__main__':
    main()