Skip to content

Commit

Permalink
publish_release.py: upload archives to s3
Browse files Browse the repository at this point in the history
  • Loading branch information
sdarwin committed Mar 12, 2024
1 parent 774a12a commit 03085f9
Showing 1 changed file with 39 additions and 2 deletions.
41 changes: 39 additions & 2 deletions publish_release.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@
# Downloads snapshots from artifactory, renames them, confirms the sha hash,
# and then uploads the files back to artifactory.
#
# TODO:
#
# 2024-03-11 If the switch from jfrog to s3 becomes permanent, adjust the dryrun
# setting for the "Upload extracted files to S3 for the website docs" step so that
# it is enabled or disabled consistently with the other s3 uploads. Review all
# dryrun configurations throughout the file.
#
# Instructions
#
Expand Down Expand Up @@ -45,6 +50,7 @@
from dotenv import load_dotenv

jfrogURL = "https://boostorg.jfrog.io/artifactory/"
s3_archives_bucket = "boost-archives"


def fileHash(fileName):
Expand Down Expand Up @@ -105,10 +111,27 @@ def copyJFROGFile(sourceRepo, sourceFileName, destRepo, destFileName, suffix):

def uploadJFROGFile(sourceFileName, destRepo):
    # Upload a single file to the JFROG artifactory repository.
    #
    # sourceFileName: local path of the file to upload.
    # destRepo: destination JFROG repository path.
    #
    # Fix: the diff left a stale duplicate of the old print message here;
    # keep only the updated "to JFROG" message.
    print("Uploading: %s to JFROG" % (sourceFileName))
    # NOTE(review): os.system with %-interpolated file names is fragile if a
    # name ever contains spaces or shell metacharacters; subprocess.run with
    # an argument list would be safer. Left as-is since inputs are
    # internally generated release file names.
    os.system("jfrog rt upload %s %s" % (sourceFileName, destRepo))


def uploadS3File(sourceFileName, destRepo):
    """Upload a release archive to the boost-archives S3 bucket via rclone.

    sourceFileName: local path of the archive to upload.
    destRepo: JFROG-style destination path; a leading "main/" is stripped to
        form the S3 key prefix under the bucket.

    Raises subprocess.CalledProcessError if rclone exits nonzero (check=True).
    """
    print("Uploading: %s to S3" % (sourceFileName))
    archivePathLocal = sourceFileName
    # Map the JFROG destination path to the S3 layout: drop the "main/"
    # prefix and address the bucket through the preconfigured "remote1"
    # rclone remote.
    archivePathRemote = re.sub("^main/", "", destRepo)
    archivePathRemote = "remote1:" + s3_archives_bucket + "/" + archivePathRemote
    # Pass AWS_PROFILE through the environment instead of an inline shell
    # "export", and invoke rclone with an argument list (no shell) so file
    # names with spaces or shell metacharacters cannot break or inject into
    # the command line.
    result = subprocess.run(
        [
            "rclone",
            "-v",
            "--s3-no-check-bucket",
            "copy",
            "--checksum",
            archivePathLocal,
            archivePathRemote,
        ],
        check=True,
        text=True,
        env={**os.environ, "AWS_PROFILE": "production"},
    )
    # NOTE(review): "options" is the module-level OptionParser result; no
    # --progress option is visible in this excerpt -- confirm it is defined
    # elsewhere in the file.
    if options.progress:
        print(result)


#####
usage = "usage: %prog [options] boost_version # Example: %prog 1_85_0"
parser = OptionParser(usage=usage)
Expand Down Expand Up @@ -140,6 +163,14 @@ def uploadJFROGFile(sourceFileName, destRepo):
dest="dryrun",
)

# S3 archive uploads are dry-run by default during the jfrog->s3 transition
# (see the TODO at the top of the file).
# Fix: with default=True and action="store_true", passing --dry-run-s3 alone
# was a no-op and the dry run could never be disabled from the command line;
# add a complementary --no-dry-run-s3 flag (same dest) to actually enable
# uploads. Existing invocations are unaffected.
parser.add_option(
    "--dry-run-s3",
    default=True,
    action="store_true",
    help="don't upload release archives to s3",
    dest="dryrun_s3",
)

parser.add_option(
    "--no-dry-run-s3",
    action="store_false",
    help="upload release archives to s3",
    dest="dryrun_s3",
)

(options, args) = parser.parse_args()
if len(args) != 1:
print("Too Many arguments")
Expand Down Expand Up @@ -216,7 +247,7 @@ def uploadJFROGFile(sourceFileName, destRepo):
copyJFROGFile(sourceRepo, snapshotName, destRepo, actualName, s)
uploadJFROGFile(actualName + s + ".json", destRepo)

# Upload the files to S3
# Upload extracted files to S3 for the website docs
aws_profiles = {
"production": "boost.org.v2",
"stage": "stage.boost.org.v2",
Expand Down Expand Up @@ -261,6 +292,12 @@ def uploadJFROGFile(sourceFileName, destRepo):
% (profile, archivePathLocal, archivePathRemote)
)

# Upload archives to S3 (mirrors the JFROG upload loop above).
# Skipped unless dry-run is explicitly disabled: dryrun_s3 defaults to True,
# so this is opt-in during the jfrog->s3 transition (see TODO at file top).
if not options.dryrun_s3:
    for s in suffixes:
        # Upload each archive variant plus its companion .json metadata file.
        uploadS3File(actualName + s, destRepo)
        uploadS3File(actualName + s + ".json", destRepo)

###############################################################################
#
# Inform CDN origins about uploaded files
Expand Down

0 comments on commit 03085f9

Please sign in to comment.