-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathstage.sh
executable file
·90 lines (69 loc) · 3.42 KB
/
stage.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
#!/bin/bash
# Staging script for copying deployment resources to an S3 bucket. The resources
# copied here are used as part of the deployment process for this project as
# well as some runtime dependencies such as product images and seed data for
# loading products and categories into DDB and CSVs for training Personalize models.
set -e

# Positional arguments:
#   $1 - target S3 bucket (required)
#   $2 - key prefix; optional, but must carry a trailing '/' when given
BUCKET=$1
#Path with trailing /
S3PATH=$2

# Validate arguments *before* touching the local filesystem so a bad
# invocation leaves no side effects behind. (Original created local/ and
# the log file first.)
if [ -z "$BUCKET" ]; then
    echo "Usage: $0 BUCKET [S3PATH]"
    echo " where BUCKET is the S3 bucket to upload resources to and S3PATH is optional path but if specified must have a trailing '/'"
    exit 1
fi

# remove this line if you want to keep the objects private in your S3 bucket
# NOTE: the leading space is deliberate — the value is appended unquoted to
# the aws s3 commands below so it word-splits into separate CLI arguments.
export S3PUBLIC=" --acl public-read"

# Scratch area for per-Lambda staging logs; -p makes this idempotent.
mkdir -p local
touch local/stage.log
# Resolve the bucket's region so the printed CloudFormation URLs use a
# region-specific S3 endpoint.
# The get-bucket-location JSON looks like {"LocationConstraint": "eu-west-1"};
# the grep/cut pipeline pulls the 4th double-quote-delimited field (the region).
# For buckets in us-east-1 the API returns null (no quotes around the value),
# so cut yields an empty string and we fall back to the global endpoint below.
# NOTE(review): fragile parsing — presumably `--query LocationConstraint
# --output text` would be more robust; verify output for us-east-1 ("None")
# before changing.
BUCKET_LOCATION="$(aws s3api get-bucket-location --bucket ${BUCKET}|grep ":"|cut -d\" -f4)"
if [ -z "$BUCKET_LOCATION" ]; then
# Empty location ⇒ us-east-1 ⇒ legacy global endpoint.
BUCKET_DOMAIN="s3.amazonaws.com"
else
# Legacy dash-style regional endpoint (s3-<region>.amazonaws.com).
BUCKET_DOMAIN="s3-${BUCKET_LOCATION}.amazonaws.com"
fi
# Remove Mac desktop storage files so they don't get packaged & uploaded
find . -name '.DS_Store' -type f -delete

echo " + Staging to $BUCKET in $S3PATH"

# Upload all CloudFormation templates. The S3 URI is quoted against
# word-splitting (SC2086); $S3PUBLIC is intentionally left unquoted so its
# embedded "--acl public-read" splits into separate arguments.
echo " + Uploading CloudFormation Templates"
aws s3 cp aws/cloudformation-templates/ "s3://${BUCKET}/${S3PATH}cloudformation-templates" --recursive $S3PUBLIC
echo " For CloudFormation : https://${BUCKET_DOMAIN}/${BUCKET}/${S3PATH}cloudformation-templates/template.yaml"
# Bundle the workshop notebooks with their runtime dependencies
# (data generator module, requirements file, product/user seed data).
echo " + Copying Notebook Dependencies"
[ -e "retaildemostore-notebooks.zip" ] && rm retaildemostore-notebooks.zip
rsync -av --progress ./generators/datagenerator ./workshop --exclude __pycache__
cp ./generators/requirements.txt ./workshop
# mkdir -p is idempotent — replaces the original "[ ! -d ] && mkdir" test.
mkdir -p ./workshop/data
cp ./src/products/src/products-service/data/products.yaml ./workshop/data
cp ./src/users/src/users-service/data/users.json.gz ./workshop/data

echo " + Packaging Notebooks"
zip -qr retaildemostore-notebooks.zip ./workshop/ -x "*.DS_Store" "*.ipynb_checkpoints*" "*.csv"

# Quoted S3 URI (SC2086); $S3PUBLIC deliberately unquoted so its embedded
# "--acl public-read" word-splits into CLI arguments.
echo " + Uploading Notebooks"
aws s3 cp retaildemostore-notebooks.zip "s3://${BUCKET}/${S3PATH}notebooks/retaildemostore-notebooks.zip" $S3PUBLIC
# Package all microservice source (excluding Lambda code, node_modules and
# nested archives) so the deployment pipeline can pull it from S3.
echo " + Packaging Source"
[ -e "retaildemostore-source.zip" ] && rm retaildemostore-source.zip
zip -qr retaildemostore-source.zip ./src/ -x "*.DS_Store" "*__pycache__*" "*/aws-lambda/*" "*/node_modules/*" "*.zip"

# S3 URIs quoted against word-splitting (SC2086); $S3PUBLIC deliberately
# unquoted so its embedded "--acl public-read" splits into arguments.
echo " + Uploading Source"
aws s3 cp retaildemostore-source.zip "s3://${BUCKET}/${S3PATH}source/retaildemostore-source.zip" $S3PUBLIC

# Seed data for loading products/categories/users into DDB at deploy time.
echo " + Upload seed data"
aws s3 cp src/products/src/products-service/data/ "s3://${BUCKET}/${S3PATH}data" --recursive $S3PUBLIC
aws s3 cp src/users/src/users-service/data/ "s3://${BUCKET}/${S3PATH}data" --recursive $S3PUBLIC

# Sync CSVs used for Personalize pre-create campaign Lambda function
echo " + Copying CSVs for Personalize model pre-create training"
aws s3 sync s3://retail-demo-store-code/csvs "s3://${BUCKET}/${S3PATH}csvs" --only-show-errors $S3PUBLIC
# Stage AWS Lambda functions: each function directory ships its own stage.sh
# that packages and uploads that function to the target bucket/path.
echo " + Staging AWS Lambda functions"
# Resolve the log path once, up front, so it stays valid no matter what
# directory we cd into (the original relied on a fixed ../../../ depth).
STAGE_LOG="$PWD/local/stage.log"
for function in ./src/aws-lambda/*/
do
    echo " + Staging $function"
    # Run in a subshell so the cd is local — avoids 'cd -', which also
    # echoes the directory to stdout. Quoting protects paths with spaces.
    (
        cd "$function"
        chmod +x ./stage.sh
        # BUG FIX: use '>>' (append). The original '>' truncated the log on
        # every iteration, so only the last function's output survived.
        ./stage.sh "${BUCKET}" "${S3PATH}" >> "$STAGE_LOG"
    )
done
# Sync product images
# Quoted S3 URI (SC2086); $S3PUBLIC deliberately unquoted so its embedded
# "--acl public-read" word-splits into separate CLI arguments.
echo " + Copying product images"
aws s3 sync ./images "s3://${BUCKET}/${S3PATH}images" --only-show-errors $S3PUBLIC

echo " + Done s3://${BUCKET}/${S3PATH} "
echo " For CloudFormation : https://${BUCKET_DOMAIN}/${BUCKET}/${S3PATH}cloudformation-templates/template.yaml"