Skip to content

Commit b67dee9

Browse files
committed
Added python manual upload script and test file
1 parent 1976799 commit b67dee9

2 files changed

Lines changed: 163 additions & 0 deletions

File tree

Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
import argparse
import hashlib
import json
import logging
import os
from decimal import Decimal
from pathlib import Path
from typing import Any

import boto3
8+
9+
def map_dynamo_type(value: Any) -> dict:
    """Convert a plain Python value into its DynamoDB AttributeValue form.

    Recurses through lists and dicts; supported scalars map to S/N/BOOL/NULL.
    Anything unrecognised is stringified with a warning rather than rejected.
    """
    # bool must be tested before int/float: bool is a subclass of int.
    if isinstance(value, bool):
        return {"BOOL": value}
    if isinstance(value, str):
        return {"S": value}
    if isinstance(value, (int, float, Decimal)):
        return {"N": str(value)}
    if value is None:
        return {"NULL": True}
    if isinstance(value, list):
        return {"L": [map_dynamo_type(item) for item in value]}
    if isinstance(value, dict):
        return {"M": {k: map_dynamo_type(v) for k, v in value.items()}}
    if hasattr(value, "asDict"):
        # Duck-typed check for pyspark Row (not imported here) — anything
        # exposing asDict() is treated as a mapping.
        return {"M": {k: map_dynamo_type(v) for k, v in value.asDict().items()}}
    # BUG FIX: the original passed a second positional string to
    # logging.warning (mis-parsed as a %-format argument) and returned the
    # raw value despite claiming to convert it to a string.
    logging.warning("Unsupported value type: %s; converting it to string", type(value))
    return {"S": str(value)}
27+
28+
29+
def upload_to_s3(s3_client, bucket, filepath, dry_run=False):
    """Upload one local file to S3 under the manual-uploads/ prefix.

    Failures are reported on stdout rather than raised, so a bad file does
    not abort a batch of uploads. When dry_run is True, only prints what
    would be uploaded.
    """
    filename = os.path.basename(filepath)
    # BUG FIX: the key was hard-coded as "manual-uploads/(unknown).json" and
    # the computed filename was never used; derive the key from the filename
    # (the companion test expects manual-uploads/<filename>.json).
    s3_key = f"manual-uploads/{filename}.json"

    if dry_run:
        print(f"[DRY RUN] Would upload {filepath} to s3://{bucket}/{s3_key}")
        return

    try:
        s3_client.upload_file(filepath, bucket, s3_key)
        print(f"Uploaded {filepath} to s3://{bucket}/{s3_key}")
    except Exception as e:
        print(f"Failed to upload {filepath}: {e}")
42+
43+
44+
def upload_to_dynamo(dynamo_client, table_name, filepath):
    """Read a JSON file and put its contents into DynamoDB as a single item.

    Each top-level key becomes an attribute, converted via map_dynamo_type.
    Failures are reported on stdout rather than raised.
    """
    with open(filepath) as handle:
        record = json.load(handle)

    try:
        dynamo_client.put_item(
            TableName=table_name,
            Item={field: map_dynamo_type(raw) for field, raw in record.items()},
        )
    except Exception as e:
        print(f"Failed to upload {filepath}: {e}")
    else:
        print(f"Uploaded {filepath} to DynamoDB table {table_name}")
55+
56+
57+
def _build_parser():
    """Construct the CLI argument parser for the manual-upload script."""
    parser = argparse.ArgumentParser()
    for flag, extra in (
        ("--env", {}),
        ("--upload-s3", {"type": Path}),
        ("--upload-dynamo", {"type": Path}),
        ("--region", {"default": "eu-west-2"}),
        ("--s3-bucket", {}),
        ("--dynamo-table", {}),
        ("--dry-run", {"action": "store_true"}),
    ):
        parser.add_argument(flag, **extra)
    return parser


def main():
    """CLI entry point: push *.json files from the given directories to S3 and/or DynamoDB."""
    args = _build_parser().parse_args()

    # Fall back to environment-derived resource names when not given explicitly.
    args.s3_bucket = args.s3_bucket or f"eligibility-signposting-api-{args.env}-eli-rules"
    args.dynamo_table = (
        args.dynamo_table or f"eligibility-signposting-api-{args.env}-eligibility_datastore"
    )

    session = boto3.Session()
    s3 = session.client("s3", region_name=args.region)
    dynamo = session.client("dynamodb", region_name=args.region)

    if args.upload_s3:
        for json_file in args.upload_s3.glob("*.json"):
            upload_to_s3(s3, args.s3_bucket, str(json_file), args.dry_run)

    if args.upload_dynamo:
        for json_file in args.upload_dynamo.glob("*.json"):
            upload_to_dynamo(dynamo, args.dynamo_table, str(json_file))
84+
85+
86+
# Run the CLI only when executed as a script (not when imported).
if __name__ == "__main__":
    main()
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
import json
2+
import tempfile
3+
import subprocess
4+
import os
5+
from pathlib import Path
6+
import pytest
7+
from moto import mock_s3, mock_dynamodb
8+
import boto3
9+
10+
11+
@pytest.fixture
def test_data_dir(tmp_path):
    """Write a sample record to <tmp>/test.json and return (directory, record)."""
    record = {"ID_NUMBER": "123", "ATTRIBUTE_TYPE": "Test", "value": 99}
    (tmp_path / "test.json").write_text(json.dumps(record))
    return tmp_path, record
18+
19+
20+
@mock_s3
@mock_dynamodb
def test_script_cli_end_to_end(test_data_dir):
    """Run the upload script via its CLI and verify the data lands in S3 and DynamoDB.

    NOTE(review): moto's decorators mock AWS only inside *this* process, but
    the script under test runs in a subprocess, which will not see the mocks.
    This test likely needs moto's standalone server mode (or an in-process
    call to main() with patched sys.argv) to actually pass — TODO confirm.
    """
    import sys

    # Arrange
    data_dir, expected_data = test_data_dir
    env = "test"
    region = "eu-west-2"
    s3_bucket = f"api-{env}-rules"
    dynamo_table = f"api-{env}-datastore"

    s3 = boto3.client("s3", region_name=region)
    s3.create_bucket(Bucket=s3_bucket)

    dynamodb = boto3.client("dynamodb", region_name=region)
    dynamodb.create_table(
        TableName=dynamo_table,
        KeySchema=[
            {"AttributeName": "ID_NUMBER", "KeyType": "HASH"},
            {"AttributeName": "ATTRIBUTE_TYPE", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "ID_NUMBER", "AttributeType": "S"},
            {"AttributeName": "ATTRIBUTE_TYPE", "AttributeType": "S"},
        ],
        BillingMode="PAY_PER_REQUEST",
    )

    # Act
    # BUG FIX: the script's env-derived default names
    # (eligibility-signposting-api-<env>-...) do not match the resources
    # created above, so bucket and table must be passed explicitly. Also use
    # sys.executable so the child runs under the same interpreter as pytest.
    result = subprocess.run(
        [
            sys.executable, "your_script.py",  # TODO: replace placeholder script name
            "--env", env,
            "--upload-s3", str(data_dir),
            "--upload-dynamo", str(data_dir),
            "--region", region,
            "--s3-bucket", s3_bucket,
            "--dynamo-table", dynamo_table,
        ],
        capture_output=True,
        text=True,
    )

    # Assert the CLI succeeded *before* inspecting AWS state, so a crash
    # surfaces as a clear failure carrying the captured stderr.
    assert result.returncode == 0, result.stderr
    assert "Uploaded" in result.stdout

    key = "manual-uploads/test.json.json"
    obj = s3.get_object(Bucket=s3_bucket, Key=key)
    uploaded_s3_data = json.load(obj["Body"])
    assert uploaded_s3_data == expected_data

    item = dynamodb.get_item(
        TableName=dynamo_table,
        Key={
            "ID_NUMBER": {"S": expected_data["ID_NUMBER"]},
            "ATTRIBUTE_TYPE": {"S": expected_data["ATTRIBUTE_TYPE"]},
        },
    )["Item"]
    assert item["value"]["N"] == "99"

0 commit comments

Comments
 (0)