Skip to content

Commit 267665e

Browse files
committed
Unit test now passes
1 parent b67dee9 commit 267665e

3 files changed

Lines changed: 49 additions & 36 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ python-json-logger = "^3.3.0"
3535
fhir-resources = "^8.0.0"
3636
python-dateutil = "^2.9.0"
3737
pyhamcrest = "^2.1.0"
38+
moto = "^5.1.5"
3839

3940
[tool.poetry.group.dev.dependencies]
4041
ruff = "^0.11.0"

scripts/manual_uploads/manual_s3_dynamo_upload.py

Lines changed: 25 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
import os
55
import argparse
66
from pathlib import Path
7+
from typing import Any, Dict, List, Optional
8+
from decimal import Decimal
9+
710

811

912
def map_dynamo_type(value: Any):
@@ -28,7 +31,8 @@ def map_dynamo_type(value: Any):
2831

2932
def upload_to_s3(s3_client, bucket, filepath, dry_run=False):
3033
filename = os.path.basename(filepath)
31-
s3_key = f"manual-uploads/{filename}.json"
34+
print(f"Filepath: {filepath}")
35+
s3_key = f"manual-uploads/{filename}"
3236

3337
if dry_run:
3438
print(f"[DRY RUN] Would upload {filepath} to s3://{bucket}/{s3_key}")
@@ -54,7 +58,8 @@ def upload_to_dynamo(dynamo_client, table_name, filepath):
5458
print(f"Failed to upload {filepath}: {e}")
5559

5660

57-
def main():
61+
def run_upload(args=None):
62+
print("\n\n\n***** We are in main *****\n\n\n")
5863
parser = argparse.ArgumentParser()
5964
parser.add_argument("--env")
6065
parser.add_argument("--upload-s3", type=Path)
@@ -63,25 +68,29 @@ def main():
6368
parser.add_argument("--s3-bucket")
6469
parser.add_argument("--dynamo-table")
6570
parser.add_argument("--dry-run", action="store_true")
66-
args = parser.parse_args()
6771

68-
if not args.s3_bucket:
69-
args.s3_bucket = f"eligibility-signposting-api-{args.env}-eli-rules"
70-
if not args.dynamo_table:
71-
args.dynamo_table = f"eligibility-signposting-api-{args.env}-eligibility_datastore"
72+
if args is None:
73+
parsed_args = parser.parse_args()
74+
else:
75+
parsed_args = parser.parse_args(args)
76+
77+
if not parsed_args.s3_bucket:
78+
parsed_args.s3_bucket = f"eligibility-signposting-api-{parsed_args.env}-eli-rules"
79+
if not parsed_args.dynamo_table:
80+
parsed_args.dynamo_table = f"eligibility-signposting-api-{parsed_args.env}-eligibility_datastore"
7281

7382
session = boto3.Session()
74-
s3 = session.client("s3", region_name=args.region)
75-
dynamo = session.client("dynamodb", region_name=args.region)
83+
s3 = session.client("s3", region_name=parsed_args.region)
84+
dynamo = session.client("dynamodb", region_name=parsed_args.region)
7685

77-
if args.upload_s3:
78-
for filepath in args.upload_s3.glob("*.json"):
79-
upload_to_s3(s3, args.s3_bucket, str(filepath), args.dry_run)
86+
if parsed_args.upload_s3:
87+
for filepath in parsed_args.upload_s3.glob("*.json"):
88+
upload_to_s3(s3, parsed_args.s3_bucket, str(filepath), parsed_args.dry_run)
8089

81-
if args.upload_dynamo:
82-
for filepath in args.upload_dynamo.glob("*.json"):
83-
upload_to_dynamo(dynamo, args.dynamo_table, str(filepath))
90+
if parsed_args.upload_dynamo:
91+
for filepath in parsed_args.upload_dynamo.glob("*.json"):
92+
upload_to_dynamo(dynamo, parsed_args.dynamo_table, str(filepath))
8493

8594

8695
if __name__ == "__main__":
87-
main()
96+
run_upload()
Lines changed: 23 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,14 @@
1+
import os
12
import json
23
import tempfile
34
import subprocess
4-
import os
55
from pathlib import Path
66
import pytest
7-
from moto import mock_s3, mock_dynamodb
7+
88
import boto3
9+
from moto import mock_aws
10+
11+
from scripts.manual_uploads.manual_s3_dynamo_upload import run_upload
912

1013

1114
@pytest.fixture
@@ -17,9 +20,8 @@ def test_data_dir(tmp_path):
1720
return tmp_path, data
1821

1922

20-
@mock_s3
21-
@mock_dynamodb
22-
def test_script_cli_end_to_end(test_data_dir):
23+
@mock_aws
24+
def test_script_cli_end_to_end(test_data_dir, capsys):
2325
# Arrange
2426
data_dir, expected_data = test_data_dir
2527
env = "test"
@@ -28,7 +30,10 @@ def test_script_cli_end_to_end(test_data_dir):
2830
dynamo_table = f"api-{env}-datastore"
2931

3032
s3 = boto3.client("s3", region_name=region)
31-
s3.create_bucket(Bucket=s3_bucket)
33+
s3.create_bucket(
34+
Bucket=s3_bucket,
35+
CreateBucketConfiguration={"LocationConstraint": region}
36+
)
3237

3338
dynamodb = boto3.client("dynamodb", region_name=region)
3439
dynamodb.create_table(
@@ -45,20 +50,18 @@ def test_script_cli_end_to_end(test_data_dir):
4550
)
4651

4752
# Act
48-
result = subprocess.run(
49-
[
50-
"python", "your_script.py",
51-
"--env", env,
52-
"--upload-s3", str(data_dir),
53-
"--upload-dynamo", str(data_dir),
54-
"--region", region
55-
],
56-
capture_output=True,
57-
text=True
58-
)
53+
return_code = run_upload([
54+
"--env", env,
55+
"--upload-s3", str(data_dir),
56+
"--upload-dynamo", str(data_dir),
57+
"--region", region,
58+
"--s3-bucket", s3_bucket,
59+
"--dynamo-table", dynamo_table
60+
])
61+
captured = capsys.readouterr()
5962

6063
# Assert
61-
key = f"manual-uploads/test.json.json"
64+
key = f"manual-uploads/test.json"
6265
obj = s3.get_object(Bucket=s3_bucket, Key=key)
6366
uploaded_s3_data = json.load(obj["Body"])
6467
assert uploaded_s3_data == expected_data
@@ -72,5 +75,5 @@ def test_script_cli_end_to_end(test_data_dir):
7275
)["Item"]
7376
assert item["value"]["N"] == "99"
7477

75-
assert result.returncode == 0
76-
assert "Uploaded" in result.stdout
78+
assert "Uploaded" in captured.out
79+
assert "Error" not in captured.err

0 commit comments

Comments
 (0)