@@ -6,7 +6,9 @@
 from botocore.exceptions import BotoCoreError
 from dotenv import load_dotenv
 
-logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+logging.basicConfig(
+    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+)
 logger = logging.getLogger("behave.environment")
 
 
@@ -24,13 +26,19 @@ def _load_environment_variables(context):
     context.aws_access_key_id = os.getenv("AWS_ACCESS_KEY_ID")
     context.aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY")
     context.aws_session_token = os.getenv("AWS_SESSION_TOKEN")
-    context.abort_on_aws_error = os.getenv("ABORT_ON_AWS_FAILURE", "false").lower() == "true"
+    context.abort_on_aws_error = (
+        os.getenv("ABORT_ON_AWS_FAILURE", "false").lower() == "true"
+    )
     context.keep_seed = os.getenv("KEEP_SEED", "false").lower() == "true"
-    context.dynamodb_table_name = os.getenv("DYNAMODB_TABLE_NAME", "eligibilty_data_store")
+    context.dynamodb_table_name = os.getenv(
+        "DYNAMODB_TABLE_NAME", "eligibilty_data_store"
+    )
     context.s3_bucket = os.getenv("S3_BUCKET_NAME")
     context.s3_upload_dir = os.getenv("S3_UPLOAD_DIR", "")
     context.s3_data_path = Path(os.getenv("S3_JSON_SOURCE_DIR", "./data/s3")).resolve()
-    context.api_gateway_url = os.getenv("API_GATEWAY_URL", "https://test.eligibility-signposting-api.nhs.uk")
+    context.api_gateway_url = os.getenv(
+        "API_GATEWAY_URL", "https://test.eligibility-signposting-api.nhs.uk"
+    )
 
     logger.info("ABORT_ON_AWS_FAILURE=%s", context.abort_on_aws_error)
     logger.info("KEEP_SEED=%s", context.keep_seed)
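
Note on the flag parsing above: only the literal string "true" (case-insensitive) enables a flag; any other value, including an unset variable, resolves to False. A minimal sketch of the convention, using a hypothetical `env_flag` helper that is not part of this change:

```python
import os

def env_flag(name: str, default: str = "false") -> bool:
    # Same pattern as the hooks: case-insensitive comparison against "true".
    return os.getenv(name, default).lower() == "true"

os.environ["ABORT_ON_AWS_FAILURE"] = "TRUE"
assert env_flag("ABORT_ON_AWS_FAILURE") is True
assert env_flag("KEEP_SEED") is False  # assuming KEEP_SEED is unset: falls back to "false"
```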
@@ -59,13 +67,19 @@ def _setup_s3(context):
 
         json_files = list(context.s3_data_path.glob("*.json"))
         for file_path in json_files:
-            key = f"{context.s3_upload_dir}/{file_path.name}" if context.s3_upload_dir else file_path.name
+            key = (
+                f"{context.s3_upload_dir}/{file_path.name}"
+                if context.s3_upload_dir
+                else file_path.name
+            )
             try:
                 s3_client.upload_file(str(file_path), context.s3_bucket, key)
-                logger.info("Uploaded %s to s3://%s/%s", file_path.name, context.s3_bucket, key)
-            except (boto3.exceptions.Boto3Error, BotoCoreError):
+                logger.info(
+                    "Uploaded %s to s3://%s/%s", file_path.name, context.s3_bucket, key
+                )
+            except Exception:
                 logger.exception("Failed to upload %s", file_path.name)
-    except (boto3.exceptions.Boto3Error, BotoCoreError):
+    except Exception:
         logger.exception("S3 upload setup failed")
         if context.abort_on_aws_error:
             context.abort_all = True
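
The key construction above avoids a leading slash when S3_UPLOAD_DIR is empty, so files land at the bucket root. A quick sketch with placeholder values (the `object_key` helper is illustrative, not part of the hooks):

```python
from pathlib import Path

def object_key(upload_dir: str, file_path: Path) -> str:
    # Prefix the object key only when an upload directory is configured.
    return f"{upload_dir}/{file_path.name}" if upload_dir else file_path.name

assert object_key("seed", Path("data/s3/rules.json")) == "seed/rules.json"
assert object_key("", Path("data/s3/rules.json")) == "rules.json"
```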
@@ -95,6 +109,44 @@ def before_scenario(context, scenario):
     logger.info("Running scenario: %s", scenario.name)
 
 
+def before_feature(context, feature):
+    """Initialize feature-level context for data setup tracking."""
+    context.feature_data_setup_done = False
+    context.feature_dynamodb_items_count = 0
+    context.feature_uploader = None
+    logger.info("Initialized feature context for: %s", feature.name)
+
+
+def after_feature(context, feature):
+    """Clean up feature-level DynamoDB data."""
+    if getattr(context, "keep_seed", False):
+        logger.info(
+            "KEEP_SEED=true — skipping feature-level DynamoDB cleanup for: %s",
+            feature.name,
+        )
+        return
+
+    if getattr(context, "feature_uploader", None):
+        if context.feature_dynamodb_items_count > 0:
+            logger.info(
+                "Cleaning up %d DynamoDB items for feature: %s",
+                context.feature_dynamodb_items_count,
+                feature.name,
+            )
+        try:
+            # Use the uploader's cleanup method if available.
+            if hasattr(context.feature_uploader, "delete_data"):
+                context.feature_uploader.delete_data()
+                logger.info(
+                    "Successfully cleaned up DynamoDB data for feature: %s",
+                    feature.name,
+                )
+        except Exception:
+            logger.exception(
+                "Failed to clean up DynamoDB data for feature: %s", feature.name
+            )
+
+
 def after_all(context):
     if context.keep_seed:
         logger.info("KEEP_SEED=true — skipping cleanup.")
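
after_feature only duck-types the uploader: whatever a step stores in context.feature_uploader gets cleaned up, as long as it exposes a delete_data() method. A hypothetical minimal implementation of that contract (class name and storage details are assumptions, not taken from this repo):

```python
class SeedUploader:
    """Hypothetical seeding helper satisfying the delete_data() contract."""

    def __init__(self, table, keys):
        self.table = table  # a boto3 DynamoDB Table resource
        self.keys = keys    # primary keys of the items seeded for the feature

    def delete_data(self):
        # Remove every seeded item in one batched pass.
        with self.table.batch_writer() as batch:
            for key in self.keys:
                batch.delete_item(Key=key)
```

Steps would assign context.feature_uploader = SeedUploader(table, keys) and bump context.feature_dynamodb_items_count as they seed.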
@@ -107,11 +159,17 @@ def after_all(context):
         s3_client = boto3.client("s3", region_name=context.aws_region)
         json_files = list(context.s3_data_path.glob("*.json"))
         for file_path in json_files:
-            key = f"{context.s3_upload_dir}/{file_path.name}" if context.s3_upload_dir else file_path.name
+            key = (
+                f"{context.s3_upload_dir}/{file_path.name}"
+                if context.s3_upload_dir
+                else file_path.name
+            )
             try:
                 s3_client.delete_object(Bucket=context.s3_bucket, Key=key)
                 logger.info("Deleted s3://%s/%s", context.s3_bucket, key)
-            except (boto3.exceptions.Boto3Error, BotoCoreError):
-                logger.exception("Failed to delete s3://%s/%s", context.s3_bucket, key)
+            except Exception:
+                logger.exception(
+                    "Failed to delete s3://%s/%s", context.s3_bucket, key
+                )
     except Exception:
         logger.exception("S3 cleanup failed")
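
Taken together, behave invokes these hooks in a fixed order, so the new feature-level DynamoDB cleanup nests inside the existing run-level S3 cleanup. A rough sketch of the flow (scenario steps and undefined hooks elided):

```python
# Rough execution order behave applies to the hooks in this file:
#
# for feature in features:
#     before_feature(context, feature)
#     for scenario in feature.scenarios:
#         before_scenario(context, scenario)
#         # ... run steps ...
#     after_feature(context, feature)   # feature-level DynamoDB cleanup
# after_all(context)                    # S3 cleanup (skipped when KEEP_SEED=true)
```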