2121
2222
2323def get_test_human_name (file ):
24-
2524 exp = get_expectation_impl (get_test_name (file ))
26- template_json = exp ._prescriptive_renderer (configuration = ExpectationConfiguration (get_test_name (file ), kwargs = get_params1 (file )))[0 ]
25+ template_json = \
26+ exp ._prescriptive_renderer (configuration = ExpectationConfiguration (get_test_name (file ), kwargs = get_params1 (file )))[0 ]
2727 if type (template_json ) is not dict :
2828 template_json = template_json .to_json_dict ()
2929 template_str = template_json ['string_template' ]['template' ]
3030 params = get_params1 (file )
3131 result_string = template_str
3232 new_params = {}
33- for key ,value in params .items ():
33+ for key , value in params .items ():
3434 if type (value ) == list :
3535 if key == 'value_set' :
3636 for i in value :
@@ -57,15 +57,14 @@ def get_test_human_name(file):
5757 return result_string
5858
5959
def get_json(json_name, validate_id):
    """Fetch a Great Expectations validation-result JSON from S3 and parse it.

    The object lives under
    great_expectations/uncommitted/validations/<validate_id>.json inside the
    qa bucket (note the key is prefixed with the bucket name itself, matching
    how the rest of this module writes objects).
    """
    file_name = f"great_expectations/uncommitted/validations/{validate_id}.json"
    content_object = s3.Object(qa_bucket, f"{qa_bucket}/{file_name}")
    raw_body = content_object.get()['Body'].read()
    return json.loads(raw_body.decode('utf-8'))
6666
6767
68-
def get_suit_status():
    """Return the overall suite status; suites are always reported as passed."""
    status = "passed"
    return status
7170
@@ -75,23 +74,25 @@ def get_test_name(file):
7574
7675
def get_suit_name(file, i):
    """Build the Allure suite name for one expectation result.

    Returns '<data_asset_name>.<column>' when the expectation targets a
    specific column, otherwise just the data asset name.
    """
    asset_name = file['meta']['batch_kwargs']['data_asset_name']
    kwargs = i['expectation_config']['kwargs']
    if 'column' in kwargs:
        return f"{asset_name}.{kwargs['column']}"
    return asset_name
7982
8083
def get_jira_ticket(file):
    """Return an Allure link descriptor for the bug ticket attached to an
    expectation, or an empty dict when no 'Bug Ticket' entry is present
    in the expectation's meta."""
    meta = file['expectation_config']['meta']
    if 'Bug Ticket' not in meta:
        return {}
    return {
        "name": "Bug ticket",
        "url": meta['Bug Ticket'],
        "type": "issue",
    }
9194
9295
93-
94-
def get_severity(file):
    """Return the expectation's 'Severity' meta value, or "" when absent.

    Uses dict.get instead of the original membership-test-plus-index
    conditional expression — one lookup instead of two, same behavior.
    """
    return file['expectation_config']['meta'].get('Severity', "")
9798
@@ -105,7 +106,8 @@ def get_stop_suit_time():
105106
106107
def parse_datetime(date_str):
    """Parse a run-name timestamp like '20200101T120000.000000Z' into epoch
    milliseconds.

    Bug fix: the trailing 'Z' marks these timestamps as UTC, but the original
    code parsed them into a naive datetime and called .timestamp(), which
    interprets naive datetimes in the machine's LOCAL timezone — shifting
    every reported start/stop time by the host's UTC offset. Attaching
    tzinfo=timezone.utc before converting makes the result host-independent.
    """
    from datetime import timezone  # local import: file imports only `datetime`
    parsed = datetime.strptime(date_str, '%Y%m%dT%H%M%S.%fZ')
    return parsed.replace(tzinfo=timezone.utc).timestamp() * 1000
109111
def get_start_test_time(file):
    """Start time of a test in epoch milliseconds, derived from the run name."""
    run_name = file['meta']['run_id']['run_name']
    return parse_datetime(run_name)
@@ -120,91 +122,90 @@ def get_params(file):
120122 del params ['result_format' ]
121123 result = []
122124 for param in params :
123- result .append ({"name" : param , "value" : str (params [param ])}) if isinstance (params [param ], list ) else result .append ({"name" : param , "value" : params [param ]})
125+ result .append ({"name" : param , "value" : str (params [param ])}) if isinstance (params [param ],
126+ list ) else result .append (
127+ {"name" : param , "value" : params [param ]})
124128 return result
125129
130+
def get_params1(file):
    """Return the raw expectation kwargs for a single validation result."""
    return file['expectation_config']['kwargs']
130134
135+
def get_test_status(file):
    """Map a validation result's success flag to an Allure status string.

    Note: only the exact boolean True counts as passed (identity check);
    any other value — including truthy non-booleans — reports 'failed'.
    """
    if file['success'] is True:
        return "passed"
    return "failed"
133138
139+
def get_test_description(file):
    """Render every entry of the validation 'result' dict except
    'observed_value' as a "\n<key>: <value>\n" line for the Allure
    test description.

    Rewritten to build the string with str.join instead of repeated
    `result = result + ...` concatenation, which is quadratic in the
    number of result entries.
    """
    details = file['result']
    return "".join(
        f"\n{key}: {details[key]}\n"
        for key in details
        if str(key) != 'observed_value'
    )
140146
141147
def get_observed_value(file):
    """Summarize the check outcome: the observed value when present,
    otherwise the unexpected-row count.

    Returns 'Column not exist' when neither key can be read (e.g. the
    'result' dict is missing or lacks 'unexpected_count'), matching the
    original's broad KeyError fallback.
    """
    try:
        result = file['result']
        if 'observed_value' in result:
            return f"Observed value: {result['observed_value']}"
        return f"Unexpected count: {result['unexpected_count']}"
    except KeyError:
        return 'Column not exist'
147154
148155
149-
def get_exception_message(file):
    """Exception message recorded by Great Expectations for this result."""
    exception_info = file['exception_info']
    return exception_info['exception_message']
152158
159+
def get_exception_traceback(file):
    """Exception traceback recorded by Great Expectations for this result."""
    exception_info = file['exception_info']
    return exception_info['exception_traceback']
155162
156163
157-
def get_folder_key(folder, folder_key):
    """Create an S3 'folder' (zero-byte object whose key ends in '/') named
    <folder><folder_key>/ in the module-level bucket, then hand back
    *folder_key* unchanged for the caller to reuse."""
    bucket.put_object(Key=f"{folder}{folder_key}/")
    return folder_key
165169
166170
def create_categories_json(json_name, key):
    """Upload the static Allure categories.json — the mapping from test
    statuses to report categories — for this report run to S3."""
    categories = [
        {"name": "Ignored tests", "matchedStatuses": ["skipped"]},
        {"name": "Passed tests", "matchedStatuses": ["passed"]},
        {"name": "Broken tests", "matchedStatuses": ["broken"]},
        {"name": "Failed tests", "matchedStatuses": ["failed"]},
    ]
    body = json.dumps(categories).encode("UTF-8")
    s3.Object(qa_bucket, f"allure/{json_name}{key}/result/categories.json").put(Body=body)
199201
200202
201-
202- def get_uuid (i , json_name ,key ):
203+ def get_uuid (i , json_name , key ):
203204 fl = ""
204- objs = list (bucket .objects .filter (Prefix = ' allure/' + json_name + key + ' /allure-report/history' ))
205- if (len (objs )> 0 ):
205+ objs = list (bucket .objects .filter (Prefix = f" allure/{ json_name } { key } /allure-report/history" ))
206+ if (len (objs ) > 0 ):
206207
207- df = wr .s3 .read_json (path = [' s3://' + qa_bucket + ' /allure/' + json_name + key + ' /allure-report/history/history.json' ])
208+ df = wr .s3 .read_json (path = [f" s3://{ qa_bucket } /allure/{ json_name } { key } /allure-report/history/history.json" ])
208209
209210 fl = json .loads (df .to_json ())
210211 keys = list (fl .keys ())
@@ -214,58 +215,55 @@ def get_uuid(i, json_name,key):
214215 return datetime .now ().strftime ("%S%f" )
215216
216217
def create_suit_json(json_name, key, validate_id):
    """Build one Allure result JSON per expectation in a validation file and
    upload each to s3://<qa_bucket>/allure/<json_name><key>/result/.

    Reads the validation output written by Great Expectations, translates
    every entry of its 'results' list into the Allure test-result schema
    (status, labels, links, description, steps), and writes
    <uuid>-result.json objects that the Allure CLI later assembles into a
    report.
    """
    # Ensure the destination "folder" exists before uploading results.
    bucket.put_object(Key=f"allure/{json_name}{key}/result/")

    file = get_json(json_name, validate_id)
    start_time = get_start_suit_time(file)
    stop_time = get_stop_test_time(file)

    for i in file['results']:
        uuid = str(get_uuid(list(file['results']).index(i), json_name, key))
        status = get_test_status(i)
        data = {
            "uuid": uuid,
            "historyId": uuid,
            "status": status,
            "parameters": get_params(i),
            "labels": [
                {"name": "test", "value": get_test_name(i)},
                {"name": "suite", "value": get_suit_name(file, i)},
                {"name": "severity", "value": get_severity(i)},
            ],
            "links": [get_jira_ticket(i)],
            "name": get_test_name(i),
            "description": get_test_description(i),
            "statusDetails": {
                "known": False,
                "muted": False,
                "flaky": False,
                # Only failed tests carry the observed-value message.
                "message": get_observed_value(i) if status == 'failed' else "",
                "trace": get_exception_traceback(i),
            },
            "start": start_time,
            "stop": stop_time,
            "steps": [
                {
                    "status": status,
                    "name": get_test_human_name(i),
                    "start": get_start_test_time(file),
                    "stop": get_stop_test_time(file),
                }
            ],
        }

        result = json.dumps(data)
        s3.Object(qa_bucket, f"allure/{json_name}{key}/result/{uuid}-result.json").put(
            Body=result.encode('UTF-8'))
266264
267265
268- def transfer_folder (root_src_dir ,root_dst_dir ):
266+ def transfer_folder (root_src_dir , root_dst_dir ):
269267 for src_dir , dirs , files in os .walk (root_src_dir ):
270268 dst_dir = src_dir .replace (root_src_dir , root_dst_dir , 1 )
271269 if not os .path .exists (dst_dir ):
@@ -281,16 +279,8 @@ def transfer_folder(root_src_dir,root_dst_dir):
281279 shutil .copy (src_file , dst_dir )
282280
283281
284-
285-
286-
def create_json_report(json_name, cloudfront, folder_key, validate_id):
    """Generate the Allure inputs (per-test results + categories.json) for one
    validation run and return (report URL, report S3 path suffix)."""
    key = "/" + get_folder_key(f"allure/{json_name}/", folder_key)
    create_suit_json(json_name, key, validate_id)
    create_categories_json(json_name, key)
    report_url = f"{cloudfront}/allure/{json_name}{key}/allure-report/index.html"
    return report_url, json_name + key
0 commit comments