Skip to content

Commit ce3e7dc

Browse files
authored
Merge pull request #11 from suskind/export_findings_to_defect_dojo
script to export findings to DefectDojo Generic JSON format
2 parents 2bc54ba + 7cd9ad7 commit ce3e7dc

1 file changed

Lines changed: 97 additions & 0 deletions

File tree

findings_to_defectdojo.py

Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
#!/usr/bin/env python
2+
"""
3+
Export all findings in a target to DefectDojo JSON format
4+
5+
Choose "Generic Findings Import" under "Findings" > "Import Scan Results" > "Scan type"
6+
7+
Run:
8+
9+
$ python3 findings_to_defectdojo.py -t '<TARGET_ID>' -o <OUTPUT_FILE_PATH>
10+
11+
"""
12+
import argparse
13+
import requests
14+
import json
15+
from urllib.parse import urljoin
16+
from datetime import datetime
17+
18+
# JWT used to authenticate against the Probely API.
# Leave as None to be prompted for the token interactively at run time.
jwt_token = None

# Base URL of the Probely API. The endpoint templates below carry a
# "{target}" placeholder that is filled in with the target id via
# str.format() when a request is made.
api_base_url = 'https://api.probely.com'
target_endpoint = urljoin(api_base_url, "targets/{target}/")
finding_list_endpoint = urljoin(api_base_url, "targets/{target}/findings/")
25+
def map_severity(probely_severity):
    """Translate a Probely numeric severity code into a DefectDojo label.

    Returns 'Low', 'Medium' or 'High' for the codes 10, 20 and 30
    respectively, and None for any other value.
    """
    labels = {10: 'Low', 20: 'Medium', 30: 'High'}
    return labels.get(probely_severity)
34+
35+
def main():
    """Export all findings of a Probely target as DefectDojo Generic JSON.

    Reads the target id and output path from the command line, prompts
    for the API token when the module-level ``jwt_token`` is not set,
    fetches the target metadata and its findings from the Probely API,
    and writes a document suitable for DefectDojo's
    "Generic Findings Import" scan type to the output file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--target', help='Target id', required=True)
    parser.add_argument('-o', '--output', help='Output file',
                        type=argparse.FileType('w'), required=True)
    args = parser.parse_args()

    token = jwt_token if jwt_token is not None else input("API Token:")
    if not token:
        print('Error: JWT is required')
        return
    headers = {'Authorization': f"JWT {token}"}

    # Target metadata, used only to label the export.
    response_target = requests.get(
        target_endpoint.format(target=args.target),
        headers=headers,
    )
    response_target.raise_for_status()
    target_res = response_target.json()

    target_name = target_res['site'].get('name')
    target_url = target_res['site'].get('url')
    print(f'Exporting findings: {target_name} - {target_url}')

    # Findings. NOTE(review): only one page of up to 500 findings is
    # fetched; a target with more findings would need pagination here.
    response = requests.get(
        finding_list_endpoint.format(target=args.target),
        headers=headers,
        params={'length': 500},
    )
    response.raise_for_status()
    findings_res = response.json()['results']

    result = {
        'name': f'{target_name} - {target_url}',
        'findings': [_to_defectdojo_finding(f) for f in findings_res],
    }

    # argparse.FileType only opens the file; close it deterministically.
    with args.output as output:
        output.write(json.dumps(result, indent=2))
    print('Done')


def _to_defectdojo_finding(finding):
    """Map one Probely finding dict to a DefectDojo Generic JSON finding."""
    return {
        'title': finding['definition']['name'],
        'unique_id_from_tool': finding['id'],
        'description': finding['definition']['desc'],
        'severity': map_severity(finding['severity']),
        'mitigation': finding['fix'],
        # Probely timestamps look like 2020-01-01T12:00:00.000000Z;
        # DefectDojo only needs the calendar date.
        'date': datetime.strptime(
            finding['last_found'], "%Y-%m-%dT%H:%M:%S.%fZ"
        ).strftime("%Y-%m-%d"),
        'cve': None,
        'cwe': None,
        'cvssv3': finding['cvss_vector'],
        'file_path': finding['path'],
        'endpoints': [finding['path']],
        # Only findings still present ('notfixed') count as active.
        'active': finding['state'] == 'notfixed',
        'verified': True,
        # Probely marks false positives with state 'invalid'.
        'false_p': finding['state'] == 'invalid',
    }
95+
96+
# Run the export only when executed as a script, not when imported.
if __name__ == '__main__':
    main()

0 commit comments

Comments
 (0)