sonarqube_download_report.py
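
"""Download a SonarQube/SonarCloud analysis report for a branch or pull
request: all issues, all security hotspots, and the rules those hotspots
reference, merged into a single JSON file."""
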
import json
import os
import urllib.parse
from typing import Optional

import requests  # type: ignore[import-untyped]


def download_report(
    sonarqube_host_url: str,
    sonarqube_token: str,
    sonarqube_org: str,
    sonarqube_project: str,
    branch: Optional[str],
    pull_request_id: Optional[str],
    report_path: str,
):
    """Fetch all issues and security hotspots for one branch or pull
    request, plus the rules the hotspots reference, and write the merged
    result to report_path as JSON."""
    headers = {"Authorization": f"Bearer {sonarqube_token}"}
    # A pull request id takes precedence over a branch name; one of the
    # two must be provided.
    additional_search_params: dict[str, str] = {}
    if pull_request_id:
        additional_search_params["pullRequest"] = pull_request_id
    elif branch:
        additional_search_params["branch"] = branch
    else:
        raise ValueError("Branch or pull request id is required")
    issues = _get_all_pages(
        # https://sonarcloud.io/web_api/api/issues/search?deprecated=false
        f"{sonarqube_host_url}/api/issues/search?"
        + urllib.parse.urlencode(
            {
                "additionalFields": "_all",
                "organization": sonarqube_org,
                "projects": sonarqube_project,
                **additional_search_params,
            }
        ),
        headers,
        "issues",
        ["issues", "components", "rules"],
    )
    hotspots = _get_all_pages(
        # https://sonarcloud.io/web_api/api/hotspots/search?deprecated=false
        f"{sonarqube_host_url}/api/hotspots/search?"
        + urllib.parse.urlencode(
            {
                "projectKey": sonarqube_project,
                **additional_search_params,
            }
        ),
        headers,
        "hotspots",
        ["hotspots", "components"],
    )
    # The hotspots response references rules by key only, so fetch the rule
    # details separately. Skip the call when there are no hotspots, to avoid
    # querying rules/search with an empty rule_keys filter.
    hotspot_rules: dict = {"rules": []}
    if hotspots["hotspots"]:
        hotspot_rules = _get_all_pages(
            # https://sonarcloud.io/web_api/api/rules/search?deprecated=false
            f"{sonarqube_host_url}/api/rules/search?"
            + urllib.parse.urlencode(
                {
                    "organization": sonarqube_org,
                    "rule_keys": ",".join(
                        sorted({hotspot["ruleKey"] for hotspot in hotspots["hotspots"]})
                    ),
                    "f": "name,lang,severity",
                }
            ),
            headers,
            "rules",
            ["rules"],
        )
    # Merge the hotspot data into the issues payload so the report has the
    # single shape of an issues/search response.
    report = issues
    report["issues"] += hotspots["hotspots"]
    report["components"] += hotspots["components"]
    report["rules"] += hotspot_rules["rules"]
    with open(report_path, "w") as f:
        json.dump(report, f, indent=2)


def _get_all_pages(
    partial_url: str, headers: dict, stop_prop_name: str, merge_props: list[str]
) -> dict:
    """Fetch every page of a paginated SonarQube endpoint, concatenating the
    list properties named in merge_props across pages; stop once the page's
    stop_prop_name list comes back empty."""
    result = {}
    page = 1
    while True:
        # ps=500 is the documented maximum page size for these endpoints.
        with requests.get(f"{partial_url}&p={page}&ps=500", headers=headers) as r:
            r.raise_for_status()
            page_data = r.json()
        if page == 1:
            result = page_data
        else:
            for merge_prop in merge_props:
                result[merge_prop] += page_data[merge_prop]
        if len(page_data[stop_prop_name]) == 0:
            break
        page += 1
    return result
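

# Example invocation (hypothetical values shown; the token, org, project,
# and PR id are placeholders, not values from this repository):
#   SONARQUBE_HOST_URL=https://sonarcloud.io \
#   SONARQUBE_TOKEN=<token> \
#   SONARQUBE_ORG=my-org \
#   SONARQUBE_PROJECT=my-project \
#   PULL_REQUEST_ID=42 \
#   REPORT_PATH=report.json \
#   python sonarqube_download_report.py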
if __name__ == "__main__":
    download_report(
        os.environ["SONARQUBE_HOST_URL"],
        os.environ["SONARQUBE_TOKEN"],
        os.environ["SONARQUBE_ORG"],
        os.environ["SONARQUBE_PROJECT"],
        os.environ.get("BRANCH"),
        os.environ.get("PULL_REQUEST_ID"),
        os.environ["REPORT_PATH"],
    )