From 110d15d09f572df0f06d9c9fb18fd8b6baae3dcf Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Fri, 28 Mar 2025 12:51:51 +0100 Subject: [PATCH 01/29] Audit usage of Scanner for .NET 9,.2.x that has a vuln (#1606) * Fixes #1594 * Audits usage of Scanner for .NET 9.2.x that has a vuln * Add audit rule for #1594 * Formatting --- doc/sonar-audit.md | 1 + sonar/audit/rules.json | 5 +++++ sonar/audit/rules.py | 1 + sonar/tasks.py | 28 ++++++++++++++++------------ test/unit/test_issues.py | 2 +- test/unit/utilities.py | 1 + 6 files changed, 25 insertions(+), 13 deletions(-) diff --git a/doc/sonar-audit.md b/doc/sonar-audit.md index 786dc503..32905d7f 100644 --- a/doc/sonar-audit.md +++ b/doc/sonar-audit.md @@ -193,6 +193,7 @@ sonar-audit --what projects -f projectsAudit.csv --csvSeparator ';' - Last background task with failed SCM detection - Last background task on main branch `FAILED` - Last analysis with an obsolete scanner version (by default more than 2 years old) + - Last analysis was with Scanner for .Net 9.2.x that has a vulnerability - Projects analyzed with apparently a wrong scanner (Can't be certain in all cases) - Projects with too many analysis history data points (due to wrong housekeeping settings or wrong usage of `sonar.projectVersion`) diff --git a/sonar/audit/rules.json b/sonar/audit/rules.json index 371e17d5..a4850c19 100644 --- a/sonar/audit/rules.json +++ b/sonar/audit/rules.json @@ -486,6 +486,11 @@ "type": "OPERATIONS", "message": "{} was last analyzed with {} version {} released on {}, it is obsolete, strongly consider updating the scanner" }, + "VULNERABLE_DOTNET_SCANNER": { + "severity": "HIGH", + "type": "OPERATIONS", + "message": "{} was last analyzed with Scanner for .NET version {} which has a vulnerability, strongly consider updating the scanner" + }, "NOT_LATEST_SCANNER": { "severity": "MEDIUM", "type": "OPERATIONS", diff --git a/sonar/audit/rules.py b/sonar/audit/rules.py index af2ea256..db93de14 100644 --- a/sonar/audit/rules.py +++ 
b/sonar/audit/rules.py @@ -137,6 +137,7 @@ class RuleId(enum.Enum): PROJ_HISTORY_COUNT = 1314 PROJ_TOO_MANY_ACCEPTED = 1315 PROJ_TOO_MANY_FP = 1316 + VULNERABLE_DOTNET_SCANNER = 1317 NOT_USING_BRANCH_ANALYSIS = 1400 SIF_UNDETECTED_SCM = 1401 diff --git a/sonar/tasks.py b/sonar/tasks.py index fc22083f..e755ed0f 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -319,25 +319,27 @@ def __audit_scanner_version(self, audit_settings: types.ConfigSettings) -> list[ context = self.scanner_context() scanner_type = context.get("sonar.scanner.app", None) scanner_version = context.get("sonar.scanner.appVersion", None) + proj = self.concerned_object log.debug("Scanner type = %s, Scanner version = %s", scanner_type, scanner_version) if not scanner_version: log.warning( "%s has been scanned with scanner '%s' with no version, skipping check scanner version obsolescence", - str(self.concerned_object), + str(proj), scanner_type, ) return [] if scanner_type not in SCANNER_VERSIONS: log.warning( "%s has been scanned with scanner '%s' which is not inventoried, skipping check on scanner obsolescence", - str(self.concerned_object), + str(proj), scanner_type, ) return [] if scanner_type == "Ant": - return [Problem(get_rule(RuleId.ANT_SCANNER_DEPRECATED), self.concerned_object, str(self.concerned_object))] + return [Problem(get_rule(RuleId.ANT_SCANNER_DEPRECATED), proj, str(proj))] + problems = [] if scanner_type in ("ScannerGradle", "ScannerMaven"): scanner_version = scanner_version.split("/")[0].replace("-SNAPSHOT", "") scanner_version = [int(n) for n in scanner_version.split(".")] @@ -361,18 +363,20 @@ def __audit_scanner_version(self, audit_settings: types.ConfigSettings) -> list[ tuple_version_list = [tuple(int(n) for n in v.split(".")) for v in versions_list] tuple_version_list.sort(reverse=True) - delta_days = (datetime.datetime.today() - release_date).days index = tuple_version_list.index(scanner_version) + + log.debug("Auditing Scanner for .NET v9.2.x") + if scanner_type == 
"ScannerMSBuild" and scanner_version[0:2] == (9, 2): + problems.append(Problem(get_rule(RuleId.VULNERABLE_DOTNET_SCANNER), proj, str(proj), str_version)) + log.debug("Scanner used is %d versions old", index) - if delta_days <= audit_settings.get("audit.projects.scannerMaxAge", 730): - return [] - rule = get_rule(RuleId.OBSOLETE_SCANNER) if index >= 3 else get_rule(RuleId.NOT_LATEST_SCANNER) - return [ - Problem( - rule, self.concerned_object, str(self.concerned_object), scanner_type, str_version, util.date_to_string(release_date, with_time=False) - ) - ] + if delta_days > audit_settings.get("audit.projects.scannerMaxAge", 730): + rule = get_rule(RuleId.OBSOLETE_SCANNER) if index >= 3 else get_rule(RuleId.NOT_LATEST_SCANNER) + release_date = util.date_to_string(release_date, with_time=False) + problems.append(Problem(rule, proj, str(proj), scanner_type, str_version, release_date)) + + return problems def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: """Audits a background task and returns the list of found problems""" diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index eae7cb7f..65ab6bda 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -232,7 +232,7 @@ def test_search_by_large() -> None: assert len(issues.search_by_project(tutil.SQ, "pytorch")) > 10000 params = {"components": "pytorch", "project": "pytorch"} - + # Versions below 10.4 did not have enough python rules to break the 10K limit on the pytorch project if tutil.SQ.version() >= (10, 4, 0): with pytest.raises(issues.TooManyIssuesError): diff --git a/test/unit/utilities.py b/test/unit/utilities.py index 08cd1101..22705c25 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -151,6 +151,7 @@ def __get_args_and_file(string_arguments: str) -> tuple[Optional[str], list[str] file = None return file, args + def __get_redacted_cmd(string_arguments: str) -> str: """Gets a cmd line and redacts the token""" args = string_arguments.split(" ") 
From 8517dfcbffc02b725cfeb2b738d7eba757071384 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Mon, 31 Mar 2025 17:44:01 +0200 Subject: [PATCH 02/29] Pass dummy privateKey when updating GH ALM DOPFixes #1604 (#1607) * Fixes #1604 * Add class attributes typing --- sonar/devops.py | 54 ++++++++++++++++++++++++------------------------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/sonar/devops.py b/sonar/devops.py index debb5cf2..5d5c0712 100644 --- a/sonar/devops.py +++ b/sonar/devops.py @@ -20,7 +20,7 @@ """Abstraction of the SonarQube DevOps platform concept""" from __future__ import annotations -from typing import Optional +from typing import Optional, Union from http import HTTPStatus import json @@ -57,9 +57,9 @@ class DevopsPlatform(sq.SqObject): def __init__(self, endpoint: platform.Platform, key: str, platform_type: str) -> None: """Constructor""" super().__init__(endpoint=endpoint, key=key) - self.type = platform_type #: DevOps platform type - self.url = None #: DevOps platform URL - self._specific = None #: DevOps platform specific settings + self.type: str = platform_type #: DevOps platform type + self.url: Union[str, None] = None #: DevOps platform URL + self._specific: Union[dict[str, str], None] = None #: DevOps platform specific settings DevopsPlatform.CACHE.put(self) log.debug("Created object %s", str(self)) @@ -91,9 +91,8 @@ def create(cls, endpoint: platform.Platform, key: str, plt_type: str, url_or_wor params = {"key": key} try: if plt_type == "github": - params.update( - {"appId": _TO_BE_SET, "clientId": _TO_BE_SET, "clientSecret": _TO_BE_SET, "privateKey": _TO_BE_SET, "url": url_or_workspace} - ) + params.update({k: _TO_BE_SET for k in ("appId", "clientId", "clientSecret", "privateKey")}) + params["url"] = url_or_workspace endpoint.post(_CREATE_API_GITHUB, params=params) elif plt_type == "azure": # TODO: pass secrets on the cmd line @@ -135,7 +134,6 @@ def refresh(self) -> bool: """Reads / Refresh a DevOps platform information 
:return: Whether the operation succeeded - :rtype: bool """ data = json.loads(self.get(DevopsPlatform.API[c.LIST]).text) for alm_data in data.get(self.type, {}): @@ -149,7 +147,6 @@ def to_json(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr :param ConfigSettings export_settings: Config params for the export :return: The configuration of the DevOps platform (except secrets) - :rtype: dict """ json_data = {"key": self.key, "type": self.type, "url": self.url} json_data.update(self.sq_json.copy()) @@ -165,10 +162,9 @@ def set_pat(self, pat: str, user_name: Optional[str] = None) -> bool: def update(self, **kwargs) -> bool: """Updates a DevOps platform with information from data - :param dict data: data to update the DevOps platform configuration - (url, clientId, workspace, appId depending on the type of platform) + :param dict kwargs: data to update the DevOps platform configuration + (url, clientId, workspace, appId, privateKey, "clientSecret" depending on the type of platform) :return: Whether the operation succeeded - :rtype: bool """ alm_type = kwargs["type"] if alm_type != self.type: @@ -176,22 +172,27 @@ def update(self, **kwargs) -> bool: return False params = {"key": self.key, "url": kwargs["url"]} + additional = () if alm_type == "bitbucketcloud": - params.update({"clientId": kwargs["clientId"], "workspace": kwargs["workspace"]}) + additional = ("clientId", "workspace") elif alm_type == "github": - params.update({"clientId": kwargs["clientId"], "appId": kwargs["appId"]}) - - ok = self.post(f"alm_settings/update_{alm_type}", params=params).ok - self.url = kwargs["url"] - self._specific = {k: v for k, v in params.items() if k not in ("key", "url")} + additional = ("clientId", "appId", "privateKey", "clientSecret") + for k in additional: + params[k] = kwargs.get(k, _TO_BE_SET) + try: + ok = self.post(f"alm_settings/update_{alm_type}", params=params).ok + self.url = kwargs["url"] + self._specific = {k: v for k, v in params.items() if k not in 
("key", "url")} + except (ConnectionError, RequestException) as e: + util.handle_error(e, f"updating devops platform {self.key}/{alm_type}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + ok = False return ok def count(endpoint: platform.Platform, platf_type: Optional[str] = None) -> int: """ - :param str platf_type: Filter for a specific type, defaults to None (see DEVOPS_PLATFORM_TYPES set) + :param platf_type: Filter for a specific type, defaults to None (see DEVOPS_PLATFORM_TYPES set) :return: Count of DevOps platforms - :rtype: int """ get_list(endpoint=endpoint) if platf_type is None: @@ -203,7 +204,7 @@ def count(endpoint: platform.Platform, platf_type: Optional[str] = None) -> int: def get_list(endpoint: platform.Platform) -> dict[str, DevopsPlatform]: """Reads all DevOps platforms from SonarQube - :param platform.Platform endpoint: Reference to the SonarQube platform + :param endpoint: Reference to the SonarQube platform :return: List of DevOps platforms :rtype: dict{: } """ @@ -218,10 +219,9 @@ def get_list(endpoint: platform.Platform) -> dict[str, DevopsPlatform]: def get_object(endpoint: platform.Platform, key: str) -> DevopsPlatform: """ - :param platform.Platform endpoint: Reference to the SonarQube platform - :param str devops_platform_key: Key of the platform (its name) + :param endpoint: Reference to the SonarQube platform + :param key: Key of the devops platform (its name) :return: The DevOps platforms corresponding to key, or None if not found - :rtype: DevopsPlatform """ if len(DevopsPlatform.CACHE) == 0: get_list(endpoint) @@ -230,10 +230,9 @@ def get_object(endpoint: platform.Platform, key: str) -> DevopsPlatform: def exists(endpoint: platform.Platform, key: str) -> bool: """ - :param platform.Platform endpoint: Reference to the SonarQube platform - :param str devops_platform_key: Key of the platform (its name) + :param endpoint: Reference to the SonarQube platform + :param key: Key of the devops platform (its name) :return: Whether the 
platform exists - :rtype: bool """ return get_object(endpoint=endpoint, key=key) is not None @@ -278,7 +277,6 @@ def import_config(endpoint: platform.Platform, config_data: types.ObjectJsonRepr def devops_type(endpoint: platform.Platform, key: str) -> Optional[str]: """ :return: The type of a DevOps platform (see DEVOPS_PLATFORM_TYPES), or None if not found - :rtype: str or None """ o = get_object(endpoint=endpoint, key=key) if o is None: From 7a3bb3e5a1da374fa1b5066a35185f65aa4489d3 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Mon, 31 Mar 2025 17:57:31 +0200 Subject: [PATCH 03/29] Disallow-set-type-on-mqr (#1611) * Fixes #1604 * Add class attributes typing * Fixes #1608 * Fixes #1610 * Add test for #1610 * Add stop of platforms once tests are done * Quality pass --- conf/run_tests.sh | 9 +++++---- sonar/issues.py | 4 +++- sonar/platform.py | 6 ++++++ sonar/settings.py | 11 +++++------ test/unit/test_issues.py | 16 ++++++++++------ 5 files changed, 29 insertions(+), 17 deletions(-) diff --git a/conf/run_tests.sh b/conf/run_tests.sh index 277a41d3..70f9cc18 100755 --- a/conf/run_tests.sh +++ b/conf/run_tests.sh @@ -30,13 +30,14 @@ echo "Running tests" "$CONFDIR/prep_tests.sh" -export SONAR_HOST_URL=${1:-${SONAR_HOST_URL}} - -for target in latest lts latest-ce lts-ce cloud +for target in latest cb lts 9 9-ce cloud do + sonar start -i $target && sleep 30 if [ -d "$ROOTDIR/test/$target/" ]; then coverage run --branch --source="$ROOTDIR" -m pytest "$ROOTDIR/test/$target/" --junit-xml="$buildDir/xunit-results-$target.xml" coverage xml -o "$buildDir/coverage-$target.xml" fi + if [ "$target" != "latest" ]; then + sonar stop -i $target + fi done - diff --git a/sonar/issues.py b/sonar/issues.py index dec2342a..69a29670 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -39,7 +39,7 @@ from sonar.util.types import ApiParams, ApiPayload, ObjectJsonRepr, ConfigSettings -from sonar import users, findings, changelog, projects, rules +from sonar import users, findings, 
changelog, projects, rules, exceptions import sonar.utilities as util COMPONENT_FILTER_OLD = "componentKeys" @@ -352,6 +352,8 @@ def set_type(self, new_type: str) -> bool: :return: Whether the operation succeeded :rtype: bool """ + if self.endpoint.is_mqr_mode(): + raise exceptions.UnsupportedOperation("Setting issue type is not supported in MQR mode") log.debug("Changing type of issue %s from %s to %s", self.key, self.type, new_type) try: r = self.post("issues/set_type", {"issue": self.key, "type": new_type}) diff --git a/sonar/platform.py b/sonar/platform.py index 8b01e18f..d5f31d94 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -705,6 +705,12 @@ def _audit_lta_latest(self) -> list[Problem]: # pylint: disable-next=E0606 return [Problem(rule, self.url, ".".join([str(n) for n in sq_vers]), ".".join([str(n) for n in v]))] + def is_mqr_mode(self) -> bool: + """Returns whether the platform is in MQR mode""" + if self.version() >= (10, 8, 0): + return self.get_setting(settings.MQR_ENABLED) + return self.version() >= (10, 2, 0) + # --------------------- Static methods ----------------- # this is a pointer to the module object instance itself. 
diff --git a/sonar/settings.py b/sonar/settings.py index 78aac07a..6ad1f096 100644 --- a/sonar/settings.py +++ b/sonar/settings.py @@ -60,9 +60,11 @@ COMPONENT_VISIBILITY = "visibility" PROJECT_DEFAULT_VISIBILITY = "projects.default.visibility" AI_CODE_FIX = "sonar.ai.suggestions.enabled" +MQR_ENABLED = "sonar.multi-quality-mode.enabled" DEFAULT_BRANCH = "-DEFAULT_BRANCH-" +_GLOBAL_SETTINGS_WITHOUT_DEF = (AI_CODE_FIX, MQR_ENABLED) _SQ_INTERNAL_SETTINGS = ( "sonaranalyzer", @@ -293,7 +295,7 @@ def is_global(self) -> bool: if self.component: return False if self._is_global is None: - self._is_global = self.definition() is not None or self.key == AI_CODE_FIX + self._is_global = self.definition() is not None or self.key in _GLOBAL_SETTINGS_WITHOUT_DEF return self._is_global def is_internal(self) -> bool: @@ -306,10 +308,7 @@ def is_internal(self) -> bool: if categ in ("languages", "analysisScope", "tests", "authentication"): return True - for prefix in internal_settings: - if self.key.startswith(prefix): - return True - return False + return any(self.key.startswith(prefix) for prefix in internal_settings) def is_settable(self) -> bool: """Returns whether a setting can be set""" @@ -354,7 +353,7 @@ def category(self) -> tuple[str, str]: m = re.match(r"^sonar\.forceAuthentication$", self.key) if m: return (AUTH_SETTINGS, None) - if self.key not in (NEW_CODE_PERIOD, PROJECT_DEFAULT_VISIBILITY, COMPONENT_VISIBILITY) and not re.match( + if self.key not in (NEW_CODE_PERIOD, PROJECT_DEFAULT_VISIBILITY, MQR_ENABLED, COMPONENT_VISIBILITY) and not re.match( r"^(email|sonar\.core|sonar\.allowPermission|sonar\.builtInQualityProfiles|sonar\.ai|" r"sonar\.cpd|sonar\.dbcleaner|sonar\.developerAggregatedInfo|sonar\.governance|sonar\.issues|sonar\.lf|sonar\.notifications|" r"sonar\.portfolios|sonar\.qualitygate|sonar\.scm\.disabled|sonar\.scm\.provider|sonar\.technicalDebt|sonar\.validateWebhooks|" diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 
65ab6bda..54d22bbd 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -25,7 +25,7 @@ import pytest import utilities as tutil -from sonar import issues +from sonar import issues, exceptions from sonar import utilities as util from sonar.util import constants as c @@ -108,11 +108,15 @@ def test_set_type() -> None: issue = list(issues_d.values())[0] old_type = issue.type new_type = c.VULN if old_type == c.BUG else c.BUG - assert issue.set_type(new_type) - issue.refresh() - assert issue.type == new_type - assert not issue.set_type("NON_EXISTING") - issue.set_type(old_type) + if tutil.SQ.is_mqr_mode(): + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_type(new_type) + else: + assert issue.set_type(new_type) + issue.refresh() + assert issue.type == new_type + assert not issue.set_type("NON_EXISTING") + issue.set_type(old_type) def test_assign() -> None: From 696629f17cac4e7afa5186f01ea8f0ff50b637ed Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Fri, 4 Apr 2025 18:15:54 +0200 Subject: [PATCH 04/29] CB tests (#1612) * Version is a tuple * Fixes #1613 * Compat main branch name with CB * Leverage run_success_cmd * Add test/9 dirs for tests * use get_main_branch() rather than proj.main_branch().name * Add CLI args index tools * Simplify code * Adapt to CB * Adapt to CB * Formatting * Remove function for subs patterns --- sonar-project.properties | 2 +- sonar/findings.py | 4 +--- sonar/platform.py | 6 +++--- sonar/projects.py | 24 ++++++++++++++++-------- sonar/tasks.py | 24 ++---------------------- test/unit/test_findings.py | 7 +++---- test/unit/test_issues.py | 7 +++++-- test/unit/test_projects.py | 22 ++++++++++++++-------- test/unit/test_tasks.py | 14 -------------- test/unit/utilities.py | 31 +++++++++++++++++++------------ 10 files changed, 64 insertions(+), 77 deletions(-) diff --git a/sonar-project.properties b/sonar-project.properties index f608a347..25c6adb2 100644 --- a/sonar-project.properties +++ 
b/sonar-project.properties @@ -15,7 +15,7 @@ sonar.sarifReportPaths=build/results_sarif.sarif # sonar.externalIssuesReportPaths=build/shellcheck.json,build/trivy.json # sonar.python.bandit.reportPaths=build/bandit-report.json -sonar.tests=test/latest, test/lts +sonar.tests=test/latest, test/lts, test/9, test/9-ce sonar.coverage.exclusions=setup*.py, test/**/*, conf/*2sonar.py, cli/cust_measures.py, sonar/custom_measures.py, cli/support.py, cli/projects_export.py, cli/projects_import.py sonar.cpd.exclusions=setup*.py diff --git a/sonar/findings.py b/sonar/findings.py index 22205b35..bfebcef9 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -200,9 +200,7 @@ def _load_from_search(self, jsondata: types.ApiPayload) -> None: if self.pull_request is None: self.branch = jsondata.get("branch", None) if self.branch is None: - main_branch = projects.Project.get_object(self.endpoint, self.projectKey).main_branch() - if main_branch: - self.branch = main_branch.name + self.branch = projects.Project.get_object(self.endpoint, self.projectKey).main_branch_name() else: self.branch = re.sub("^BRANCH:", "", self.branch) diff --git a/sonar/platform.py b/sonar/platform.py index d5f31d94..772b608a 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -113,9 +113,9 @@ def version(self) -> tuple[int, int, int]: if self.is_sonarcloud(): return 0, 0, 0 if self._version is None: - self._version = self.get("/api/server/version").text.split(".") - log.debug("Version = %s", self._version) - return tuple(int(n) for n in self._version[0:3]) + self._version = tuple(int(n) for n in self.get("/api/server/version").text.split(".")) + log.debug("Version = %s", str(self._version)) + return self._version[0:3] def edition(self) -> str: """ diff --git a/sonar/projects.py b/sonar/projects.py index f90441d0..cbfe38e9 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -329,15 +329,24 @@ def branches(self, use_cache: bool = True) -> dict[str, branches.Branch]: self._branches = {} return 
self._branches + def main_branch_name(self) -> str: + """ + :return: Project main branch name + """ + if self.endpoint.edition() == "community": + return self.sq_json.get("branch", "main") + b = self.main_branch() + return b.name if b else "" + def main_branch(self) -> Optional[branches.Branch]: """ :return: Main branch of the project - :rtype: Branch """ - for b in self.branches().values(): - if b.is_main(): - return b - if self.endpoint.edition() != "community": + if self.endpoint.edition() == "community": + raise exceptions.UnsupportedOperation("Main branch is not supported in Community Edition") + try: + return next(b for b in self.branches().values() if b.is_main()) + except StopIteration: log.warning("Could not find main branch for %s", str(self)) return None @@ -854,9 +863,7 @@ def get_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, obje def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, int]: if filters: - for k in "branch", "pullRequest": - if k in filters: - filters[k] = [filters[k]] + filters = {k: [v] for k, v in filters.items() if k in ("branch", "pullRequest")} branches_or_prs = self.get_branches_and_prs(filters) if branches_or_prs is None: return super().count_third_party_issues(filters) @@ -870,6 +877,7 @@ def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> if k not in issue_counts: issue_counts[k] = 0 issue_counts[k] += total + log.debug("Issues count = %s", str(issue_counts)) return issue_counts def __sync_community(self, another_project: object, sync_settings: types.ConfigSettings) -> tuple[list[dict[str, str]], dict[str, int]]: diff --git a/sonar/tasks.py b/sonar/tasks.py index e755ed0f..e03b3bcf 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -388,8 +388,8 @@ def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: if self.has_scanner_context(): if audit_settings.get("audit.projects.exclusions", True): context = self.scanner_context() - 
susp_exclusions = _get_suspicious_exclusions(audit_settings.get("audit.projects.suspiciousExclusionsPatterns", "")) - susp_exceptions = _get_suspicious_exceptions(audit_settings.get("audit.projects.suspiciousExclusionsExceptions", "")) + susp_exclusions = util.csv_to_list(audit_settings.get("audit.projects.suspiciousExclusionsPatterns", "")) + susp_exceptions = util.csv_to_list(audit_settings.get("audit.projects.suspiciousExclusionsExceptions", "")) for prop in ("sonar.exclusions", "sonar.global.exclusions"): if context.get(prop, None) is None: continue @@ -452,23 +452,3 @@ def search_last(endpoint: pf.Platform, component_key: str, **params) -> Optional def search_all(endpoint: pf.Platform, component_key: str, **params) -> list[Task]: """Search all background tasks of a given component""" return search(endpoint=endpoint, component_key=component_key, **params) - - -def _get_suspicious_exclusions(patterns: str) -> list[str]: - """Builds suspicious exclusions pattern list""" - global __SUSPICIOUS_EXCLUSIONS - if __SUSPICIOUS_EXCLUSIONS is not None: - return __SUSPICIOUS_EXCLUSIONS - # __SUSPICIOUS_EXCLUSIONS = [x.strip().replace('*', '\\*').replace('.', '\\.').replace('?', '\\?') - __SUSPICIOUS_EXCLUSIONS = util.csv_to_list(patterns) - return __SUSPICIOUS_EXCLUSIONS - - -def _get_suspicious_exceptions(patterns: str) -> list[str]: - """Builds suspicious exceptions patterns list""" - global __SUSPICIOUS_EXCEPTIONS - if __SUSPICIOUS_EXCEPTIONS is not None: - return __SUSPICIOUS_EXCEPTIONS - # __SUSPICIOUS_EXCEPTIONS = [x.strip().replace('*', '\\*').replace('.', '\\.').replace('?', '\\?') - __SUSPICIOUS_EXCEPTIONS = util.csv_to_list(patterns) - return __SUSPICIOUS_EXCEPTIONS diff --git a/test/unit/test_findings.py b/test/unit/test_findings.py index 5b48072b..9cc3c5cf 100644 --- a/test/unit/test_findings.py +++ b/test/unit/test_findings.py @@ -98,10 +98,9 @@ def test_findings_export_sarif_explicit() -> None: """Test SARIF export""" util.clean(util.JSON_FILE) - with 
pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", JSON_OPTS + [f"-{opt.KEYS_SHORT}", f"{util.LIVE_PROJECT}", f"--{opt.FORMAT}", "sarif"]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd( + findings_export.main, f"{CMD} {util.SQS_OPTS} --{opt.REPORT_FILE} {util.JSON_FILE} --{opt.KEYS} {util.LIVE_PROJECT} --{opt.FORMAT} sarif" + ) assert util.file_contains(util.JSON_FILE, "schemas/json/sarif-2.1.0-rtm.4") util.clean(util.JSON_FILE) diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 54d22bbd..b957e892 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -25,7 +25,7 @@ import pytest import utilities as tutil -from sonar import issues, exceptions +from sonar import issues, exceptions, logging from sonar import utilities as util from sonar.util import constants as c @@ -141,7 +141,10 @@ def test_changelog() -> None: assert str(issue) == f"Issue key '{issue_key}'" assert issue.is_false_positive() changelog_l = list(issue.changelog().values()) - nb_changes = 3 if tutil.SQ.version() >= (2025, 1, 0) else 1 + if tutil.SQ.version() >= (2025, 1, 0) or tutil.SQ.edition() == "community" and tutil.SQ.version() >= (25, 1, 0): + nb_changes = 3 + else: + nb_changes = 1 assert len(changelog_l) == nb_changes changelog = changelog_l[0] assert changelog.is_resolve_as_fp() diff --git a/test/unit/test_projects.py b/test/unit/test_projects.py index e8418e08..f4543d98 100644 --- a/test/unit/test_projects.py +++ b/test/unit/test_projects.py @@ -22,10 +22,9 @@ """ projects tests """ from collections.abc import Generator -from requests import RequestException import pytest -from sonar import projects, exceptions, qualityprofiles, qualitygates +from sonar import projects, exceptions, qualityprofiles, qualitygates, rules from sonar.audit import config import utilities as util @@ -54,9 +53,12 @@ def test_create_delete() -> None: """test_create_delete""" proj = projects.Project.create(endpoint=util.SQ, 
key=util.TEMP_KEY, name="temp") assert proj.key == util.TEMP_KEY - assert proj.main_branch().name == "main" - proj.rename_main_branch("foobar") - assert proj.main_branch().name == "foobar" + if util.SQ.edition() != "community": + assert proj.main_branch().name == "main" + proj.rename_main_branch("foobar") + assert proj.main_branch().name == "foobar" + else: + assert proj.main_branch_name() == "main" assert proj.delete() with pytest.raises(exceptions.ObjectNotFound): proj.refresh() @@ -102,10 +104,14 @@ def test_get_findings() -> None: def test_count_third_party_issues() -> None: """test_count_third_party_issues""" - proj = projects.Project.get_object(endpoint=util.SQ, key="checkstyle-issues") + proj = projects.Project.get_object(endpoint=util.SQ, key="third-party-issues") + filters = None + if util.SQ.edition() != "community": + filters = {"branch": "develop"} if util.SQ.version() >= (10, 0, 0): - assert len(proj.count_third_party_issues(filters={"branch": "develop"})) > 0 - assert len(proj.count_third_party_issues(filters={"branch": "non-existing-branch"})) == 0 + assert len(proj.count_third_party_issues(filters=filters)) > 0 + if util.SQ.edition() != "community": + assert len(proj.count_third_party_issues(filters={"branch": "non-existing-branch"})) == 0 def test_webhooks() -> None: diff --git a/test/unit/test_tasks.py b/test/unit/test_tasks.py index 9347502a..663ca564 100644 --- a/test/unit/test_tasks.py +++ b/test/unit/test_tasks.py @@ -76,20 +76,6 @@ def test_search_all_task() -> None: """test_search_all_task""" assert len(tasks.search_all_last(tutil.SQ)) > 0 - -def test_suspicious_patterns() -> None: - """test_suspicious_patterns""" - pats = "\\*\\*/[^\/]+/\\*\\*, \\*\\*/\\*[\.\w]*, \\*\\*/\\*, \\*\\*/\\*\\.(java|jav|cs|csx|py|php|js|ts|sql|html|css|cpp|c|h|hpp)\\*?" 
- s_pats = tasks._get_suspicious_exclusions(pats) - l_pats = [s.strip() for s in pats.split(",")] - logging.debug(f"{s_pats} == {l_pats}") - assert set(tasks._get_suspicious_exclusions(pats)) == set([s.strip() for s in pats.split(",")]) - assert set(tasks._get_suspicious_exclusions(None)) == set([s.strip() for s in pats.split(",")]) - pats = "\\*\\*/(__pycache__|libs|lib|vendor|node_modules)/\\*\\*" - assert set(tasks._get_suspicious_exceptions(pats)) == set([s.strip() for s in pats.split(",")]) - assert set(tasks._get_suspicious_exceptions(None)) == set([s.strip() for s in pats.split(",")]) - - # Test does not work - You can't request branch master when scan happened without the branch spec # def test_search_branch() -> None: # """test_search_branch""" diff --git a/test/unit/utilities.py b/test/unit/utilities.py index 22705c25..e0502a41 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -26,7 +26,7 @@ import os import sys import datetime -from typing import Optional +from typing import Optional, Union from unittest.mock import patch import pytest @@ -141,24 +141,31 @@ def is_url(value: str) -> bool: def __get_args_and_file(string_arguments: str) -> tuple[Optional[str], list[str]]: """Gets the list arguments and output file of a sonar-tools cmd""" - args = string_arguments.split(" ") - try: - file = args[args.index(f"--{opt.REPORT_FILE}") + 1] - except ValueError: + args = __split_args(string_arguments) + for option in (f"-{opt.REPORT_FILE_SHORT}", f"--{opt.REPORT_FILE}"): try: - file = args[args.index(f"-{opt.REPORT_FILE_SHORT}") + 1] + return args[args.index(option) + 1], args except ValueError: - file = None - return file, args + pass + return None, args + + +def __split_args(string_arguments: str) -> list[str]: + return [s for s in string_arguments.split(" ") if s != ""] + + +def __get_option_index(args: Union[str, list], option: str) -> Optional[str]: + if isinstance(args, str): + args = __split_args(args) + return args.index(option) + 1 def 
__get_redacted_cmd(string_arguments: str) -> str: """Gets a cmd line and redacts the token""" - args = string_arguments.split(" ") - - for option in (f"-{opt.TOKEN_SHORT}", f"--{opt.TOKEN}", f"-T", f"--tokenTarget"): + args = __split_args(string_arguments) + for option in (f"-{opt.TOKEN_SHORT}", f"--{opt.TOKEN}", "-T", "--tokenTarget"): try: - ndx = args.index(f"{option}") + 1 + ndx = __get_option_index(args, option) args[ndx] = util.redacted_token(args[ndx]) except ValueError: pass From f8b851b60330da4bc9d56f1625121fff397fb6b2 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Fri, 4 Apr 2025 18:17:51 +0200 Subject: [PATCH 05/29] Fix-1617 (#1619) * Version is a tuple * Fixes #1613 * Compat main branch name with CB * Leverage run_success_cmd * Add test/9 dirs for tests * use get_main_branch() rather than proj.main_branch().name * Add CLI args index tools * Simplify code * Adapt to CB * Adapt to CB * Formatting * Fixes #1617 #1564 --- sonar/sqobject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sonar/sqobject.py b/sonar/sqobject.py index 91b3d37c..51ffb179 100644 --- a/sonar/sqobject.py +++ b/sonar/sqobject.py @@ -244,7 +244,7 @@ def search_objects(endpoint: object, object_class: any, params: types.ApiParams, q = Queue(maxsize=0) for page in range(2, nb_pages + 1): new_params[p_field] = page - q.put((endpoint, api, objects_list, key_field, returned_field, object_class, new_params)) + q.put((endpoint, api, objects_list, key_field, returned_field, object_class, new_params.copy())) for i in range(threads): log.debug("Starting %s search thread %d", object_class.__name__, i) worker = Thread(target=__search_thread, args=[q]) From 89ff1d10684eba7b63f8e1f50260cd4297b73dd5 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Tue, 8 Apr 2025 18:27:07 +0200 Subject: [PATCH 06/29] Sonar-rules-improvements (#1622) * Add QP option * Add export for only a given QP --- README.md | 9 ++++-- cli/options.py | 1 + cli/rules_cli.py | 43 ++++++++++++++++----------- 
sonar/rules.py | 3 ++ test/unit/test_audit.py | 1 - test/unit/test_rules.py | 65 +++++++++++++++++------------------ test/unit/test_tasks.py | 1 + test/unit/utilities.py | 9 ++++-- 8 files changed, 70 insertions(+), 62 deletions(-) diff --git a/README.md b/README.md index d6d0b068..48ae1416 100644 --- a/README.md +++ b/README.md @@ -312,7 +312,9 @@ Exports rules from a SonarQube Server or Cloud platform configuration. Basic Usage: `sonar-rules -e -f ` - `-f`: Define the output file, if not specified `stdout` is used - `-e` or `--export`: Specify the export operation -- `-h`: Display help with teh full list of options +- `-l` or `--languages`: Export only rules of given languages (comma separated, defined by their Sonar key, not their name) +- `--qualityProfiles`: Export rules defined in a given quality profile. In this case the `--languages` option is mandatory and should specify a single language +- `-h`: Display help with the full list of options ## Required Permissions @@ -326,7 +328,10 @@ export SONAR_TOKEN=squ_83356c9b2db891d45da2a119a29cdc4d03fe654e # Exports all rules from https://sonar.acme-corp.com, in CSV or in JSON sonar-rules -f rules.csv sonar-rules -f rules.json -sonar-rules >rules.csv +# Exports rules for languages Java, C# and C++ +sonar-rules -l "java, cs, cpp" -f rules.csv +# Exports rules of quality profile "Sonar way" of language Java +sonar-rules -l java --qualityProfiles "Sonar way" >rules.csv ``` # Using sonar-tools in Docker diff --git a/cli/options.py b/cli/options.py index b4f1b5b4..abc3809f 100644 --- a/cli/options.py +++ b/cli/options.py @@ -82,6 +82,7 @@ WITH_BRANCHES = "withBranches" LANGUAGES = "languages" +QP = "qualityProfiles" PORTFOLIOS = "portfolios" diff --git a/cli/rules_cli.py b/cli/rules_cli.py index 3cf4ea83..2915c0fb 100755 --- a/cli/rules_cli.py +++ b/cli/rules_cli.py @@ -26,7 +26,7 @@ from cli import options import sonar.logging as log -from sonar import rules, platform, exceptions, errcodes, version +from sonar import 
rules, platform, exceptions, errcodes, version, qualityprofiles import sonar.utilities as util TOOL_NAME = "sonar-rules" @@ -38,8 +38,9 @@ def __parse_args(desc: str) -> object: parser = options.set_output_file_args(parser, allowed_formats=("json", "csv")) parser = options.add_language_arg(parser, "rules") parser = options.add_import_export_arg(parser, "rules", import_opt=False) - args = options.parse_and_check(parser=parser, logger_name=TOOL_NAME) - return args + """Adds the language selection option""" + parser.add_argument(f"--{options.QP}", required=False, help="Quality profile to filter rules, requires a --languages option") + return options.parse_and_check(parser=parser, logger_name=TOOL_NAME) def __write_rules_csv(file: str, rule_list: dict[str, rules.Rule], separator: str = ",") -> None: @@ -76,27 +77,35 @@ def main() -> int: endpoint = platform.Platform(**kwargs) endpoint.verify_connection() endpoint.set_user_agent(f"{TOOL_NAME} {version.PACKAGE_VERSION}") - except (options.ArgumentsError, exceptions.ObjectNotFound) as e: - util.exit_fatal(e.message, e.errcode) - file = kwargs[options.REPORT_FILE] - fmt = util.deduct_format(kwargs[options.FORMAT], file) - params = {"include_external": "false"} - if options.LANGUAGES in kwargs: - params["languages"] = kwargs[options.LANGUAGES] - rule_list = rules.get_list(endpoint=endpoint, use_cache=False, **params) + file = kwargs[options.REPORT_FILE] + fmt = util.deduct_format(kwargs[options.FORMAT], file) + params = {"include_external": "false"} + + if kwargs[options.QP] is not None: + if kwargs[options.LANGUAGES] is None and kwargs[options.QP] is not None: + util.exit_fatal(f"Option --{options.QP} requires --{options.LANGUAGES}", errcodes.ARGS_ERROR) + if len(kwargs[options.LANGUAGES]) > 1: + util.exit_fatal(f"Option --{options.QP} requires a single --{options.LANGUAGES} value", errcodes.ARGS_ERROR) + qp = qualityprofiles.get_object(endpoint=endpoint, name=kwargs[options.QP], language=kwargs[options.LANGUAGES][0]) + 
rule_list = qp.rules() + else: + if options.LANGUAGES in kwargs: + params["languages"] = kwargs[options.LANGUAGES] + rule_list = rules.get_list(endpoint=endpoint, use_cache=False, **params) - try: if fmt == "csv": __write_rules_csv(file=file, rule_list=rule_list, separator=kwargs[options.CSV_SEPARATOR]) else: __write_rules_json(file=file, rule_list=rule_list) - except (PermissionError, FileNotFoundError) as e: - util.exit_fatal(f"OS error while projects export file: {e}", exit_code=errcodes.OS_ERROR) - log.info("%d rules exported from %s", len(rule_list), endpoint.url) - util.stop_clock(start_time) - sys.exit(0) + log.info("%d rules exported from %s", len(rule_list), endpoint.url) + util.stop_clock(start_time) + sys.exit(0) + except exceptions.SonarException as e: + util.exit_fatal(e.message, e.errcode) + except OSError as e: + util.exit_fatal(f"OS error: {e}", exit_code=errcodes.OS_ERROR) if __name__ == "__main__": diff --git a/sonar/rules.py b/sonar/rules.py index cdab6e95..a75e1cf5 100644 --- a/sonar/rules.py +++ b/sonar/rules.py @@ -360,6 +360,9 @@ def get_list(endpoint: platform.Platform, use_cache: bool = True, **params) -> d lang_list = languages.get_list(endpoint).keys() incl_ext = params.pop("include_external", None) incl_ext = [incl_ext] if incl_ext else ["false", "true"] + for lang_key in lang_list: + if not languages.exists(endpoint, lang_key): + raise exceptions.ObjectNotFound(key=lang_key, message=f"Language '{lang_key}' does not exist") log.info("Getting rules for %d languages", len(lang_list)) for lang_key in lang_list: for inc in incl_ext: diff --git a/test/unit/test_audit.py b/test/unit/test_audit.py index 1785e5b9..102230ee 100644 --- a/test/unit/test_audit.py +++ b/test/unit/test_audit.py @@ -56,7 +56,6 @@ def test_audit_disabled(get_csv_file: Generator[str]) -> None: def test_audit(get_csv_file: Generator[str]) -> None: """test_audit""" file = get_csv_file - logging.debug(f"{CMD} --{opt.REPORT_FILE} {file}") util.run_success_cmd(audit.main, 
f"{CMD} --{opt.REPORT_FILE} {file}") # Ensure no duplicate alarms #1478 lines = [] diff --git a/test/unit/test_rules.py b/test/unit/test_rules.py index 3ec90c46..38f4fa6f 100644 --- a/test/unit/test_rules.py +++ b/test/unit/test_rules.py @@ -42,34 +42,17 @@ def test_rules() -> None: """test_rules""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS): - rules_cli.main() - assert int(str(e.value)) == errcodes.OK - assert util.file_not_empty(util.CSV_FILE) - util.clean(util.CSV_FILE) + util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)}') def test_rules_json_format() -> None: """test_rules_json_format""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", JSON_OPTS + [f"--{opt.FORMAT}", "json"]): - rules_cli.main() - assert int(str(e.value)) == errcodes.OK - assert util.file_not_empty(util.JSON_FILE) - util.clean(util.JSON_FILE) + util.run_success_cmd(rules_cli.main, f'{" ".join(JSON_OPTS)}') def test_rules_filter_language() -> None: """Tests that you can export rules for a single or a few languages""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.LANGUAGES}", "py,jcl"]): - rules_cli.main() - assert int(str(e.value)) == errcodes.OK - assert util.file_not_empty(util.CSV_FILE) + util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} py,jcl') with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) line = next(csvreader) @@ -86,12 +69,7 @@ def test_rules_filter_language() -> None: def test_rules_misspelled_language_1() -> None: """Tests that you can export rules for a single or a few languages, misspelled""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.LANGUAGES}", "Python,TypeScript"]): - rules_cli.main() - assert int(str(e.value)) == errcodes.OK - 
assert util.file_not_empty(util.CSV_FILE) + util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} Python,TypeScript') with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) line = next(csvreader) @@ -107,19 +85,8 @@ def test_rules_misspelled_language_1() -> None: def test_rules_misspelled_language_2() -> None: - """Tests that you can export rules for a single or a few languages, misspelled and not fixed""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.LANGUAGES}", "Python,gosu , aPex"]): - rules_cli.main() - assert int(str(e.value)) == errcodes.OK - assert util.file_not_empty(util.CSV_FILE) - with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: - fh.readline() # Skip header - for line in fh: - (_, lang, _) = line.split(",", maxsplit=2) - assert lang in ("py", "apex") - util.clean(util.CSV_FILE) + """test_rules_misspelled_language_2""" + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} "Python ,gosu, aPex"', errcodes.NO_SUCH_KEY) def test_get_rule() -> None: @@ -208,3 +175,23 @@ def test_new_taxo() -> None: else: assert my_rule.severity in rules.LEGACY_SEVERITIES assert my_rule.type in rules.LEGACY_TYPES + + +def test_non_existing_qp() -> None: + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.QP} non-existing --{opt.LANGUAGES} java', errcodes.NO_SUCH_KEY) + + +def test_non_existing_language() -> None: + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} assembly-lang', errcodes.NO_SUCH_KEY) + + +def test_qp_non_existing_language() -> None: + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.QP} "Sonar way" --{opt.LANGUAGES} javac', errcodes.NO_SUCH_KEY) + + +def test_qp_multiple_languages() -> None: + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.QP} "Sonar way" --{opt.LANGUAGES} java,c', errcodes.ARGS_ERROR) + + 
+def test_os_error() -> None: + util.run_failed_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} java,c -f /rules.csv', errcodes.OS_ERROR) diff --git a/test/unit/test_tasks.py b/test/unit/test_tasks.py index 663ca564..d5bf0ffa 100644 --- a/test/unit/test_tasks.py +++ b/test/unit/test_tasks.py @@ -76,6 +76,7 @@ def test_search_all_task() -> None: """test_search_all_task""" assert len(tasks.search_all_last(tutil.SQ)) > 0 + # Test does not work - You can't request branch master when scan happened without the branch spec # def test_search_branch() -> None: # """test_search_branch""" diff --git a/test/unit/utilities.py b/test/unit/utilities.py index e0502a41..c3c174d5 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -26,6 +26,7 @@ import os import sys import datetime +import re from typing import Optional, Union from unittest.mock import patch import pytest @@ -88,11 +89,12 @@ SONAR_WAY = "Sonar way" -def clean(*files: str) -> None: +def clean(*files: Optional[str]) -> None: """Deletes a list of file if they exists""" for file in files: try: - os.remove(file) + if file: + os.remove(file) except FileNotFoundError: pass @@ -151,7 +153,7 @@ def __get_args_and_file(string_arguments: str) -> tuple[Optional[str], list[str] def __split_args(string_arguments: str) -> list[str]: - return [s for s in string_arguments.split(" ") if s != ""] + return [s.strip('"') for s in re.findall(r'(?:[^\s\*"]|"(?:\\.|[^"])*")+', string_arguments)] def __get_option_index(args: Union[str, list], option: str) -> Optional[str]: @@ -176,6 +178,7 @@ def run_cmd(func: callable, arguments: str, expected_code: int) -> Optional[str] """Runs a sonar-tools command, verifies it raises the right exception, and returns the expected code""" logging.info("RUNNING: %s", __get_redacted_cmd(arguments)) file, args = __get_args_and_file(arguments) + clean(file) with pytest.raises(SystemExit) as e: with patch.object(sys, "argv", args): func() From 
02b4f3e5b091b759ba48b78ee22a4bd7255c187b Mon Sep 17 00:00:00 2001 From: Lukas Frystak <95630751+lukas-frystak-sonarsource@users.noreply.github.com> Date: Wed, 9 Apr 2025 09:23:24 +0200 Subject: [PATCH 07/29] Support issue synchronization in MQR mode (#1620) * Add resolve_as_accept Added the detection and decision whether to synchronize an issue that was resolved as accepted. The finding is described in this comment in issue #1616 https://github.com/okorach/sonar-tools/issues/1616#issuecomment-2775731968 * Enable issue synchronization in MQR mode The tool would throw an exception when attempting to synchronize issue status in MQR mode on SonarQube Server. However, the support for this sync is implemented. Relates to #1616, This comment: https://github.com/okorach/sonar-tools/issues/1616#issuecomment-2775947372 * Fix issue sync SQC to SQS * Support issue sync in MQR mode Fixes #1616. The 'issueStatus' field has been available since SonarQube Server 10.4 and is the preferred method for retrieving information about issue changes. Starting with SonarQube Server 10.4, the "resolution" and "status" keys are deprecated in issue changelogs but remain relevant for security hotspot changelogs. These conditions are retained for backward compatibility to support versions from SQS 9.9 to 10.3, where "resolution" and "status" are the only way to detect status changes. They are also still applicable for security hotspot changelogs. * Remove irrelevant section The docs section about permissions was duplicated, so the irrelevant one was removed. 
--- doc/sonar-findings-sync.md | 4 -- sonar/changelog.py | 107 ++++++++++++++++++++++++++++--------- sonar/issues.py | 2 - 3 files changed, 81 insertions(+), 32 deletions(-) diff --git a/doc/sonar-findings-sync.md b/doc/sonar-findings-sync.md index 21cd944d..c686002a 100644 --- a/doc/sonar-findings-sync.md +++ b/doc/sonar-findings-sync.md @@ -81,10 +81,6 @@ Issues changelog synchronization includes: When an issue could not be synchronized because of one of the above reasons, this is reported in the `sonar-findings-sync` report. Whenever a close enough issue was found but not sync'ed (because not 100% certain to be identical), the close issue is provided in the report to complete synchronization manually if desired. -## Required Permissions - -`sonar-findings-sync` needs the global `Create Projects` permission - ## Configurable behaviors When an issue is synchronized, a special comment is added on the target issue with a link to the source one, for cross checking purposes. This comment can be disabled by using the `--nolink` option diff --git a/sonar/changelog.py b/sonar/changelog.py index 10c717a8..ff6121d1 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -37,15 +37,36 @@ def __str__(self) -> str: """str() implementation""" return str(self.sq_json) + def __is_issue_status_diff(self) -> bool: + """Returns whether the changelog item contains an object with the key 'issueStatus'""" + for d in self.sq_json["diffs"]: + if d.get("key", "") == "issueStatus": + return True + return False + def __is_resolve_as(self, resolve_reason: str) -> bool: - cond1 = False - cond2 = False - for diff in self.sq_json["diffs"]: - if diff["key"] == "resolution" and "newValue" in diff and diff["newValue"] == resolve_reason: - cond1 = True - if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "RESOLVED": - cond2 = True - return cond1 and cond2 + """Returns whether the changelog item is an issue resolved as a specific reason""" + # The 'issueStatus' field has 
been available since SonarQube Server 10.4 and is the preferred + # method for retrieving information about issue changes. + # Starting with SonarQube Server 10.4, the "resolution" and "status" keys are deprecated in + # issue changelogs but remain relevant for security hotspot changelogs. + # These conditions are retained for backward compatibility to support versions from SQS 9.9 to 10.3, + # where "resolution" and "status" are the only way to detect status changes. They are also still + # applicable for security hotspot changelogs. + if self.__is_issue_status_diff(): + for diff in self.sq_json["diffs"]: + if diff["key"] == "issueStatus" and "newValue" in diff and diff["newValue"] == resolve_reason: + return True + else: + cond1 = False + cond2 = False + for diff in self.sq_json["diffs"]: + if diff["key"] == "resolution" and "newValue" in diff and diff["newValue"] == resolve_reason: + cond1 = True + if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "RESOLVED": + cond2 = True + return cond1 and cond2 + return False def is_resolve_as_fixed(self) -> bool: """Returns whether the changelog item is an issue resolved as fixed""" @@ -53,35 +74,57 @@ def is_resolve_as_fixed(self) -> bool: def is_resolve_as_fp(self) -> bool: """Returns whether the changelog item is an issue resolved as false positive""" - return self.__is_resolve_as("FALSE-POSITIVE") + # Finding "is resolve as false positive" requires "FALSE-POSITIVE" on SonarQube + # Server 9.9 and "FALSE_POSITIVE" on SonarQube Server 2025.1 and SonarQube Cloud. 
+ cond1 = self.__is_resolve_as("FALSE-POSITIVE") + cond2 = self.__is_resolve_as("FALSE_POSITIVE") + return cond1 or cond2 def is_resolve_as_wf(self) -> bool: """Returns whether the changelog item is an issue resolved as won't fix""" return self.__is_resolve_as("WONTFIX") + def is_resolve_as_accept(self) -> bool: + """Returns whether the changelog item is an issue resolved as accepted""" + return self.__is_resolve_as("ACCEPTED") + def is_closed(self) -> bool: """{'creationDate': '2022-02-01T19:15:24+0100', 'diffs': [ {'key': 'resolution', 'newValue': 'FIXED'}, {'key': 'status', 'newValue': 'CLOSED', 'oldValue': 'OPEN'}]}""" - for diff in self.sq_json["diffs"]: - if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "CLOSED": - return True + if self.__is_issue_status_diff(): + for diff in self.sq_json["diffs"]: + if diff["key"] == "issueStatus" and "newValue" in diff and diff["newValue"] == "CLOSED": + return True + else: + for diff in self.sq_json["diffs"]: + if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "CLOSED": + return True return False def __is_status(self, status: str) -> bool: for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and d.get("newValue", "") == status: - return True + if self.__is_issue_status_diff(): + if d.get("key", "") == "issueStatus" and d.get("newValue", "") == status: + return True + else: + if d.get("key", "") == "status" and d.get("newValue", "") == status: + return True return False def is_reopen(self) -> bool: """Returns whether the changelog item is an issue re-open""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and ( - (d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") != "CONFIRMED") - or (d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CLOSED") - ): - return True + if self.__is_issue_status_diff(): + for d in self.sq_json["diffs"]: + if d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and 
d.get("oldValue", "") != "CONFIRMED": + return True + else: + for d in self.sq_json["diffs"]: + if d.get("key", "") == "status" and ( + (d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") != "CONFIRMED") + or (d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CLOSED") + ): + return True return False def is_confirm(self) -> bool: @@ -90,9 +133,14 @@ def is_confirm(self) -> bool: def is_unconfirm(self) -> bool: """Returns whether the changelog item is an issue unconfirm""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") == "CONFIRMED": - return True + if self.__is_issue_status_diff(): + for d in self.sq_json["diffs"]: + if d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CONFIRMED": + return True + else: + for d in self.sq_json["diffs"]: + if d.get("key", "") == "status" and d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") == "CONFIRMED": + return True return False def is_mark_as_safe(self) -> bool: @@ -174,9 +222,14 @@ def old_assignee(self) -> Optional[str]: def previous_state(self) -> str: """Returns the previous state of a state change changelog""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status": - return d.get("oldValue", "") + if self.__is_issue_status_diff(): + for d in self.sq_json["diffs"]: + if d.get("key", "") == "issueStatus": + return d.get("oldValue", "") + else: + for d in self.sq_json["diffs"]: + if d.get("key", "") == "status": + return d.get("oldValue", "") return "" def date(self) -> str: @@ -219,6 +272,8 @@ def changelog_type(self) -> tuple[str, Optional[str]]: ctype = ("FALSE-POSITIVE", None) elif self.is_resolve_as_wf(): ctype = ("WONT-FIX", None) + elif self.is_resolve_as_accept(): + ctype = ("ACCEPT", None) elif self.is_tag(): ctype = ("TAG", self.get_tags()) elif self.is_closed(): diff --git a/sonar/issues.py b/sonar/issues.py index 69a29670..38ab45a7 
100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -352,8 +352,6 @@ def set_type(self, new_type: str) -> bool: :return: Whether the operation succeeded :rtype: bool """ - if self.endpoint.is_mqr_mode(): - raise exceptions.UnsupportedOperation("Setting issue type is not supported in MQR mode") log.debug("Changing type of issue %s from %s to %s", self.key, self.type, new_type) try: r = self.post("issues/set_type", {"issue": self.key, "type": new_type}) From 2e50a8788b167cd984b7879ffb3f431f56cd3b6d Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 9 Apr 2025 14:21:01 +0200 Subject: [PATCH 08/29] Simplify issue sync (#1625) * FIx improper log when comparing source and target project branches * Quality pass - reduce complexity --- sonar/changelog.py | 104 +++++++++++++++------------------------------ sonar/projects.py | 2 +- 2 files changed, 35 insertions(+), 71 deletions(-) diff --git a/sonar/changelog.py b/sonar/changelog.py index ff6121d1..cf3e1827 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -54,19 +54,11 @@ def __is_resolve_as(self, resolve_reason: str) -> bool: # where "resolution" and "status" are the only way to detect status changes. They are also still # applicable for security hotspot changelogs. 
if self.__is_issue_status_diff(): - for diff in self.sq_json["diffs"]: - if diff["key"] == "issueStatus" and "newValue" in diff and diff["newValue"] == resolve_reason: - return True + return any(diff["key"] == "issueStatus" and diff.get("newValue", "") == resolve_reason for diff in self.sq_json["diffs"]) else: - cond1 = False - cond2 = False - for diff in self.sq_json["diffs"]: - if diff["key"] == "resolution" and "newValue" in diff and diff["newValue"] == resolve_reason: - cond1 = True - if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "RESOLVED": - cond2 = True + cond1 = any(diff["key"] == "resolution" and diff.get("newValue", "") == resolve_reason for diff in self.sq_json["diffs"]) + cond2 = any(diff["key"] == "status" and diff.get("newValue", "") == "RESOLVED" for diff in self.sq_json["diffs"]) return cond1 and cond2 - return False def is_resolve_as_fixed(self) -> bool: """Returns whether the changelog item is an issue resolved as fixed""" @@ -76,9 +68,7 @@ def is_resolve_as_fp(self) -> bool: """Returns whether the changelog item is an issue resolved as false positive""" # Finding "is resolve as false positive" requires "FALSE-POSITIVE" on SonarQube # Server 9.9 and "FALSE_POSITIVE" on SonarQube Server 2025.1 and SonarQube Cloud. 
- cond1 = self.__is_resolve_as("FALSE-POSITIVE") - cond2 = self.__is_resolve_as("FALSE_POSITIVE") - return cond1 or cond2 + return self.__is_resolve_as("FALSE-POSITIVE") or self.__is_resolve_as("FALSE_POSITIVE") def is_resolve_as_wf(self) -> bool: """Returns whether the changelog item is an issue resolved as won't fix""" @@ -92,40 +82,29 @@ def is_closed(self) -> bool: """{'creationDate': '2022-02-01T19:15:24+0100', 'diffs': [ {'key': 'resolution', 'newValue': 'FIXED'}, {'key': 'status', 'newValue': 'CLOSED', 'oldValue': 'OPEN'}]}""" - if self.__is_issue_status_diff(): - for diff in self.sq_json["diffs"]: - if diff["key"] == "issueStatus" and "newValue" in diff and diff["newValue"] == "CLOSED": - return True - else: - for diff in self.sq_json["diffs"]: - if diff["key"] == "status" and "newValue" in diff and diff["newValue"] == "CLOSED": - return True - return False + status_key = "issueStatus" if self.__is_issue_status_diff() else "status" + return any(diff["key"] == status_key and diff.get("newValue", "") == "CLOSED" for diff in self.sq_json["diffs"]) def __is_status(self, status: str) -> bool: - for d in self.sq_json["diffs"]: - if self.__is_issue_status_diff(): - if d.get("key", "") == "issueStatus" and d.get("newValue", "") == status: - return True - else: - if d.get("key", "") == "status" and d.get("newValue", "") == status: - return True - return False + status_key = "issueStatus" if self.__is_issue_status_diff() else "status" + return any(diff["key"] == status_key and diff.get("newValue", "") == status for diff in self.sq_json["diffs"]) def is_reopen(self) -> bool: """Returns whether the changelog item is an issue re-open""" if self.__is_issue_status_diff(): - for d in self.sq_json["diffs"]: - if d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and d.get("oldValue", "") != "CONFIRMED": - return True + return any( + d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and d.get("oldValue", "") != "CONFIRMED" + for d in 
self.sq_json["diffs"] + ) else: - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and ( + return any( + d.get("key", "") == "status" + and ( (d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") != "CONFIRMED") or (d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CLOSED") - ): - return True - return False + ) + for d in self.sq_json["diffs"] + ) def is_confirm(self) -> bool: """Returns whether the changelog item is an issue confirm""" @@ -134,42 +113,31 @@ def is_confirm(self) -> bool: def is_unconfirm(self) -> bool: """Returns whether the changelog item is an issue unconfirm""" if self.__is_issue_status_diff(): - for d in self.sq_json["diffs"]: - if d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CONFIRMED": - return True + return any( + d.get("key", "") == "issueStatus" and d.get("newValue", "") == "OPEN" and d.get("oldValue", "") == "CONFIRMED" + for d in self.sq_json["diffs"] + ) else: - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") == "CONFIRMED": - return True - return False + return any( + d.get("key", "") == "status" and d.get("newValue", "") == "REOPENED" and d.get("oldValue", "") == "CONFIRMED" + for d in self.sq_json["diffs"] + ) def is_mark_as_safe(self) -> bool: """Returns whether the changelog item is a hotspot marked as safe""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "resolution" and d.get("newValue", "") == "SAFE": - return True - return False + return any(d.get("key", "") == "resolution" and d.get("newValue", "") == "SAFE" for d in self.sq_json["diffs"]) def is_mark_as_to_review(self) -> bool: """Returns whether the changelog item is a hotspot to review""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status" and d.get("newValue", "") == "TO_REVIEW": - return True - return False + return any(d.get("key", "") == "status" and d.get("newValue", 
"") == "TO_REVIEW" for d in self.sq_json["diffs"]) def is_mark_as_fixed(self) -> bool: """Returns whether the changelog item is an issue marked as fixed""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "resolution" and d.get("newValue", "") == "FIXED": - return True - return False + return any(d.get("key", "") == "resolution" and d.get("newValue", "") == "FIXED" for d in self.sq_json["diffs"]) def is_mark_as_acknowledged(self) -> bool: """Returns whether the changelog item is a hotspot acknowledge""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "resolution" and d.get("newValue", "") == "ACKNOWLEDGED": - return True - return False + return any(d.get("key", "") == "resolution" and d.get("newValue", "") == "ACKNOWLEDGED" for d in self.sq_json["diffs"]) def is_change_severity(self) -> bool: """Returns whether the changelog item is a change of issue severity""" @@ -222,14 +190,10 @@ def old_assignee(self) -> Optional[str]: def previous_state(self) -> str: """Returns the previous state of a state change changelog""" - if self.__is_issue_status_diff(): - for d in self.sq_json["diffs"]: - if d.get("key", "") == "issueStatus": - return d.get("oldValue", "") - else: - for d in self.sq_json["diffs"]: - if d.get("key", "") == "status": - return d.get("oldValue", "") + status_key = "issueStatus" if self.__is_issue_status_diff() else "status" + for d in self.sq_json["diffs"]: + if d.get("key", "") == status_key: + return d.get("oldValue", "") return "" def date(self) -> str: diff --git a/sonar/projects.py b/sonar/projects.py index cbfe38e9..3a20645b 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -932,8 +932,8 @@ def sync(self, another_project: Project, sync_settings: types.ConfigSettings) -> if len(diff) > 0: log.warning( "Target %s has branches that do not exist for source %s, these branches will be ignored: %s", - str(self), str(another_project), + str(self), ", ".join(diff), ) report = [] From f322f162dc639b50f905b09131f532d5789b2a05 Mon 
Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 9 Apr 2025 20:50:57 +0200 Subject: [PATCH 09/29] Filter-irrelevant-transitions-in-findings-sync (#1631) * Add checks on manual vs automatic changes * Filter out non manual changes * Fix log * Fix assignee management * Fix new_assignee() function name change * Fix new_assignee() * Exclude automatic changes of findings sync for hotspots * Fix log * Exclude automatic changes from hotspots syncs --- sonar/changelog.py | 48 ++++++++++++++++++++-------------------- sonar/findings.py | 23 +++++++------------ sonar/hotspots.py | 7 +++++- sonar/issues.py | 18 ++++++++++----- sonar/syncer.py | 2 +- test/unit/test_issues.py | 4 ++-- 6 files changed, 54 insertions(+), 48 deletions(-) diff --git a/sonar/changelog.py b/sonar/changelog.py index cf3e1827..7adfaef9 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -39,10 +39,7 @@ def __str__(self) -> str: def __is_issue_status_diff(self) -> bool: """Returns whether the changelog item contains an object with the key 'issueStatus'""" - for d in self.sq_json["diffs"]: - if d.get("key", "") == "issueStatus": - return True - return False + return any(d.get("key", "") == "issueStatus" for d in self.sq_json["diffs"]) def __is_resolve_as(self, resolve_reason: str) -> bool: """Returns whether the changelog item is an issue resolved as a specific reason""" @@ -169,24 +166,27 @@ def is_technical_change(self) -> bool: key = d.get("key", "") return key in ("from_short_branch", "from_branch", "effort") + def is_manual_change(self) -> bool: + """Returns whether the changelog item is a manual change""" + status_key, closed_state = "issueStatus", "FIXED" + if not self.__is_issue_status_diff(): + status_key, closed_state = "status", "CLOSED" + return not any( + d.get("key", "") == status_key and closed_state in (d.get("oldValue", ""), d.get("newValue", "")) for d in self.sq_json["diffs"] + ) + def is_assignment(self) -> bool: """Returns whether the changelog item is an assignment""" - d 
= self.sq_json["diffs"][0] - return d.get("key", "") == "assignee" + return any(d.get("key", "") == "assignee" for d in self.sq_json["diffs"]) - def new_assignee(self) -> Optional[str]: + def assignee(self, new: bool = True) -> Optional[str]: """Returns the new assignee of a change assignment changelog""" - if not self.is_assignment(): - return None - d = self.sq_json["diffs"][0] - return d.get("newValue", None) - - def old_assignee(self) -> Optional[str]: - """Returns the old assignee of a change assignment changelog""" - if not self.is_assignment(): - return None - d = self.sq_json["diffs"][0] - return d.get("oldValue", None) + try: + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") + return d.get("newValue" if new else "oldValue", None) + except StopIteration: + log.warning("No assignment found in changelog %s", str(self)) + return None def previous_state(self) -> str: """Returns the previous state of a state change changelog""" @@ -206,20 +206,20 @@ def author(self) -> Optional[str]: def is_tag(self) -> bool: """Returns whether the changelog item is an issue tagging""" - d = self.sq_json["diffs"][0] - return d.get("key", "") == "tag" + return any(d.get("key", "") == "tags" for d in self.sq_json["diffs"]) def get_tags(self) -> Optional[str]: """Returns the changelog tags for issue tagging items""" - if not self.is_tag(): + try: + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") + return d.get("newValue").replace(" ", ",") + except StopIteration: return None - d = self.sq_json["diffs"][0] - return d.get("newValue", "").replace(" ", ",") def changelog_type(self) -> tuple[str, Optional[str]]: ctype = (None, None) if self.is_assignment(): - ctype = ("ASSIGN", self.new_assignee()) + ctype = ("ASSIGN", self.assignee()) elif self.is_reopen(): ctype = ("REOPEN", None) elif self.is_confirm(): diff --git a/sonar/findings.py b/sonar/findings.py index bfebcef9..e7721958 100644 --- a/sonar/findings.py +++ 
b/sonar/findings.py @@ -22,7 +22,7 @@ from __future__ import annotations import re import datetime -from typing import Union +from typing import Union, Optional from queue import Queue from threading import Thread @@ -332,7 +332,7 @@ def is_security_issue(self) -> bool: def is_closed(self) -> bool: return self.status == "CLOSED" - def changelog(self) -> bool: + def changelog(self, manual_only: bool = True) -> bool: # Implemented in subclasses, should not reach this raise NotImplementedError() @@ -340,15 +340,16 @@ def comments(self) -> dict[str, str]: # Implemented in subclasses, should not reach this raise NotImplementedError() - def has_changelog(self, added_after: datetime.datetime = None) -> bool: + def has_changelog(self, added_after: Optional[datetime.datetime] = None, manual_only: bool = True) -> bool: """ + :param manual_only: Whether to check only manual changes :return: Whether the finding has a changelog :rtype: bool """ # log.debug("%s has %d changelogs", str(self), len(self.changelog())) if added_after is not None and added_after > self.modification_date: return False - return len(self.changelog()) > 0 + return len(self.changelog(manual_only)) > 0 def has_comments(self) -> bool: """ @@ -371,25 +372,17 @@ def commenters(self) -> set[str]: """ return {v["user"] for v in self.comments() if "user" in v} - def can_be_synced(self, user_list: list[str]) -> bool: + def can_be_synced(self, user_list: Optional[list[str]]) -> bool: """ :meta private: """ - log.debug( - "Issue %s: Checking if modifiers %s are different from user %s", - str(self), - str(self.modifiers()), - str(user_list), - ) + log.debug("%s: Checking if modifiers %s are different from user %s", str(self), str(self.modifiers()), str(user_list)) # If no account dedicated to sync is provided, finding can be synced only if no changelog if user_list is None: log.debug("Allowed user list empty, checking if issue has changelog") return not self.has_changelog() # Else, finding can be synced only if 
changes were performed by syncer accounts - for u in self.modifiers(): - if u not in user_list: - return False - return True + return all(u in user_list for u in self.modifiers()) def strictly_identical_to(self, another_finding: Finding, ignore_component: bool = False) -> bool: """ diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 6c60d038..c87334ed 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -275,6 +275,7 @@ def apply_changelog(self, source_hotspot: Hotspot, settings: types.ConfigSetting return False change_nbr = 0 + # FIXME: There can be a glitch if there are non manual changes in the changelog start_change = len(self.changelog()) + 1 log.debug("Applying changelog of %s to %s, from change %d", str(source_hotspot), str(self), start_change) for key in sorted(events.keys()): @@ -311,7 +312,7 @@ def apply_changelog(self, source_hotspot: Hotspot, settings: types.ConfigSetting self.add_comment(comments[key]["value"]) return True - def changelog(self) -> dict[str, changelog.Changelog]: + def changelog(self, manual_only: bool = True) -> dict[str, changelog.Changelog]: """ :return: The hotspot changelog :rtype: dict @@ -329,6 +330,10 @@ def changelog(self) -> dict[str, changelog.Changelog]: # Skip automatic changelog events generated by SonarSource itself log.debug("Changelog is a technical change: %s", str(d)) continue + if manual_only and not d.is_manual_change(): + # Skip automatic changelog events generated by SonarSource itself + log.debug("%s: Changelog is an automatic change: %s", str(self), str(d)) + continue util.json_dump_debug(l, "Changelog item Changelog ADDED = ") seq += 1 self._changelog[f"{d.date()}_{seq:03d}"] = d diff --git a/sonar/issues.py b/sonar/issues.py index 38ab45a7..68243245 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -218,8 +218,9 @@ def refresh(self) -> bool: self._load(json.loads(resp.text)["issues"][0]) return resp.ok - def changelog(self) -> dict[str, str]: + def changelog(self, manual_only: bool = True) -> 
dict[str, str]: """ + :param bool manual_only: Whether the only manual changes should be returned or all changes :return: The issue changelog :rtype: dict{"_": } """ @@ -232,9 +233,13 @@ def changelog(self) -> dict[str, str]: d = changelog.Changelog(l) if d.is_technical_change(): # Skip automatic changelog events generated by SonarSource itself - log.debug("Changelog is a technical change: %s", str(d)) + log.debug("%s: Changelog is a technical change: %s", str(self), str(d)) continue - util.json_dump_debug(l, "Changelog item Changelog ADDED = ") + if manual_only and not d.is_manual_change(): + # Skip automatic changelog events generated by SonarSource itself + log.debug("%s: Changelog is an automatic change: %s", str(self), str(d)) + continue + log.debug("%s: Changelog item Changelog ADDED = %s", str(self), str(d)) seq += 1 self._changelog[f"{d.date()}_{seq:03d}"] = d return self._changelog @@ -465,7 +470,7 @@ def accept(self) -> bool: log.debug("Marking %s as accepted", str(self)) return self.do_transition("accept") - def __apply_event(self, event: str, settings: ConfigSettings) -> bool: + def __apply_event(self, event: changelog.Changelog, settings: ConfigSettings) -> bool: from sonar import syncer log.debug("Applying event %s", str(event)) @@ -509,8 +514,10 @@ def __apply_event(self, event: str, settings: ConfigSettings) -> bool: self.set_tags(data) # self.add_comment(f"Tag change {origin}", settings[SYNC_ADD_COMMENTS]) elif event_type == "FIXED": - self.resolve_as_fixed() + log.debug("Event %s is not applied", str(event)) + # self.resolve_as_fixed() # self.add_comment(f"Change of issue type {origin}", settings[SYNC_ADD_COMMENTS]) + return False elif event_type == "CLOSED": log.info("Changelog event is a CLOSE issue, it cannot be applied... 
%s", str(event)) # self.add_comment(f"Change of issue type {origin}", settings[SYNC_ADD_COMMENTS]) @@ -534,6 +541,7 @@ def apply_changelog(self, source_issue: Issue, settings: ConfigSettings) -> bool return False change_nbr = 0 + # FIXME: There can be a glitch if there are non manual changes in the changelog start_change = len(self.changelog()) + 1 log.info("Applying changelog of %s to %s, from change %d", str(source_issue), str(self), start_change) for key in sorted(events.keys()): diff --git a/sonar/syncer.py b/sonar/syncer.py index 064623c1..1da3d039 100644 --- a/sonar/syncer.py +++ b/sonar/syncer.py @@ -188,7 +188,7 @@ def sync_lists( log.debug("%s is closed, so it will not be synchronized despite having a changelog", str(finding)) continue if not (finding.has_changelog(added_after=min_date) or finding.has_comments()): - log.debug("%s has no changelog or comments added after %s, skipped in sync", str(finding), str(min_date)) + log.debug("%s has no manual changelog or comments added after %s, skipped in sync", str(finding), str(min_date)) continue modifiers = finding.modifiers().union(finding.commenters()) diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index b957e892..94691f4d 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -163,8 +163,8 @@ def test_changelog() -> None: assert changelog.new_type() is None assert not changelog.is_technical_change() assert not changelog.is_assignment() - assert changelog.new_assignee() is None - assert changelog.old_assignee() is None + assert changelog.assignee() is None + assert changelog.assignee(False) is None assert datetime(2024, 10, 20) <= util.string_to_date(changelog.date()).replace(tzinfo=None) < datetime(2024, 12, 26) assert changelog.author() == "admin" assert not changelog.is_tag() From b92e2b0a0a17a485bb7e2de7e3d70c012b58d9f8 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 9 Apr 2025 21:11:30 +0200 Subject: [PATCH 10/29] Fixes #1621 (#1632) --- cli/options.py | 29 
+++++++++++++++++++++++------ sonar/utilities.py | 1 + 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/cli/options.py b/cli/options.py index abc3809f..961e1ca5 100644 --- a/cli/options.py +++ b/cli/options.py @@ -35,10 +35,19 @@ URL_SHORT = "u" URL = "url" + +URL_TARGET_SHORT = "U" +URL_TARGET = "urlTarget" + TOKEN_SHORT = "t" TOKEN = "token" +TOKEN_TARGET_SHORT = "T" +TOKEN_TARGET = "tokenTarget" + ORG_SHORT = "o" ORG = "organization" +ORG_TARGET_SHORT = "O" +ORG_TARGET = "organizationTarget" VERBOSE_SHORT = "v" VERBOSE = "verbosity" @@ -203,6 +212,8 @@ def parse_and_check(parser: ArgumentParser, logger_name: str = None, verify_toke kwargs.pop(SKIP_VERSION_CHECK, None) if utilities.is_sonarcloud_url(kwargs[URL]) and kwargs[ORG] is None: raise ArgumentsError(f"Organization (-{ORG_SHORT}) option is mandatory for SonarCloud") + if utilities.is_sonarcloud_url(kwargs[URL_TARGET]) and kwargs[ORG_TARGET] is None: + raise ArgumentsError(f"Organization (-{ORG_TARGET_SHORT}) option is mandatory for SonarCloud") if verify_token: utilities.check_token(args.token, utilities.is_sonarcloud_url(kwargs[URL])) return args @@ -367,16 +378,22 @@ def add_component_type_arg(parser: ArgumentParser, comp_types: tuple[str] = COMP def set_target_sonar_args(parser: ArgumentParser) -> ArgumentParser: """Sets the target SonarQube CLI options""" parser.add_argument( - "-U", - "--urlTarget", + f"-{URL_TARGET_SHORT}", + f"--{URL_TARGET}", + required=False, + help="Root URL of the target platform when using sonar-findings-sync", + ) + parser.add_argument( + f"-{TOKEN_TARGET_SHORT}", + f"--{TOKEN_TARGET}", required=False, - help="Root URL of the target SonarQube server", + help="Token of target platform when using sonar-findings-sync - Unauthenticated usage is not possible", ) parser.add_argument( - "-T", - "--tokenTarget", + f"-{ORG_TARGET_SHORT}", + f"--{ORG_TARGET}", required=False, - help="Token to authenticate to target SonarQube - Unauthenticated usage is not possible", + 
help="Organization when using sonar-findings-sync with SonarCloud as target platform", ) return parser diff --git a/sonar/utilities.py b/sonar/utilities.py index 8c5298aa..99c84658 100644 --- a/sonar/utilities.py +++ b/sonar/utilities.py @@ -626,6 +626,7 @@ def convert_args(args: object, second_platform: bool = False) -> dict[str, str]: if second_platform: kwargs["url"] = kwargs.pop("urlTarget", kwargs["url"]) kwargs["token"] = kwargs.pop("tokenTarget", kwargs["token"]) + kwargs["org"] = kwargs.pop("organizationTarget", kwargs["org"]) return kwargs From 2e3e769836a39cd19b518a3baf80d6b6f46f9868 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Thu, 10 Apr 2025 13:24:50 +0200 Subject: [PATCH 11/29] Issue-sync-fixes (#1633) * Fixes assignment event detection * Improve log with event type * Intro diff * Remove artificial complexity * Add score matching logs * Fixes #1627 * Further cleanup of pipeline * Improve match logging * Fix sonar.tests --- .github/workflows/build.yml | 19 ++++++------------- requirements-to-build.txt | 2 ++ sonar/changelog.py | 4 +++- sonar/findings.py | 23 +++++++++++++++++++---- sonar/issues.py | 2 +- 5 files changed, 31 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3b6c9f23..21681fd7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,24 +28,17 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pylint pytest coverage if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - #- name: Lint with flake8 - # run: | - # stop the build if there are Python syntax errors or undefined names - # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. - # flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=128 --statistics - # - name: Test with pytest - # run: | - # pytest + if [ -f requirements-to-build.txt ]; then pip install -r requirements-to-build.txt; fi + # Linting is done in the run_linters.sh script - name: Prep tests + # Keep this stage so that the test dirs exists and scanner is happy with sonar.tests property working-directory: . run: | chmod +x conf/prep_tests.sh conf/prep_tests.sh - + # - name: Run tests # working-directory: . # run: | @@ -68,8 +61,8 @@ jobs: run: | version=$(grep PACKAGE_VERSION sonar/version.py | cut -d "=" -f 2 | sed "s/[\'\" ]//g") echo "sonar.projectVersion=$version" >> sonar-project.properties - - name: SonarCloud scan - uses: SonarSource/sonarcloud-github-action@master + - name: SonarQube Cloud scan + uses: SonarSource/sonarqube-scan-action@master env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} diff --git a/requirements-to-build.txt b/requirements-to-build.txt index 76449162..9c8ffe92 100644 --- a/requirements-to-build.txt +++ b/requirements-to-build.txt @@ -7,3 +7,5 @@ sphinx-autodoc-typehints twine pytest coverage +flake8 +pylint diff --git a/sonar/changelog.py b/sonar/changelog.py index 7adfaef9..18cae184 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -177,10 +177,12 @@ def is_manual_change(self) -> bool: def is_assignment(self) -> bool: """Returns whether the changelog item is an assignment""" - return any(d.get("key", "") == "assignee" for d in self.sq_json["diffs"]) + return any(d.get("key", "") == "assignee" and "newValue" in d for d in self.sq_json["diffs"]) def assignee(self, new: bool = True) -> Optional[str]: """Returns the new assignee of a change assignment changelog""" + if not self.is_assignment(): + return None try: d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") return d.get("newValue" if new else "oldValue", None) diff --git 
a/sonar/findings.py b/sonar/findings.py index e7721958..ce511e2d 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -27,6 +27,8 @@ from queue import Queue from threading import Thread from requests import RequestException +import Levenshtein + import sonar.logging as log import sonar.sqobject as sq import sonar.platform as pf @@ -417,21 +419,34 @@ def almost_identical_to(self, another_finding: Finding, ignore_component: bool = if self.rule != another_finding.rule or self.hash != another_finding.hash: return False score = 0 + match_msg = " Match" if self.message == another_finding.message or kwargs.get("ignore_message", False): score += 2 + match_msg += " message +2" + elif Levenshtein.distance(self.message, another_finding.message, score_cutoff=6) <= 5: + score += 1 + match_msg += " message +1" if self.file() == another_finding.file(): - score += 2 + score += 1 + match_msg += " file +1" if self.line == another_finding.line or kwargs.get("ignore_line", False): score += 1 + match_msg += " line +1" if self.component == another_finding.component or ignore_component: score += 1 + match_msg += " component +1" if self.author == another_finding.author or kwargs.get("ignore_author", False): score += 1 + match_msg += " author +1" if self.type == another_finding.type or kwargs.get("ignore_type", False): score += 1 + match_msg += " type +1" if self.severity == another_finding.severity or kwargs.get("ignore_severity", False): score += 1 - # Need at least 7 / 9 to match + match_msg += " severity +1" + + log.debug("%s vs %s - %s score = %d", str(self), str(another_finding), match_msg, score) + # Need at least 7 / 8 to consider it's a match return score >= 7 def search_siblings( @@ -455,8 +470,8 @@ def search_siblings( log.info("%s and %s are exact match but target already has changes, cannot be synced", str(self), str(finding)) match_but_modified.append(finding) return exact_matches, approx_matches, match_but_modified - else: - log.debug("%s and %s are not identical", 
str(self), str(finding)) + # else: + # log.debug("%s and %s are not identical", str(self), str(finding)) log.info("No exact match, searching for an approximate match of %s", str(self)) for finding in findings_list: diff --git a/sonar/issues.py b/sonar/issues.py index 68243245..c8f82080 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -473,9 +473,9 @@ def accept(self) -> bool: def __apply_event(self, event: changelog.Changelog, settings: ConfigSettings) -> bool: from sonar import syncer - log.debug("Applying event %s", str(event)) # origin = f"originally by *{event['userName']}* on original branch" (event_type, data) = event.changelog_type() + log.debug("Applying event type %s - %s", event_type, str(event)) if event_type == "SEVERITY": self.set_severity(data) # self.add_comment(f"Change of severity {origin}", settings[SYNC_ADD_COMMENTS]) From b654983d3044b99d498851d233776c89d881c3bc Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Thu, 10 Apr 2025 17:44:26 +0200 Subject: [PATCH 12/29] Issue-sync-fixes (#1634) * Fixes assignment event detection * Improve log with event type * Intro diff * Remove artificial complexity * Add score matching logs * Fixes #1627 * Further cleanup of pipeline * Improve match logging * Fix sonar.tests * Fix get tags * Merge branch 'master' into issue-sync-fixes * Secure assignee --- sonar/changelog.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/sonar/changelog.py b/sonar/changelog.py index 18cae184..6cfe6592 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -150,14 +150,16 @@ def new_severity(self) -> Optional[str]: def is_change_type(self) -> bool: """Returns whether the changelog item is a change of issue type""" - d = self.sq_json["diffs"][0] - return d.get("key", "") == "type" + return any(d.get("key", "") == "type" and "newValue" in d for d in self.sq_json["diffs"]) def new_type(self) -> Optional[str]: """Returns the new type of a change issue type changelog""" if 
self.is_change_type(): - d = self.sq_json["diffs"][0] - return d.get("newValue", None) + try: + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "type") + return d.get("newValue", None) + except StopIteration: + log.warning("No type change found in changelog %s", str(self)) return None def is_technical_change(self) -> bool: @@ -181,13 +183,12 @@ def is_assignment(self) -> bool: def assignee(self, new: bool = True) -> Optional[str]: """Returns the new assignee of a change assignment changelog""" - if not self.is_assignment(): - return None - try: - d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") - return d.get("newValue" if new else "oldValue", None) - except StopIteration: - log.warning("No assignment found in changelog %s", str(self)) + if self.is_assignment(): + try: + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") + return d.get("newValue" if new else "oldValue", None) + except StopIteration: + log.warning("No assignment found in changelog %s", str(self)) return None def previous_state(self) -> str: @@ -213,8 +214,8 @@ def is_tag(self) -> bool: def get_tags(self) -> Optional[str]: """Returns the changelog tags for issue tagging items""" try: - d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "assignee") - return d.get("newValue").replace(" ", ",") + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "tags") + return d.get("newValue", "").split() except StopIteration: return None From 29637e5ce8f0aa7bb669a94fa77ca6abc3274874 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 16:17:03 +0200 Subject: [PATCH 13/29] Regression-testing (#1637) * Make urlTarget check conditional * Force accept app/json * Move config.json in sonar * Move config from sonar.audit to sonar * Update map keys * Add get_issues_map * Use issue map config for types * Remove empty filters at the end in 1 go * Remove local type map * Formatting * Add resolutions * Fix 
config.json path * Adjust test to changelog with non manual changes * Simplify list remapping * Simplify pre search filters * Raise exception when setting type in MQR mode * Pass audit settings to project visi check * Disambiguate sonar config and audit config * Disambiguate sonar config and audit config * Move test config.json * SImplify issue search pre filter * Deduplicate list using sets * Update tests * remove temp log * Add unassign changelog * Add tests on changelog * Add unassign placeholder for apply_event * Removed unused func * Fix audit_conf.configure() * Add configure() tests * Adjust issues and changelog tests for 9.9 * Add timezone for issue search * Replace JCL by Apex * Make severity detection more robust * Remove logging * Adjust to CB * Adjust for CB * Quality pass * Quality pass * Quality pass --- .gitignore | 2 +- cli/audit.py | 7 +- cli/options.py | 2 +- setup.py | 2 +- sonar/audit/__init__.py | 4 +- sonar/audit/{config.py => audit_config.py} | 30 - sonar/changelog.py | 16 +- sonar/{audit => }/config.json | 40 + sonar/config.py | 67 + sonar/issues.py | 63 +- sonar/platform.py | 14 +- sonar/sif_node.py | 4 +- sonar/tasks.py | 2 +- sonar/utilities.py | 20 +- test/config.json | 4902 -------------------- test/unit/test_audit.py | 21 + test/unit/test_findings.py | 28 +- test/unit/test_issues.py | 42 +- test/unit/test_projects.py | 4 +- test/unit/test_rules.py | 11 +- test/unit/utilities.py | 18 +- 21 files changed, 273 insertions(+), 5026 deletions(-) rename sonar/audit/{config.py => audit_config.py} (77%) rename sonar/{audit => }/config.json (81%) create mode 100644 sonar/config.py delete mode 100644 test/config.json diff --git a/.gitignore b/.gitignore index 1ecfe366..5b345d53 100644 --- a/.gitignore +++ b/.gitignore @@ -17,7 +17,7 @@ tmp/ .venv/ .DS_Store -!sonar/audit/config.json +!sonar/config.json !test/config.json !test/integration/sif*.json !.vscode/*.json diff --git a/cli/audit.py b/cli/audit.py index 4201452e..04f4ec89 100755 --- 
a/cli/audit.py +++ b/cli/audit.py @@ -37,7 +37,8 @@ import sonar.logging as log from sonar import platform, users, groups, qualityprofiles, qualitygates, sif, portfolios, applications, projects import sonar.utilities as util -from sonar.audit import problem, config +from sonar.audit import problem +from sonar.audit import audit_config as audit_conf TOOL_NAME = "sonar-audit" WHAT_AUDITABLE = { @@ -156,7 +157,7 @@ def main() -> None: errcode = errcodes.OS_ERROR try: kwargs = util.convert_args(__parser_args("Audits a SonarQube platform or a SIF (Support Info File or System Info File)")) - settings = config.load(TOOL_NAME) + settings = audit_conf.load(TOOL_NAME) file = ofile = kwargs.pop(options.REPORT_FILE) settings.update( { @@ -167,7 +168,7 @@ def main() -> None: } ) if kwargs.get("config", False): - config.configure() + audit_conf.configure() sys.exit(errcodes.OK) if kwargs["sif"]: diff --git a/cli/options.py b/cli/options.py index 961e1ca5..8bd379f7 100644 --- a/cli/options.py +++ b/cli/options.py @@ -212,7 +212,7 @@ def parse_and_check(parser: ArgumentParser, logger_name: str = None, verify_toke kwargs.pop(SKIP_VERSION_CHECK, None) if utilities.is_sonarcloud_url(kwargs[URL]) and kwargs[ORG] is None: raise ArgumentsError(f"Organization (-{ORG_SHORT}) option is mandatory for SonarCloud") - if utilities.is_sonarcloud_url(kwargs[URL_TARGET]) and kwargs[ORG_TARGET] is None: + if URL_TARGET in kwargs and utilities.is_sonarcloud_url(kwargs[URL_TARGET]) and kwargs[ORG_TARGET] is None: raise ArgumentsError(f"Organization (-{ORG_TARGET_SHORT}) option is mandatory for SonarCloud") if verify_token: utilities.check_token(args.token, utilities.is_sonarcloud_url(kwargs[URL])) diff --git a/setup.py b/setup.py index 510a2cd9..f7e40dcd 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ "Source Code": "https://github.com/okorach/sonar-tools", }, packages=setuptools.find_packages(), - package_data={"sonar": ["LICENSE", "audit/rules.json", "audit/config.json", 
"audit/sonar-audit.properties"]}, + package_data={"sonar": ["LICENSE", "audit/rules.json", "config.json", "audit/sonar-audit.properties"]}, install_requires=[ "argparse", "datetime", diff --git a/sonar/audit/__init__.py b/sonar/audit/__init__.py index 67d90ceb..3c7ca097 100644 --- a/sonar/audit/__init__.py +++ b/sonar/audit/__init__.py @@ -20,8 +20,8 @@ """sonar.audit module""" -from sonar.audit import rules, config -from sonar import utilities, errcodes +from sonar.audit import rules +from sonar import utilities, errcodes, config config.load_config_data() try: diff --git a/sonar/audit/config.py b/sonar/audit/audit_config.py similarity index 77% rename from sonar/audit/config.py rename to sonar/audit/audit_config.py index bab30d71..66c00f42 100644 --- a/sonar/audit/config.py +++ b/sonar/audit/audit_config.py @@ -21,8 +21,6 @@ """sonar-config CLI""" import os import pathlib -import datetime -import json import jprops from typing import Optional import sonar.logging as log @@ -31,8 +29,6 @@ _CONFIG_SETTINGS = None -_CONFIG_DATA = None - def _load_properties_file(file: str) -> types.ConfigSettings: """Loads a properties file""" @@ -76,13 +72,6 @@ def load(config_name: Optional[str] = None, settings: types.ConfigSettings = Non return _CONFIG_SETTINGS -def get_property(name: str, settings: Optional[types.ConfigSettings] = None) -> str: - """Returns the value of a given property""" - if settings is None: - settings = _CONFIG_SETTINGS - return "" if not settings else settings.get(name, "") - - def configure() -> None: """Configures a default sonar-audit.properties""" template_file = pathlib.Path(__file__).parent / "sonar-audit.properties" @@ -97,22 +86,3 @@ def configure() -> None: log.info("Creating file '%s'", config_file) with open(config_file, "w", encoding="utf-8") as fh: print(text, file=fh) - - -def load_config_data() -> None: - global _CONFIG_DATA - config_data_file = pathlib.Path(__file__).parent / "config.json" - with open(config_data_file, "r", 
encoding="utf-8") as fh: - text = fh.read() - _CONFIG_DATA = json.loads(text) - - -def get_java_compatibility() -> dict[int, list[tuple[int, int, int]]]: - return {int(k): [tuple(v[0]), tuple(v[1])] for k, v in _CONFIG_DATA["javaCompatibility"].items()} - - -def get_scanners_versions() -> dict[int, list[tuple[int, int, int]]]: - data = {} - for scanner, release_info in _CONFIG_DATA["scannerVersions"].items(): - data[scanner] = {k: datetime.datetime(v[0], v[1], v[2]) for k, v in release_info.items()} - return data diff --git a/sonar/changelog.py b/sonar/changelog.py index 6cfe6592..18f2c7ba 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -138,14 +138,16 @@ def is_mark_as_acknowledged(self) -> bool: def is_change_severity(self) -> bool: """Returns whether the changelog item is a change of issue severity""" - d = self.sq_json["diffs"][0] - return d.get("key", "") == "severity" + return any(d.get("key", "") in ("severity", "impactSeverity") for d in self.sq_json["diffs"]) def new_severity(self) -> Optional[str]: """Returns the new severity of a change issue severity changelog""" if self.is_change_severity(): - d = self.sq_json["diffs"][0] - return d.get("newValue", None) + try: + d = next(d for d in self.sq_json["diffs"] if d.get("key", "") == "type") + return d.get("newValue", None) + except StopIteration: + log.warning("No severity change found in changelog %s", str(self)) return None def is_change_type(self) -> bool: @@ -181,6 +183,10 @@ def is_assignment(self) -> bool: """Returns whether the changelog item is an assignment""" return any(d.get("key", "") == "assignee" and "newValue" in d for d in self.sq_json["diffs"]) + def is_unassign(self) -> bool: + """Returns whether the changelog item is an unassign""" + return any(d.get("key", "") == "assignee" and "newValue" not in d for d in self.sq_json["diffs"]) + def assignee(self, new: bool = True) -> Optional[str]: """Returns the new assignee of a change assignment changelog""" if self.is_assignment(): @@ 
-223,6 +229,8 @@ def changelog_type(self) -> tuple[str, Optional[str]]: ctype = (None, None) if self.is_assignment(): ctype = ("ASSIGN", self.assignee()) + elif self.is_unassign(): + ctype = ("UNASSIGN", None) elif self.is_reopen(): ctype = ("REOPEN", None) elif self.is_confirm(): diff --git a/sonar/audit/config.json b/sonar/config.json similarity index 81% rename from sonar/audit/config.json rename to sonar/config.json index 70f3338a..37ca1e28 100644 --- a/sonar/audit/config.json +++ b/sonar/config.json @@ -163,5 +163,45 @@ "2.7.1": [2021, 4, 30], "2.7": [2019, 10, 1] } + }, + "issuesSearch": { + "maps": { + "fields": { + "types": "impactSoftwareQualities", + "severities": "impactSeverities", + "resolutions": "issueStatuses", + "statuses": "issueStatuses" + }, + "resultFields": { + "resolution": "", + "status": "issueStatus" + }, + "severities": { + "BLOCKER": "BLOCKER", + "CRITICAL": "HIGH", + "MAJOR": "MEDIUM", + "MINOR": "LOW", + "INFO": "INFO" + }, + "types": { + "SECURITY_HOTSPOT": "SECURITY", + "VULNERABILITY": "SECURITY", + "CODE_SMELL": "MAINTAINABILITY", + "BUG": "RELIABILITY" + }, + "statuses": { + "FALSE-POSITIVE": "FALSE_POSITIVE", + "CONFIRMED": "CONFIRMED", + "OPEN": "OPEN", + "REOPENED": "OPEN", + "CLOSED": "FIXED", + "WONTFIX": "ACCEPTED", + "FIXED": "OPEN" + }, + "resolutions": { + "FALSE-POSITIVE": "FALSE_POSITIVE", + "WONTFIX": "ACCEPTED" + } + } } } \ No newline at end of file diff --git a/sonar/config.py b/sonar/config.py new file mode 100644 index 00000000..4736da0f --- /dev/null +++ b/sonar/config.py @@ -0,0 +1,67 @@ +# +# sonar-tools +# Copyright (C) 2019-2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +"""sonar-config utils""" + +import pathlib +import datetime +import json +from typing import Optional + +_CONFIG_DATA = None +_ISSUES_SECTION = "issuesSearch" +_MAPS = "maps" + + +def load_config_data() -> None: + global _CONFIG_DATA + config_data_file = pathlib.Path(__file__).parent / "config.json" + with open(config_data_file, "r", encoding="utf-8") as fh: + text = fh.read() + _CONFIG_DATA = json.loads(text) + + i_map = _CONFIG_DATA[_ISSUES_SECTION] + i_map["allowedValues"] = {} + new_fields_map = {} + for old_field_name, new_field_name in i_map[_MAPS]["fields"].items(): + i_map["allowedValues"][old_field_name] = list(set(i_map[_MAPS][old_field_name].keys())) + if new_field_name != "": + i_map["allowedValues"][new_field_name] = list(set(i_map[_MAPS][old_field_name].values())) + new_fields_map[new_field_name] = {v: k for k, v in i_map[_MAPS][old_field_name].items()} + i_map[_MAPS].update(new_fields_map) + + +def get_java_compatibility() -> dict[int, list[tuple[int, int, int]]]: + return {int(k): [tuple(v[0]), tuple(v[1])] for k, v in _CONFIG_DATA["javaCompatibility"].items()} + + +def get_scanners_versions() -> dict[int, list[tuple[int, int, int]]]: + data = {} + for scanner, release_info in _CONFIG_DATA["scannerVersions"].items(): + data[scanner] = {k: datetime.datetime(v[0], v[1], v[2]) for k, v in release_info.items()} + return data + + +def get_issues_map(section: str) -> Optional[dict[str, str]]: + return _CONFIG_DATA[_ISSUES_SECTION][_MAPS].get(section, None) + + +def 
get_issue_search_allowed_values(field: str) -> Optional[set[str]]: + return _CONFIG_DATA[_ISSUES_SECTION]["allowedValues"].get(field, None) diff --git a/sonar/issues.py b/sonar/issues.py index c8f82080..09260c96 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -39,7 +39,7 @@ from sonar.util.types import ApiParams, ApiPayload, ObjectJsonRepr, ConfigSettings -from sonar import users, findings, changelog, projects, rules, exceptions +from sonar import users, findings, changelog, projects, rules, config, exceptions import sonar.utilities as util COMPONENT_FILTER_OLD = "componentKeys" @@ -357,6 +357,8 @@ def set_type(self, new_type: str) -> bool: :return: Whether the operation succeeded :rtype: bool """ + if self.endpoint.is_mqr_mode(): + raise exceptions.UnsupportedOperation("Changing issue type is not supported in MQR mode") log.debug("Changing type of issue %s from %s to %s", self.key, self.type, new_type) try: r = self.post("issues/set_type", {"issue": self.key, "type": new_type}) @@ -510,6 +512,9 @@ def __apply_event(self, event: changelog.Changelog, settings: ConfigSettings) -> u = settings[syncer.SYNC_SERVICE_ACCOUNTS][0] self.assign(u) # self.add_comment(f"Issue assigned {origin}", settings[SYNC_ADD_COMMENTS]) + elif event_type == "UNASSIGN": + # TODO: Handle uassign + return False elif event_type == "TAG": self.set_tags(data) # self.add_comment(f"Tag change {origin}", settings[SYNC_ADD_COMMENTS]) @@ -604,7 +609,7 @@ def search_by_type(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue] log.info("Splitting search by issue types") for issue_type in ("BUG", "VULNERABILITY", "CODE_SMELL"): try: - new_params["types"] = issue_type + new_params["types"] = [issue_type] issue_list.update(search(endpoint=endpoint, params=new_params)) except TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) @@ -620,7 +625,7 @@ def search_by_severity(endpoint: pf.Platform, params: ApiParams) -> dict[str, Is log.info("Splitting search by severities") for sev in ("BLOCKER", 
"CRITICAL", "MAJOR", "MINOR", "INFO"): try: - new_params["severities"] = sev + new_params["severities"] = [sev] issue_list.update(search(endpoint=endpoint, params=new_params)) except TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) @@ -907,42 +912,32 @@ def pre_search_filters(endpoint: pf.Platform, params: ApiParams) -> ApiParams: return {} log.debug("Sanitizing issue search filters %s", str(params)) version = endpoint.version() - filters = util.dict_remap( - original_dict=params.copy(), remapping={"project": COMPONENT_FILTER, "application": COMPONENT_FILTER, "portfolio": COMPONENT_FILTER} - ) + comp_filter = component_filter(endpoint) + filters = util.dict_remap(original_dict=params.copy(), remapping={"project": comp_filter, "application": comp_filter, "portfolio": comp_filter}) filters = util.dict_subset(util.remove_nones(filters), _SEARCH_CRITERIAS) + types = filters.pop("types", []) + filters.pop("impactSoftwareQualities", []) + severities = filters.pop("severities", []) + filters.pop("impactSeverities", []) + statuses = filters.pop("statuses", []) + filters.pop("NEW_STATUS", []) + filters.pop(OLD_STATUS, []) + if endpoint.is_mqr_mode(): + log.debug("MAP Type = %s", str(config.get_issues_map("impactSoftwareQualities"))) + filters["impactSoftwareQualities"] = util.list_remap(types, config.get_issues_map("types")) + filters["impactSeverities"] = util.list_remap(severities, config.get_issues_map("severities")) + filters[NEW_STATUS] = util.list_remap(statuses, mapping=config.get_issues_map(OLD_STATUS)) + else: + filters["types"] = util.list_remap(types, config.get_issues_map("impactSoftwareQualities")) + filters["severities"] = util.list_remap(severities, config.get_issues_map("impactSeverities")) + filters[OLD_STATUS] = util.list_remap(statuses, mapping=config.get_issues_map(NEW_STATUS)) + if version < (10, 2, 0): # Starting from 10.2 - "componentKeys" was renamed "components" filters = util.dict_remap(original_dict=filters, remapping={COMPONENT_FILTER: 
COMPONENT_FILTER_OLD}) - else: - # Starting from 10.2 - Issue types were replaced by software qualities, and severities replaced by impacts - __MAP = {"BUG": "RELIABILITY", "CODE_SMELL": "MAINTAINABILITY", "VULNERABILITY": "SECURITY", "SECURITY_HOTSPOT": "SECURITY"} - filters["impactSoftwareQualities"] = util.list_re_value(filters.pop("types", None), __MAP) - if len(filters["impactSoftwareQualities"]) == 0: - filters.pop("impactSoftwareQualities") - __MAP = {"BLOCKER": "HIGH", "CRITICAL": "HIGH", "MAJOR": "MEDIUM", "MINOR": "LOW", "INFO": "LOW"} - filters["impactSeverities"] = util.list_re_value(filters.pop("severities", None), __MAP) - if len(filters["impactSeverities"]) == 0: - filters.pop("impactSeverities") - - if version < (10, 4, 0): - log.debug("Sanitizing issue search filters - fixing resolutions") - filters = util.dict_remap(original_dict=filters, remapping={NEW_STATUS: OLD_STATUS}) - if OLD_STATUS in filters: - filters[OLD_STATUS] = util.list_re_value(filters[OLD_STATUS], mapping={NEW_FP: OLD_FP}) - else: - # Starting from 10.4 - "resolutions" and "statuses" are merged into "issuesStatuses", "FALSE-POSITIVE" was renamed "FALSE_POSITIVE" - # and "statuses" is deprecated - if "statuses" in filters: - if NEW_STATUS in filters: - filters[NEW_STATUS].update(filters.pop("statuses")) - else: - filters[NEW_STATUS] = filters.pop("statuses") - filters = util.dict_remap(original_dict=filters, remapping={OLD_STATUS: NEW_STATUS}) - if NEW_STATUS in filters: - filters[NEW_STATUS] = util.list_re_value(filters[NEW_STATUS], mapping={OLD_FP: NEW_FP}) - filters = {k: util.allowed_values_string(v, FILTERS_MAP[k]) if k in FILTERS_MAP else v for k, v in filters.items()} + filters = {k: v for k, v in filters.items() if v is not None and (not isinstance(v, (list, set, str, tuple)) or len(v) > 0)} + for field in filters: + allowed = config.get_issue_search_allowed_values(field) + if allowed is not None and filters[field] is not None: + filters[field] = 
util.intersection(filters[field], allowed) + filters = {k: util.list_to_csv(v) for k, v in filters.items() if v} log.debug("Sanitized issue search filters %s", str(filters)) return filters diff --git a/sonar/platform.py b/sonar/platform.py index 772b608a..1d73450a 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -42,7 +42,6 @@ from sonar import errcodes, settings, devops, version, sif, exceptions from sonar.permissions import permissions, global_permissions, permission_templates -from sonar.audit import config from sonar.audit.rules import get_rule, RuleId import sonar.audit.severities as sev import sonar.audit.types as typ @@ -55,6 +54,7 @@ _SONAR_TOOLS_AGENT = f"sonar-tools {version.PACKAGE_VERSION}" _UPDATE_CENTER = "https://raw.githubusercontent.com/SonarSource/sonar-update-center-properties/master/update-center-source.properties" +_APP_JSON = "application/json" LTA = None LATEST = None _HARDCODED_LTA = (9, 9, 6) @@ -192,9 +192,9 @@ def post(self, api: str, params: types.ApiParams = None, **kwargs) -> requests.R """ if util.is_api_v2(api): if "headers" in kwargs: - kwargs["headers"]["content-type"] = "application/json" + kwargs["headers"]["content-type"] = _APP_JSON else: - kwargs["headers"] = {"content-type": "application/json"} + kwargs["headers"] = {"content-type": _APP_JSON} return self.__run_request(requests.post, api, data=json.dumps(params), **kwargs) else: return self.__run_request(requests.post, api, params, **kwargs) @@ -228,7 +228,7 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N """Makes an HTTP request to SonarQube""" mute = kwargs.pop("mute", ()) api = _normalize_api(api) - headers = {"user-agent": self._user_agent} + headers = {"user-agent": self._user_agent, "accept": _APP_JSON} headers.update(kwargs.get("headers", {})) if params is None: params = {} @@ -543,7 +543,7 @@ def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: problems += _audit_setting_set(key, False, platform_settings, 
audit_settings, settings_url) problems += ( - self._audit_project_default_visibility() + self._audit_project_default_visibility(audit_settings) + self._audit_global_permissions() + self._audit_logs(audit_settings) + permission_templates.audit(self, audit_settings) @@ -609,7 +609,7 @@ def _audit_logs(self, audit_settings: types.ConfigSettings) -> list[Problem]: problems.append(Problem(rule, f"{self.url}/admin/system", nb_deprecation)) return problems - def _audit_project_default_visibility(self) -> list[Problem]: + def _audit_project_default_visibility(self, audit_settings: types.ConfigSettings) -> list[Problem]: """Audits whether project default visibility is public""" log.info("Auditing project default visibility") problems = [] @@ -623,7 +623,7 @@ def _audit_project_default_visibility(self) -> list[Problem]: resp = self.get(settings.Setting.API[c.GET], params={"keys": "projects.default.visibility"}) visi = json.loads(resp.text)["settings"][0]["value"] log.info("Project default visibility is '%s'", visi) - if config.get_property("checkDefaultProjectVisibility") and visi != "private": + if audit_settings.get("audit.globalSettings.defaultProjectVisibility", "private") != visi: rule = get_rule(RuleId.SETTING_PROJ_DEFAULT_VISIBILITY) problems.append(Problem(rule, f"{self.url}/admin/projects_management", visi)) return problems diff --git a/sonar/sif_node.py b/sonar/sif_node.py index 8e161c9b..1c17ce58 100644 --- a/sonar/sif_node.py +++ b/sonar/sif_node.py @@ -31,7 +31,7 @@ from sonar.util import types from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem -import sonar.audit.config as audit_conf +from sonar import config _RELEASE_DATE_6_7 = datetime.datetime(2017, 11, 8) + relativedelta(months=+6) _RELEASE_DATE_7_9 = datetime.datetime(2019, 7, 1) + relativedelta(months=+6) @@ -147,7 +147,7 @@ def __audit_jvm_version(obj: object, obj_name: str, jvm_props: dict[str, str]) - except KeyError: log.warning("%s: Can't find SonarQube version in 
SIF, auditing this part is skipped", obj_name) return [] - java_compat = audit_conf.get_java_compatibility() + java_compat = config.get_java_compatibility() log.debug("Java compatibility matrix: %s", str(java_compat)) if java_version not in java_compat: log.warning("%s: Java version %d not listed in compatibility matrix, skipping JVM version audit", obj_name, java_version) diff --git a/sonar/tasks.py b/sonar/tasks.py index e03b3bcf..491d8a8f 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -34,7 +34,7 @@ import sonar.utilities as util from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem -from sonar.audit.config import get_scanners_versions +from sonar.config import get_scanners_versions from sonar.util import types, cache SUCCESS = "SUCCESS" diff --git a/sonar/utilities.py b/sonar/utilities.py index 99c84658..1d6295aa 100644 --- a/sonar/utilities.py +++ b/sonar/utilities.py @@ -652,23 +652,16 @@ def deduct_format(fmt: Union[str, None], filename: Union[str, None], allowed_for def dict_remap(original_dict: dict[str, str], remapping: dict[str, str]) -> dict[str, str]: - """Adjust findings search filters based on Sonar version""" + """Key old keys by new key in a dict""" if not original_dict: return {} - remapped_filters = original_dict.copy() - for old, new in remapping.items(): - if old in original_dict and new not in remapped_filters: - remapped_filters[new] = remapped_filters.pop(old) - return remapped_filters + return {remapping[k] if k in remapping else k: v for k, v in original_dict.items()} -def list_re_value(a_list: list[str], mapping: dict[str, str]) -> list[str]: - """Adjust findings search filters based on Sonar version""" +def list_remap(a_list: list[str], mapping: dict[str, str]) -> list[str]: if not a_list or len(a_list) == 0: return [] - for old, new in mapping.items(): - a_list = [new if v == old else v for v in a_list] - return a_list + return list(set(mapping[v] if v in mapping else v for v in a_list)) def 
dict_stringify(original_dict: dict[str, str]) -> dict[str, str]: @@ -681,6 +674,11 @@ def dict_stringify(original_dict: dict[str, str]) -> dict[str, str]: return original_dict +def dict_reverse(map: dict[str, str]) -> dict[str, str]: + """Reverses a dict""" + return {v: k for k, v in map.items()} + + def inline_lists(element: any, exceptions: tuple[str]) -> any: """Recursively explores a dict and replace string lists by CSV strings, if list values do not contain commas""" if isinstance(element, dict): diff --git a/test/config.json b/test/config.json deleted file mode 100644 index 771ce258..00000000 --- a/test/config.json +++ /dev/null @@ -1,4902 +0,0 @@ -{ - "applications": { - "App_with_branches": { - "branches": { - "break": { - "projects": { - "training:branches": "break", - "missing-project": "main" - } - }, - "master": { - "isMain": true, - "projects": { - "training:branches": "develop" - } - }, - "release-1.1": { - "projects": { - "training:branches": "release-1.1" - } - } - }, - "name": "App with branches", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "visibility": "public" - }, - "Azure_DevOps_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:azdo-mono-cli": "master", - "demo:azdo-mono-dotnet": "master", - "demo:azdo-mono-gradle": "master", - "demo:azdo-mono-maven": "master", - "demo:github-actions-cli": "non-existing-branch", - "non-existing-proj": "main" - } - } - }, - "name": "Azure DevOps monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - }, - "users": { - "ado": "admin" - } - }, - "tags": "monorepo, azure-devops", - "visibility": "public" - }, - "Bitbucket_Cloud_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:bbc-dotnet": "master", - "demo:bbc-gradle": "master", - "demo:bbc-maven": "master", - "demo:bbc-proj": "master" - } - } - }, - "name": "Bitbucket 
Cloud monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "visibility": "public" - }, - "Bitbucket_Server_LATEST_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:bbs-latest-mono-cli": "master", - "demo:bbs-latest-mono-dotnet": "master", - "demo:bbs-latest-mono-gradle": "master", - "demo:bbs-latest-mono-maven": "master" - } - } - }, - "name": "Bitbucket Server LATEST monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "tags": "bbs-latest", - "visibility": "public" - }, - "Bitbucket_Server_LTS_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:bbs-lts-dotnet": "master", - "demo:bbs-lts-gradle": "master", - "demo:bbs-lts-maven": "master", - "demo:bbs-lts-proj": "master" - } - } - }, - "name": "Bitbucket Server LTS monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "visibility": "public" - }, - "Corporate_Web_Site": { - "branches": { - "Release 1.1": { - "projects": { - "WEBSITE-TIER1-WEB": "release-1.1", - "WEBSITE-TIER2-BIZLOGIC": "release-1.1", - "WEBSITE-TIER3-DBLAYER": "release-1.1" - } - }, - "Release-2020.1": { - "projects": { - "WEBSITE-TIER1-WEB": "release-2020.1", - "WEBSITE-TIER2-BIZLOGIC": "release-2020.1", - "WEBSITE-TIER3-DBLAYER": "release-2020.1" - } - }, - "master": { - "isMain": true, - "projects": { - "WEBSITE-TIER1-WEB": "master", - "WEBSITE-TIER2-BIZLOGIC": "master", - "WEBSITE-TIER3-DBLAYER": "master" - } - }, - "realease-2019.4": { - "projects": { - "WEBSITE-TIER1-WEB": "release-2019.4", - "WEBSITE-TIER2-BIZLOGIC": "release-2019.4", - "WEBSITE-TIER3-DBLAYER": "release-2019.4" - } - } - }, - "name": "Corporate Web Site", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - 
"visibility": "private" - }, - "GitHub_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:github-mono-jenkins-cli": "__default__", - "demo:github-mono-jenkins-dotnet": "__default__", - "demo:github-mono-jenkins-gradle": "__default__", - "demo:github-mono-jenkins-maven": "__default__" - } - } - }, - "name": "GitHub monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "tags": "github, monorepo", - "visibility": "public" - }, - "GitLab_monorepo": { - "branches": { - "master": { - "isMain": true, - "projects": { - "demo:gitlab-ci-mono-cli": "master", - "demo:gitlab-ci-mono-dotnet": "master", - "demo:gitlab-ci-mono-gradle": "master", - "demo:gitlab-ci-mono-maven": "master" - } - } - }, - "name": "GitLab-CI monorepo", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "tags": "gitlab, monorepo", - "visibility": "public" - } - }, - "globalSettings": { - "analysisScope": { - "sonar.coverage.exclusions": "**/*.html, **/*.css, **/*.scss, **/*.htm", - "sonar.cpd.exclusions": "", - "sonar.exclusions": "libs/jquery/**, **/*.min.js, generated/**, **/*DAO.java", - "sonar.global.exclusions": "", - "sonar.global.test.exclusions": "", - "sonar.inclusions": "", - "sonar.issue.enforce.multicriteria": [ - { - "resourceKey": "**/fish*.*", - "ruleKey": "squid:S1195" - }, - { - "resourceKey": "**/*NoName*.*", - "ruleKey": "squid:*Naming*" - } - ], - "sonar.issue.ignore.allfile": "", - "sonar.issue.ignore.block": [ - { - "beginBlockRegexp": "NOSONAR_BEGIN", - "endBlockRegexp": "NOSONAR_END" - } - ], - "sonar.issue.ignore.multicriteria": [ - { - "resourceKey": "**/*Name*.java", - "ruleKey": "squid:*Naming*" - }, - { - "resourceKey": "**/octopussy.*", - "ruleKey": "squid:S1195" - } - ], - "sonar.test.exclusions": "", - "sonar.test.inclusions": "" - }, - "authentication": { - "sonar.auth.bitbucket.allowUsersToSignUp": true, - 
"sonar.auth.bitbucket.clientId.secured": "", - "sonar.auth.bitbucket.clientSecret.secured": "", - "sonar.auth.bitbucket.enabled": false, - "sonar.auth.bitbucket.workspaces": "", - "sonar.auth.github.allowUsersToSignUp": true, - "sonar.auth.github.apiUrl": "https://api.github.com/", - "sonar.auth.github.clientId.secured": "", - "sonar.auth.github.clientSecret.secured": "", - "sonar.auth.github.enabled": true, - "sonar.auth.github.groupsSync": true, - "sonar.auth.github.organizations": "", - "sonar.auth.github.webUrl": "https://github.com/", - "sonar.auth.gitlab.allowUsersToSignUp": true, - "sonar.auth.gitlab.applicationId.secured": "", - "sonar.auth.gitlab.enabled": true, - "sonar.auth.gitlab.groupsSync": true, - "sonar.auth.gitlab.secret.secured": "", - "sonar.auth.gitlab.url": "https://gitlab.com", - "sonar.auth.saml.applicationId": "sonarqube", - "sonar.auth.saml.certificate.secured": "", - "sonar.auth.saml.enabled": true, - "sonar.auth.saml.group.name": "group", - "sonar.auth.saml.loginUrl": "http://keycloak:8080/auth/realms/sonarqube", - "sonar.auth.saml.providerId": "keycloak", - "sonar.auth.saml.providerName": "SAML", - "sonar.auth.saml.user.email": "email", - "sonar.auth.saml.user.login": "email", - "sonar.auth.saml.user.name": "name", - "sonar.authenticator.downcase": false, - "sonar.forceAuthentication": true - }, - "devopsIntegration": { - "Azure DevOps Services": { - "type": "azure", - "url": "https://dev.azure.com/olivierkorach" - }, - "BitBucket Server LATEST": { - "type": "bitbucket", - "url": "https://sonarsource-bitbucket-test.valiantys.net" - }, - "Bitbucket Cloud": { - "clientId": "TrmVgR6L5tXLsNWTjq", - "type": "bitbucketcloud", - "url": "https://bitbucket.org", - "workspace": "okorach" - }, - "Bitbucket Server LTS": { - "type": "bitbucket", - "url": "https://bitbucket-testing.valiantys.sonarsource.com" - }, - "GitHub.com": { - "appId": "27380", - "clientId": "Iv1.4f529f14afbf5530", - "type": "github", - "url": "https://api.github.com" - }, - 
"GitLab.com": { - "type": "gitlab", - "url": "https://gitlab.com/api/v4" - } - }, - "generalSettings": { - "email.from": "config-test@sonar.com", - "email.fromName": "Sonar Config Test", - "email.prefix": "[SONAR-CONFIG-TEST]", - "email.smtp_host.secured": "", - "email.smtp_password.secured": "", - "email.smtp_port.secured": 25, - "email.smtp_secure_connection.secured": "", - "email.smtp_username.secured": "", - "newCodePeriod": 42, - "projects.default.visibility": "private", - "sonar.allowPermissionManagementForProjectAdmins": true, - "sonar.builtInQualityProfiles.disableNotificationOnUpdate": true, - "sonar.core.serverBaseURL": "https://olivierk-sqlatest-test.eu.ngrok.io", - "sonar.cpd.cross_project": true, - "sonar.dbcleaner.auditHousekeeping": "Weekly", - "sonar.dbcleaner.branchesToKeepWhenInactive": "master, develop, trunk, release-.*, main, config-test-.*", - "sonar.dbcleaner.daysBeforeDeletingClosedIssues": 42, - "sonar.dbcleaner.daysBeforeDeletingInactiveBranchesAndPRs": 10, - "sonar.dbcleaner.hoursBeforeKeepingOnlyOneSnapshotByDay": 24, - "sonar.dbcleaner.weeksBeforeDeletingAllSnapshots": 1000, - "sonar.dbcleaner.weeksBeforeKeepingOnlyAnalysesWithVersion": 104, - "sonar.dbcleaner.weeksBeforeKeepingOnlyOneSnapshotByMonth": 52, - "sonar.dbcleaner.weeksBeforeKeepingOnlyOneSnapshotByWeek": 4, - "sonar.developerAggregatedInfo.disabled": false, - "sonar.governance.report.project.branch.frequency": "Daily", - "sonar.governance.report.view.frequency": "Weekly", - "sonar.governance.report.view.recipients": "config-test@korach.name", - "sonar.issues.defaultAssigneeLogin": "admin", - "sonar.lf.enableGravatar": true, - "sonar.lf.gravatarServerUrl": "https://secure.gravatar.com/avatar/{EMAIL_MD5}.jpg?s={SIZE}&d=identicon", - "sonar.lf.logoUrl": "", - "sonar.lf.logoWidthPx": "", - "sonar.notifications.delay": 60, - "sonar.notifications.runningDelayBeforeReportingStatus": 600, - "sonar.portfolios.recompute.hours": "", - "sonar.qualitygate.ignoreSmallChanges": false, - 
"sonar.scm.disabled": false, - "sonar.technicalDebt.developmentCost": 42, - "sonar.technicalDebt.ratingGrid": "0.03,0.07,0.15,0.42", - "sonar.validateWebhooks": true, - "webhooks": { - "Jenkins": { - "secret": "config-test", - "url": "https://config-load-test.eu.ngrok.io/sonarqube-webhook" - } - } - }, - "languages": { - "abap": { - "sonar.abap.file.suffixes": ".abap, .ab4, .flow, .asprog", - "sonar.cpd.abap.minimumLines": 42, - "sonar.cpd.abap.minimumTokens": 142 - }, - "apex": { - "sonar.apex.file.suffixes": ".cls, .trigger, .42", - "sonar.apex.pmd.reportPaths": "config/test/pmd" - }, - "cfamily": { - "sonar.c.file.suffixes": ".c, .h", - "sonar.cfamily.bullseye.reportPath": "config/test/bullseye", - "sonar.cfamily.cppunit.reportsPath": "config/test/cppunit", - "sonar.cfamily.gcov.reportsPath": "config/test/gcov", - "sonar.cfamily.ignoreHeaderComments": true, - "sonar.cfamily.llvm-cov.reportPath": "config/test/llvm-cov", - "sonar.cfamily.vscoveragexml.reportsPath": "config/test/vscoverage", - "sonar.cpp.file.suffixes": ".cc, .cpp, .cxx, .c++, .hh, .hpp, .hxx, .h++, .ipp", - "sonar.objc.file.suffixes": ".m" - }, - "cloudformation": { - "sonar.cloudformation.activate": true, - "sonar.cloudformation.cfn-lint.reportPaths": "config/test/cfn-lint", - "sonar.cloudformation.file.identifier": "AWSTemplateFormatVersion" - }, - "cobol": { - "sonar.cobol.adaprep.activation": false, - "sonar.cobol.aucobol.preprocessor.directives.default": "", - "sonar.cobol.byteBasedColumnCount": false, - "sonar.cobol.compilationConstants": "", - "sonar.cobol.copy.directories": "config/test/copybooks", - "sonar.cobol.copy.exclusions": "config/test/nocopy/**", - "sonar.cobol.copy.suffixes": "", - "sonar.cobol.db2include.directories": "", - "sonar.cobol.dialect": "ibm-enterprise-cobol", - "sonar.cobol.exec.recoveryMode": true, - "sonar.cobol.file.suffixes": "", - "sonar.cobol.preprocessor.skipping.first.matching.characters": "", - "sonar.cobol.sourceFormat": "fixed", - 
"sonar.cobol.sql.catalog.csv.path": "config/test/csv", - "sonar.cobol.sql.catalog.defaultSchema": "config/test/catalog", - "sonar.cobol.tab.width": 8, - "sonar.cpd.cobol.ignoreLiteral": true, - "sonar.cpd.cobol.minimumLines": 30, - "sonar.cpd.cobol.minimumTokens": 100 - }, - "cs": { - "sonar.cs.analyzeGeneratedCode": false, - "sonar.cs.file.suffixes": ".cs, .csx", - "sonar.cs.ignoreHeaderComments": true, - "sonar.cs.roslyn.bugCategories": "Compiler, bug", - "sonar.cs.roslyn.codeSmellCategories": "code smells", - "sonar.cs.roslyn.ignoreIssues": false, - "sonar.cs.roslyn.vulnerabilityCategories": "vulns, hotspots" - }, - "css": { - "sonar.css.file.suffixes": ".css, .less, .scss", - "sonar.css.stylelint.reportPaths": "config/test/stylelint" - }, - "flex": { - "sonar.flex.cobertura.reportPaths": "config/test/cobertura", - "sonar.flex.file.suffixes": "as" - }, - "go": { - "sonar.go.coverage.reportPaths": "config/test/converage", - "sonar.go.exclusions": "**/vendor/**", - "sonar.go.file.suffixes": ".go", - "sonar.go.golangci-lint.reportPaths": "config/test/golangci-lint", - "sonar.go.golint.reportPaths": "config/test/golint", - "sonar.go.gometalinter.reportPaths": "config/test/gometalinter", - "sonar.go.govet.reportPaths": "config/test/govet", - "sonar.go.tests.reportPaths": "config/test/tests" - }, - "html": { - "sonar.html.file.suffixes": ".html, .xhtml, .cshtml, .vbhtml, .aspx, .ascx, .rhtml, .erb, .shtm, .shtml, .cmp, .twig" - }, - "java": { - "sonar.java.checkstyle.reportPaths": "config/test", - "sonar.java.file.suffixes": ".java, .jav", - "sonar.java.pmd.reportPaths": "config/test", - "sonar.java.spotbugs.reportPaths": "config/test" - }, - "javascript": { - "sonar.javascript.environments": "amd, applescript, atomtest, browser, commonjs, couch, embertest, flow, greasemonkey, jasmine, jest, jquery, meteor, mocha, mongo, nashorn, node, phantomjs, prototypejs, protractor, qunit, rhino, serviceworker, shared-node-browser, shelljs, webextensions, worker, wsh, yui", - 
"sonar.javascript.file.suffixes": ".js, .jsx, .mjs, .vue", - "sonar.javascript.globals": "angular, goog, google, OenLayers, d3, dojo, dojox, dijit, Backbone, moment, casper, _", - "sonar.javascript.ignoreHeaderComments": true, - "sonar.javascript.lcov.reportPaths": "config/test/lcov", - "sonar.javascript.maxFileSize": 1000 - }, - "json": { - "sonar.json.file.suffixes": ".json, .jsonx" - }, - "jsp": { - "sonar.jsp.file.suffixes": ".jsp, .jspf, .jspx" - }, - "kotlin": { - "sonar.kotlin.detekt.reportPaths": "config/test/detekt", - "sonar.kotlin.file.suffixes": ".kt, .karate", - "sonar.kotlin.ktlint.reportPaths": "config/test/ktlint" - }, - "php": { - "sonar.php.coverage.reportPaths": "config/test", - "sonar.php.exclusions": "**/vendor/**, **/config/othervendor/**", - "sonar.php.file.suffixes": "php, php3, php4, php5, phtml, inc", - "sonar.php.phpstan.reportPaths": "config/test/phpstan", - "sonar.php.psalm.reportPaths": "config/test/psalm", - "sonar.php.tests.reportPath": "config/test/tests" - }, - "pli": { - "sonar.pli.extralingualCharacters": "#@$", - "sonar.pli.file.suffixes": ".pli", - "sonar.pli.ignoreHeaderComments": true, - "sonar.pli.marginLeft": 2, - "sonar.pli.marginRight": 42 - }, - "plsql": { - "sonar.plsql.file.suffixes": "sql, pks, pkb", - "sonar.plsql.ignoreHeaderComments": false - }, - "python": { - "sonar.python.bandit.reportPaths": "", - "sonar.python.coverage.reportPaths": "coverage-reports/*coverage-*.xml", - "sonar.python.file.suffixes": "py", - "sonar.python.flake8.reportPaths": "", - "sonar.python.pylint.reportPaths": "", - "sonar.python.xunit.reportPath": "xunit-reports/xunit-result-*.xml", - "sonar.python.xunit.skipDetails": false - }, - "rpg": { - "sonar.rpg.leftMarginWidth": 12, - "sonar.rpg.suffixes": ".rpg, .rpgle, .sqlrpgle, .RPG, .RPGLE, .SQLRPGLE" - }, - "ruby": { - "sonar.ruby.coverage.reportPaths": "coverage/.resultset.json", - "sonar.ruby.exclusions": "**/vendor/**", - "sonar.ruby.file.suffixes": ".rb", - 
"sonar.ruby.rubocop.reportPaths": "" - }, - "scala": { - "sonar.scala.coverage.reportPaths": "", - "sonar.scala.file.suffixes": ".scala", - "sonar.scala.scalastyle.reportPaths": "", - "sonar.scala.scapegoat.reportPaths": "" - }, - "swift": { - "sonar.swift.coverage.reportPaths": "", - "sonar.swift.file.suffixes": ".swift", - "sonar.swift.swiftLint.reportPaths": "" - }, - "terraform": { - "sonar.terraform.activate": true, - "sonar.terraform.file.suffixes": ".tf" - }, - "tsql": { - "sonar.tsql.file.suffixes": ".tsql" - }, - "typescript": { - "sonar.typescript.file.suffixes": ".ts, .tsx", - "sonar.typescript.tsconfigPath": "", - "sonar.typescript.tslint.reportPaths": "" - }, - "vb": { - "sonar.vb.file.suffixes": ".vb, .bas, .frm, .cls, .ctl, .VB, .BAS, .FRM, .CLS, .CTL", - "sonar.vb.ignoreHeaderComments": true - }, - "vbnet": { - "sonar.vbnet.analyzeGeneratedCode": false, - "sonar.vbnet.file.suffixes": ".vb", - "sonar.vbnet.ignoreHeaderComments": true, - "sonar.vbnet.roslyn.bugCategories": "", - "sonar.vbnet.roslyn.codeSmellCategoriess": "", - "sonar.vbnet.roslyn.ignoreIssues": false, - "sonar.vbnet.roslyn.vulnerabilityCategories": "" - }, - "xml": { - "sonar.xml.file.suffixes": ".xml, .xsd, .xsl, .xslt" - }, - "yaml": { - "sonar.yaml.file.suffixes": ".yaml, .yml, .yummy" - } - }, - "linters": { - "sonar.androidLint.reportPaths": "", - "sonar.eslint.reportPaths": "config/test/eslint" - }, - "permissionTemplates": { - "Default template": { - "defaultFor": "projects", - "description": "TEST: This permission template will be used as default when no other permission configuration is available", - "pattern": "", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, 
securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "Full Open Permissions": { - "description": "All users can do what they want", - "pattern": "", - "permissions": { - "groups": { - "sonar-users": "admin, codeviewer, issueadmin, scan, securityhotspotadmin, user" - } - } - }, - "Permissions for projects of Team 1": { - "pattern": "TEAM_A_.*", - "permissions": { - "groups": { - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Tech Leads": "admin, codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "Portfolios permissions": { - "defaultFor": "applications, portfolios", - "pattern": "", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - } - }, - "SonarQube Team projects": { - "description": "Permissions for projects developed by the SonarQube Team", - "pattern": "SQ-.*", - "permissions": { - "groups": { - "Team Developers": "codeviewer, issueadmin, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "Permissions françaises équipe X": { - "pattern": "training:.*", - "permissions": { - "groups": { - "sonar-administrators": "admin, codeviewer, issueadmin, scan, securityhotspotadmin, user" - } - } - } - }, - "permissions": { - "groups": { - "CI Tools": "provisioning, scan", - "Language Experts": "profileadmin", - "sonar-administrators": "admin, applicationcreator, gateadmin, portfoliocreator, profileadmin, provisioning, scan", - "sonar-users": "applicationcreator, portfoliocreator" - }, - "users": { - "admin": "admin" - } - }, - "sastConfig": { - "sonar.security.config.javasecurity": "", - "sonar.security.config.phpsecurity": "{\n \"S3649\": {\n \"sources\": [\n {\n \"methodId\": 
\"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQuery\"\n }\n ],\n \"passthroughs\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\RawUrl::RawUrl\",\n \"isWhitelist\": true,\n \"args\": [\n 1\n ]\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"mysql_query\",\n \"args\": [\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::execute\",\n \"isMethodPrefix\": true,\n \"args\": [\n 0,\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::run\",\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n },\n \"S5131\": {\n \"sources\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQueryString\"\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\Server::write\",\n \"isMethodPrefix\": true,\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n }\n}", - "sonar.security.config.pythonsecurity": "", - "sonar.security.config.roslyn.sonaranalyzer.security.cs": "" - }, - "tests": { - "sonar.coverage.jacoco.xmlReportPaths": "", - "sonar.junit.reportPaths": "" - }, - "thirdParty": { - "sonar.dependencyCheck.htmlReportPath": "target/dependency-check-report.html", - "sonar.dependencyCheck.jsonReportPath": "target/dependency-check-report.json", - "sonar.dependencyCheck.securityHotspot": false, - "sonar.dependencyCheck.severity.blocker": 9.0, - "sonar.dependencyCheck.severity.critical": 7.0, - "sonar.dependencyCheck.severity.major": 4.0, - "sonar.dependencyCheck.severity.minor": 0.0, - "sonar.dependencyCheck.skip": false, - "sonar.dependencyChecks.summarize": false, - "sonar.dependencyCheck.xmlReportPath": "target/dependency-check-report.xml" - } - }, - "groups": { - "Bad - Empty Group": "", - "CI Tools": "", - "Executives": "People with access to Portfolios only", - "Language Experts": "", - "Security Auditors": "", - "Team 2 Developers": "Development Team for Languages", - "Team Developers": "Development Team for SonarQube", - "Team Project Admins": "", - "Team Tech Leads": "", - "comma,group": "comma, group", - 
"developers270": "GitLab Group", - "developers270/tech-leads": "GitLab group", - "sonar-administrators": "System administrators" - }, - "platform": { - "edition": "enterprise", - "serverId": "B40A8E55-AWOxMI1ddSkf4BExMG4d", - "version": "9.5.0" - }, - "portfolios": { - "All_projects": { - "description": "All projects of the platform", - "name": "All projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionMode": "REST", - "visibility": "private" - }, - "Bitbucket_Cloud_projects": { - "name": "Bitbucket Cloud projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionBranch": "Babar", - "projectSelectionMode": "TAGS", - "projectSelectionTags": "bb-cloud", - "visibility": "public" - }, - "Bitbucket_Server_projects": { - "name": "Bitbucket Server LATEST projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionMode": "TAGS", - "projectSelectionTags": "bbs-latest", - "visibility": "public" - }, - "GitHub_projects": { - "name": "GitHub projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionMode": "TAGS", - "projectSelectionTags": "github, gitlab", - "visibility": "public" - }, - "GitLab_projects": { - "name": "GitLab projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionMode": "TAGS", - "projectSelectionTags": "gitlab", - "visibility": "public" - }, - "PORTFOLIO-ALL": { - "description": "All bank and insurance projects", - "name": "Whole Bank and Insurance", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - 
"projectSelectionMode": "NONE", - "subPortfolios": { - "Others": { - "byReference": false, - "name": "Others", - "projectSelectionMode": "REST" - }, - "PORTFOLIO-DIV-BANKING": { - "byReference": true - }, - "PORTFOLIO-DIV-INSURANCE": { - "byReference": true - } - }, - "visibility": "private" - }, - "PORTFOLIO-CORPORATE-BANKING": { - "description": "Corporate banking Projects", - "name": "Corporate banking projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionBranch": "foo", - "projectSelectionMode": "REGEXP", - "projectSelectionRegexp": "BANK-CORP-.*", - "visibility": "private" - }, - "PORTFOLIO-DIV-BANKING": { - "description": "All Banking division projects", - "name": "Banking division", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionMode": "MANUAL", - "projects": { - "WEBSITE-TIER1-WEB": "__default__", - "WEBSITE-TIER2-BIZLOGIC": "__default__", - "WEBSITE-TIER3-DBLAYER": "__default__", - "training:branches": "release-1.1, develop", - "demo:github-actions-cli": "non-existing-branch", - "non-existing-proj": "main" - }, - "subPortfolios": { - "PORTFOLIO-CORPORATE-BANKING": { - "byReference": true - }, - "PORTFOLIO-PRIVATE-BANKING": { - "byReference": true - }, - "PORTFOLIO-RETAIL-BANKING": { - "byReference": true - } - }, - "visibility": "private" - }, - "PORTFOLIO-DIV-INSURANCE": { - "description": "Insurance division projects", - "name": "Insurance division", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionMode": "REGEXP", - "projectSelectionRegexp": "INSURANCE-.*", - "visibility": "private" - }, - "PORTFOLIO-PRIVATE-BANKING": { - "description": "Private banking projects", - "name": "Private banking projects", 
- "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionMode": "REGEXP", - "projectSelectionRegexp": "BANK-PRIVATE-.*", - "visibility": "private" - }, - "PORTFOLIO-RETAIL-BANKING": { - "description": "Corporate banking Projects", - "name": "Retail banking projects", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin", - "sonar-users": "codeviewer, user" - } - }, - "projectSelectionMode": "MANUAL", - "projects": { - "BANK-RETAIL-401K": "__default__", - "BANK-RETAIL-CREDITRATING": "__default__", - "BANK-RETAIL-LOANSCALC": "__default__", - "BANK-RETAIL-MOBILEAPP": "__default__", - "nonexisting-proj": "__default__" - }, - "visibility": "private" - }, - "Portfolio_on_regexp_branch": { - "name": "Portfolio on regexp branch", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionBranch": "release-1.1", - "projectSelectionMode": "REGEXP", - "projectSelectionRegexp": "WEB.*", - "visibility": "public" - }, - "Top_Level": { - "description": "Hierarchy top down", - "name": "Top Level", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionMode": "NONE", - "subPortfolios": { - "Sub_Portfolio_1": { - "byReference": false, - "name": "Sub Portfolio 1", - "projectSelectionMode": "NONE", - "subPortfolios": { - "Sub_Sub_1_1": { - "byReference": false, - "name": "Sub Sub 1.1", - "projectSelectionMode": "MANUAL", - "projects": { - "demo:azdo-dotnetcore": "__default__", - "jenkins:audio-video-tools": "__default__", - "training:branches": "release-1.1" - } - }, - "Sub_Sub_1_2": { - "byReference": false, - "name": "Sub Sub 1.2", - "projectSelectionMode": "REGEXP", - "projectSelectionRegexp": "FOO.*" - } - } - }, - "Sub_Portfolio_2": { - "byReference": false, - "name": "Sub Portfolio 
2", - "projectSelectionMode": "TAGS", - "projectSelectionTags": "gitlab" - } - }, - "visibility": "public" - }, - "bad:empty_portfolio": { - "name": "Empty portfolio", - "permissions": { - "groups": { - "sonar-administrators": "admin, issueadmin, securityhotspotadmin" - } - }, - "projectSelectionMode": "NONE", - "visibility": "public" - } - }, - "projects": { - "BANK-CORP-INVESTMENT": { - "name": "Corporate Banking - Investments", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "superbank.com" - }, - "BANK-CORP-MERGERS": { - "name": "Corporate Banking - Mergers and Acquisitions", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "superbank.com" - }, - "BANK-CORP-TAXOPTIMIZATION": { - "name": "Corporate Banking - Tax Optimization", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "superbank.com" - }, - "BANK-PRIVATE-ASSETMGT": { - "name": 
"Private Banking - Asset Management", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "BANK-PRIVATE-WEALTHMGT": { - "name": "Private Banking - Wealth Management", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "BANK-RETAIL-401K": { - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "http://redirect.sonarsource.com/plugins/ldap.html" - }, - { - "name": "ci", - "type": "ci", - "url": "https://travis-ci.org/SonarSource/sonar-ldap" - }, - { - "name": "issue", - "type": "issue", - "url": "http://jira.sonarsource.com/browse/LDAP" - }, - { - "name": "scm", - "type": "scm", - "url": "https://github.com/SonarSource/sonar-ldap" - } - ], - "name": "Retail Banking - 401K", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "BANK-RETAIL-CREDITRATING": { - "name": "Retail Banking - Credit Rating", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "BANK-RETAIL-LOANSCALC": { - 
"name": "Retail Banking - Loans Calculator", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "BANK-RETAIL-MOBILEAPP": { - "name": "Retail Banking - Mobile App", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "INSURANCE-LIFEINSURANCE": { - "name": "Insurance - Life Insurance", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "superbank.com" - }, - "WEBSITE-TIER1-WEB": { - "branches": { - "master": { - "isMain": true - }, - "release-1.1": { - "keepWhenInactive": true - }, - "release-2019.4": { - "keepWhenInactive": true - }, - "release-2020.1": { - "keepWhenInactive": true - } - }, - "name": "Bank Web Site: Tier 1 - Web Presentation", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "superbank.com" - }, - "WEBSITE-TIER2-BIZLOGIC": { - "branches": { - "master": { - "isMain": true - }, 
- "release-1.1": { - "keepWhenInactive": true - }, - "release-2019.4": { - "keepWhenInactive": true - }, - "release-2020.1": { - "keepWhenInactive": true - } - }, - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "http://redirect.sonarsource.com/plugins/ldap.html" - }, - { - "name": "ci", - "type": "ci", - "url": "https://travis-ci.org/SonarSource/sonar-ldap" - }, - { - "name": "issue", - "type": "issue", - "url": "http://jira.sonarsource.com/browse/LDAP" - }, - { - "name": "scm", - "type": "scm", - "url": "https://github.com/SonarSource/sonar-ldap" - } - ], - "name": "Bank Web Site: Tier 2 - Biz Logic", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "superbank.com" - }, - "WEBSITE-TIER3-DBLAYER": { - "branches": { - "master": { - "isMain": true - }, - "release-1.1": { - "keepWhenInactive": true - }, - "release-2019.4": { - "keepWhenInactive": true - }, - "release-2020.1": { - "keepWhenInactive": true - } - }, - "name": "Bank Web Site: Tier 3 - DB Layer", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way ++", - "tags": "superbank.com" - }, - "bad-project-for-audit-1": { - "branches": { - "branch,with+%funny name": {}, - "branch-with-no-code": {}, - "master": { - "isMain": true - 
} - }, - "name": "Bad project for audit 1", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "bad:never-analyzed": { - "name": "Bad - Never analyzed", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:azdo-cli": { - "binding": { - "key": "Azure DevOps Services", - "repository": "demo-cli", - "slug": "demo-azdo" - }, - "name": "Azure DevOps / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:azdo-dotnet": { - "name": "AzDO .Net Core project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": 
"user" - } - }, - "tags": "azure-devops, dotnetcore, dotnet" - }, - "demo:azdo-dotnetcore": { - "binding": { - "key": "Azure DevOps Services", - "repository": "demo-dotnet", - "slug": "demo-azdo" - }, - "name": "AzDO / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:azdo-dotnetfwk": { - "binding": { - "key": "Azure DevOps Services", - "repository": "demo-dotnet-fwk", - "slug": "demo-azdo" - }, - "name": "AzDO .Net Framework project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:azdo-gradle": { - "name": "AzDO / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o Hotspots" - }, - "demo:azdo-maven": { - "binding": { - "key": "Azure DevOps Services", - "repository": "demo-maven", - "slug": "demo-azdo" - }, - "name": "Azure DevOps / Maven", - "permissions": { - "groups": { - "CI Tools": "codeviewer, 
scan, user", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, scan, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityProfiles": { - "xml": "Olivier" - } - }, - "demo:azdo-mono-cli": { - "binding": { - "key": "Azure DevOps Services", - "monorepo": true, - "repository": "demo-monorepo", - "slug": "demo-azdo" - }, - "name": "AzDO / monorepo / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o Hotspots", - "sonare.coverage.exclusions": "**/gen/**/*, **/*DAO.java", - "sonar.cpd.exclusions": "**/gen/**/*, **/*DAO.java", - "sonar.exclusions": "**/node_modules/**/*, fileThatCrashesAnalysis.cs", - "sonar.issue.ignore.allfile": [ - { - "fileRegexp": ".*Generated by MotoGen v2\\.1.*" - } - ], - "tags": "azure-devops, cli, e-banking" - }, - "demo:azdo-mono-dotnet": { - "binding": { - "key": "Azure DevOps Services", - "monorepo": true, - "repository": "demo-monorepo", - "slug": "demo-azdo" - }, - "name": "AzDO / Monorepo / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - 
}, - "demo:azdo-mono-gradle": { - "binding": { - "key": "Azure DevOps Services", - "monorepo": true, - "repository": "demo-monorepo", - "slug": "demo-azdo" - }, - "name": "AzDO / Monorepo / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o Hotspots" - }, - "demo:azdo-mono-maven": { - "binding": { - "key": "Azure DevOps Services", - "monorepo": true, - "repository": "demo-monorepo", - "slug": "demo-azdo" - }, - "name": "AzDo / Monorepo / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o Hotspots", - "tags": "strategic" - }, - "demo:azdo-project": { - "name": "AzDO project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:bbc-dotnet": { - "binding": { - "key": "Bitbucket Cloud", - "monorepo": true, - "repository": "bb-pr-demo" - }, - "name": "Bitbucket Cloud project - .Net Core", - 
"permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bb-cloud, dotnetcore" - }, - "demo:bbc-gradle": { - "binding": { - "key": "Bitbucket Cloud", - "monorepo": true, - "repository": "bb-pr-demo" - }, - "name": "Bitbucket Cloud project - Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bb-cloud, gradle" - }, - "demo:bbc-maven": { - "binding": { - "key": "Bitbucket Cloud", - "monorepo": true, - "repository": "bb-pr-demo" - }, - "name": "Bitbucket Cloud project - Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bb-cloud, maven" - }, - "demo:bbc-pipes-cli": { - "binding": { - "key": "Bitbucket Cloud", - "repository": "demo-cli" - }, - "name": "BB Cloud / Pipes / CLI", - "permissions": { - "groups": { - "CI 
Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:bbc-proj": { - "binding": { - "key": "Bitbucket Cloud", - "monorepo": true, - "repository": "bb-pr-demo" - }, - "name": "Bitbucket Cloud project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bb-cloud, cli" - }, - "demo:bbcloud-pipes-dotnet": { - "binding": { - "key": "Bitbucket Cloud", - "repository": "demo-dotnet" - }, - "name": "BBCloud / Pipes / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:bbcloud-pipes-gradle": { - "binding": { - "key": "Bitbucket Cloud", - "repository": "demo-gradle" - }, - "name": "BBCloud / Pipes / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech 
Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:bbcloud-pipes-maven": { - "binding": { - "key": "Bitbucket Cloud", - "repository": "demo-maven" - }, - "name": "BBCloud / Pipes / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:bbs-latest-cli": { - "binding": { - "key": "BitBucket Server LATEST", - "repository": "OK", - "slug": "demo-cli" - }, - "name": "Bitbucket Latest / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "bitbucket" - }, - "demo:bbs-latest-dotnet": { - "binding": { - "key": "BitBucket Server LATEST", - "repository": "OK", - "slug": "demo-dotnet" - }, - "name": "Bitbucket Latest / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o 
coverage", - "tags": "bbs-latest, dotnetcore" - }, - "demo:bbs-latest-gradle": { - "binding": { - "key": "BitBucket Server LATEST", - "repository": "OK", - "slug": "demo-gradle" - }, - "name": "Bitbucket Latest / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-latest, gradle" - }, - "demo:bbs-latest-maven": { - "binding": { - "key": "BitBucket Server LATEST", - "repository": "OK", - "slug": "demo-maven" - }, - "name": "Bitbucket Latest project - Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-latest, maven" - }, - "demo:bbs-latest-mono-cli": { - "binding": { - "key": "BitBucket Server LATEST", - "monorepo": true, - "repository": "OK", - "slug": "demo-monorepo" - }, - "name": "Bitbucket Latest / Mono / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": 
"user" - } - }, - "tags": "bbs-latest, cli, monorepo" - }, - "demo:bbs-latest-mono-dotnet": { - "binding": { - "key": "BitBucket Server LATEST", - "monorepo": true, - "repository": "OK", - "slug": "demo-monorepo" - }, - "name": "Bitbucket Latest / Mono / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "bbs-latest, dotnet, dotnetcore" - }, - "demo:bbs-latest-mono-gradle": { - "binding": { - "key": "BitBucket Server LATEST", - "monorepo": true, - "repository": "OK", - "slug": "demo-monorepo" - }, - "name": "Bitbucket Latest / Mono / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "bbs-latest, gradle, monorepo" - }, - "demo:bbs-latest-mono-maven": { - "binding": { - "key": "BitBucket Server LATEST", - "monorepo": true, - "repository": "OK", - "slug": "demo-monorepo" - }, - "name": "Bitbucket Latest / Mono / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - 
"sonar-users": "user" - } - }, - "tags": "bbs-latest, monorepo, maven" - }, - "demo:bbs-lts-dotnet": { - "binding": { - "key": "Bitbucket Server LTS", - "monorepo": true, - "repository": "SERVICES", - "slug": "oko-bitbucket-demo" - }, - "name": "Bitbucket LTS project - .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-lts, dotnetcore" - }, - "demo:bbs-lts-gradle": { - "binding": { - "key": "Bitbucket Server LTS", - "monorepo": true, - "repository": "SERVICES", - "slug": "oko-bitbucket-demo" - }, - "name": "Bitbucket LTS project - Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-lts, gradle" - }, - "demo:bbs-lts-maven": { - "binding": { - "key": "Bitbucket Server LTS", - "monorepo": true, - "repository": "SERVICES", - "slug": "oko-bitbucket-demo" - }, - "name": "Bitbucket LTS project - Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - 
"sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-lts, maven" - }, - "demo:bbs-lts-proj": { - "binding": { - "key": "Bitbucket Server LTS", - "monorepo": true, - "repository": "SERVICES", - "slug": "oko-bitbucket-demo" - }, - "name": "Bitbucket LTS project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "bbs-lts, cli" - }, - "demo:conditional-compilation": { - "branches": { - "branch-noscreen": {}, - "master": { - "isMain": true - } - }, - "name": "C/C++ Conditional Compilation", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:github-actions-cli": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-actions-cli", - "summaryCommentEnabled": true - }, - "branches": { - "main": { - "isMain": true, - "newCode": "NUMBER_OF_DAYS = 180" - } - }, - "name": "GitHub / Actions / CLI", - "newCodePeriod": "REFERENCE_BRANCH = main", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, 
user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, actions, github-actions, cli" - }, - "demo:github-actions-dotnet": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-actions-dotnet", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way ++", - "sonar.security.config.phpsecurity": "{\n \"S3649\": {\n \"sources\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQueryNEW\"\n }\n ],\n \"passthroughs\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\RawUrl::RawUrl\",\n \"isWhitelist\": true,\n \"args\": [\n 1\n ]\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"mysql_query\",\n \"args\": [\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::execute\",\n \"isMethodPrefix\": true,\n \"args\": [\n 0,\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::run\",\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n },\n \"S5131\": {\n \"sources\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQueryString\"\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\Server::write\",\n \"isMethodPrefix\": true,\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n }\n}", - "tags": "github, actions, github-actions, dotnet" - }, - "demo:github-actions-gradle": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-actions-gradle", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / Gradle", - "permissions": { - "groups": { 
- "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, actions, github-actions, gradle" - }, - "demo:github-actions-maven": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-actions-maven", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, actions, github-actions, maven" - }, - "demo:github-actions-mono-cli": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/pr-demo", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / monorepo CLI", - "newCodePeriod": "NUMBER_OF_DAYS = 30", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "qualityProfiles": { - "py": "Python Olivier" - }, - "tags": "github, actions, github-actions, monorepo, cli" - }, - "demo:github-actions-mono-dotnet": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/pr-demo", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / monorepo .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, 
securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "github, actions, github-actions, monorepo, dotnet" - }, - "demo:github-actions-mono-gradle": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/pr-demo", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / monorepo Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "github, actions, github-actions, monorepo, gradle" - }, - "demo:github-actions-mono-maven": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/pr-demo", - "summaryCommentEnabled": true - }, - "name": "GitHub / Actions / monorepo Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "qualityProfiles": { - "java": "Olivier way" - }, - "tags": "github, actions, github-actions, monorepo, maven" - }, - "demo:github-jenkins-cli": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-cli-jenkins", - "summaryCommentEnabled": true - }, - "branches": { - "main": { - "isMain": true - }, - "python-version-and-tags": {} - }, - "name": "GitHub / Jenkins / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, 
securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, cli" - }, - "demo:github-jenkins-dotnet": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-dotnet-jenkins", - "summaryCommentEnabled": true - }, - "name": "GitHub / Jenkins / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, dotnetcore, jenkins" - }, - "demo:github-jenkins-gradle": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-gradle-jenkins", - "summaryCommentEnabled": true - }, - "branches": { - "add-tags": {}, - "fix-project-name": {}, - "main": { - "isMain": true - } - }, - "name": "GitHub / Jenkins / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, gradle" - }, - "demo:github-jenkins-maven": { - "binding": { - "key": "GitHub.com", - "repository": "okorach/demo-maven-jenkins", - "summaryCommentEnabled": true - }, - "branches": { - "fix-project-name": {}, - "hardening": {}, - "main": { - "isMain": true - }, - "project-tags": {}, - "some-branch": {} - }, - "name": "GitHub / Jenkins / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, maven, strategic" - 
}, - "demo:github-mono-jenkins-cli": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/demo-jenkins", - "summaryCommentEnabled": true - }, - "branches": { - "fix-dotnet-core-project-name": {}, - "main": { - "isMain": true - }, - "set-tags-on-the-fly-2": {} - }, - "name": "GitHub / Jenkins / monorepo CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, monorepo, cli" - }, - "demo:github-mono-jenkins-dotnet": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/demo-mono-jenkins", - "summaryCommentEnabled": true - }, - "name": "GitHub / Jenkins / monorepo .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, monorepo, dotnet" - }, - "demo:github-mono-jenkins-gradle": { - "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/demo-jenkins", - "summaryCommentEnabled": true - }, - "branches": { - "fix-dotnet-core-project-name": {}, - "main": { - "isMain": true - }, - "set-tags-on-the-fly-2": {} - }, - "name": "GitHub / Jenkins / monorepo Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, monorepo, gradle" - }, - "demo:github-mono-jenkins-maven": { 
- "binding": { - "key": "GitHub.com", - "monorepo": true, - "repository": "okorach/demo-jenkins", - "summaryCommentEnabled": true - }, - "name": "GitHub / Jenkins / monorepo Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "github, jenkins, monorepo, maven" - }, - "demo:gitlab-ci-cli": { - "binding": { - "key": "GitLab.com", - "repository": "30584574" - }, - "name": "GitLab-CI / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "cli, gitlab, gitlab-ci" - }, - "demo:gitlab-ci-dotnet": { - "binding": { - "key": "GitLab.com", - "repository": "30602626" - }, - "name": "GitLab-CI / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "demo:gitlab-ci-gradle": { - "binding": { - "key": "GitLab.com", - "repository": "30602438" - }, - "branches": { - "main": { - "keepWhenInactive": true - }, - "master": { - "isMain": true - } - }, - "name": "GitLab-CI / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, 
user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage" - }, - "demo:gitlab-ci-maven": { - "binding": { - "key": "GitLab.com", - "repository": "30599779" - }, - "branches": { - "main": { - "isMain": true - }, - "release-1.x": { - "keepWhenInactive": true - } - }, - "name": "GitLab-CI / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, gitlab-ci, maven, team-xxx" - }, - "demo:gitlab-ci-mono-cli": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "15953220" - }, - "name": "GitLab-CI / monorepo CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, cli" - }, - "demo:gitlab-ci-mono-dotnet": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "15953220" - }, - "name": "GitLab-CI / monorepo .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "sonar.exclusions": [ - "gen/**/*", - "full/path/to/file/particular,file.cs", - "**/gen*.cs" - ], - "tags": "gitlab, dotnetcore" - }, - "demo:gitlab-ci-mono-gradle": { - "binding": { - "key": "GitLab.com", - 
"monorepo": true, - "repository": "15953220" - }, - "name": "GitLab-CI / monorepo Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, gradle" - }, - "demo:gitlab-ci-mono-maven": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "15953220" - }, - "name": "GitLab-CI / monorepo Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, maven" - }, - "demo:gitlab-jenkins-cli": { - "binding": { - "key": "GitLab.com", - "repository": "30452092" - }, - "name": "GitLab / Jenkins / CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, cli" - }, - "demo:gitlab-jenkins-dotnet": { - "binding": { - "key": "GitLab.com", - "repository": "30605543" - }, - "name": "GitLab / Jenkins / .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, dotnet" - }, - "demo:gitlab-jenkins-gradle": { - "binding": { - "key": "GitLab.com", - "repository": "30453671" - }, - "name": "GitLab / 
Jenkins / Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, gradle" - }, - "demo:gitlab-jenkins-maven": { - "binding": { - "key": "GitLab.com", - "repository": "30452699" - }, - "name": "GitLab / Jenkins / Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, maven" - }, - "demo:gitlab-jenkins-mono-gradle": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "30439654" - }, - "name": "GitLab / Jenkins / monorepo Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, monorepo, jenkins" - }, - "demo:gitlab-jenkins-mono-maven": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "30439654" - }, - "name": "GitLab / Jenkins / monorepo Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, monorepo, maven" - }, - "demo:gitlab-mono-jenkins-cli": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "30439654" - }, - 
"name": "GitLab / Jenkins / monorepo CLI", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, monorepo, cli" - }, - "demo:gitlab-mono-jenkins-dotnet": { - "binding": { - "key": "GitLab.com", - "monorepo": true, - "repository": "30439654" - }, - "name": "GitLab / Jenkins / monorepo .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "gitlab, jenkins, monorepo, dotnet" - }, - "demo:joomla": { - "name": "Demo: Joomla", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "strategic" - }, - "demo:pull-request-bbs-jenkins": { - "branches": { - "feature/hb": {}, - "master": { - "isMain": true - } - }, - "name": "Demo: Pull Request Bitbucket Server from Jenkins", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } 
- } - }, - "demo:sca-log4shell-detect-gradle": { - "name": "SCA demo - log4shell detect - Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "demo:sca-log4shell-detect-maven": { - "branches": { - "dont-use-log4j": {}, - "fix-log4j-dependency": {}, - "master": { - "isMain": true - } - }, - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "http://www.example.com" - } - ], - "name": "SCA demo - Log4shell detect - Maven", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "jenkins-pipeline:sonar-java": { - "branches": { - "master": { - "isMain": true, - "newCode": "SPECIFIC_ANALYSIS = 2021-01-18T14:45:21+0100" - } - }, - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "http://redirect.sonarsource.com/plugins/java.html" - }, - { - "name": "ci", - "type": "ci", - "url": "https://travis-ci.org/SonarSource/sonar-java" - }, - { - "name": "issue", - "type": "issue", - "url": "https://jira.sonarsource.com/browse/SONARJAVA" - }, - { - "name": "scm", - "type": "scm", - "url": "https://github.com/SonarSource/sonar-java" - }, - { - "name": "ci", - "type": "ci", - "url": "https://foo.com" - }, - { - "name": "ci", - "type": "ci", - "url": "https://travis-ci.org/SonarSource/sonar-java" - } - ], - "name": "SonarJava - Jenkins", - "newCodePeriod": "PREVIOUS_VERSION", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, 
issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "tags": "jenkins-pipeline, jenkins" - }, - "jenkins:audio-video-tools": { - "branches": { - "master": { - "isMain": true, - "newCode": "NUMBER_OF_DAYS = 60" - } - }, - "name": "Audio Video tools - Jenkins", - "newCodePeriod": "NUMBER_OF_DAYS = 90", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "jenkins, jenkins-classic, olivier, accenture" - }, - "jenkins:cpp-sample": { - "name": "C++ example - Jenkins", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "sonar.coverage.exclusions": "**/*.json, fileThatWillNeverHaveTests.cs", - "sonar.exclusions": "**/*DAO.java, vendor/**/*", - "tags": "jenkins, norway, strategic" - }, - "jenkins:dotnet": { - "name": "Demo: Dotnet Core with Jenkins", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", 
- "sonar-users": "user" - } - }, - "sonar.issue.ignore.multicriteria": [ - { - "resourceKey": "**/MyMockDirectory/*", - "ruleKey": "csharpsquid:S2068" - } - ] - }, - "jenkins:excel-tools": { - "name": "Excel tools - Jenkins", - "newCodePeriod": "NUMBER_OF_DAYS = 90", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "strategic" - }, - "jenkins:sonarqube-tools": { - "name": "SonarQube Tools - Jenkins", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "tags": "jenkins, jenkins-classic, olivier" - }, - "no_code_1": { - "name": "Project with no code 1", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "no_code_2": { - "branches": { - "empty-branch": {}, - "master": { - "isMain": true - } - }, - "name": "Project with no code 2", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } 
- } - }, - "okorach_audio-video-tools": { - "branches": { - "master": { - "isMain": true, - "newCode": "PREVIOUS_VERSION" - } - }, - "name": "Audio Video Tools", - "newCodePeriod": "REFERENCE_BRANCH = master", - "permissions": { - "groups": { - "CI Tools": "codeviewer, scan, user", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "okorach_my-new-project_AYCEP_vD3j4BmzJz7Tx_": { - "binding": { - "key": "GitLab.com", - "repository": "30576180" - }, - "name": "My new project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "okorach_sonar-tools": { - "branches": { - "master": { - "isMain": true - }, - "release-1.8.1": { - "keepWhenInactive": true - }, - "release-1.8.x": { - "keepWhenInactive": true - } - }, - "name": "Sonar Tools", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage" - }, - "only_provisioned": { - "name": "Only provisioned project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", 
- "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "org.owasp:benchmark": { - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "https://www.owasp.org/index.php/Benchmark" - } - ], - "name": "OWASP Benchmark Project", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way ++", - "tags": "strategic" - }, - "prov": { - "name": "prov", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "source": { - "name": "source", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "target": { - "name": "target", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "test-ublox": { - "name": "test-ublox", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - 
"test:bbs-latest-cli": { - "name": "Test: Bitbucket Latest", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "test:bbs-latest-dotnet": { - "name": "Test: Bitbucket Latest - .Net Core", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "test:bbs-latest-gradle": { - "name": "Test: Bitbucket Latest - Gradle", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - } - }, - "test:bbs-latest-maven": { - "binding": { - "key": "BitBucket Server LATEST", - "monorepo": true, - "repository": "OK", - "slug": "test-bbs-latest" - }, - "branches": { - "even-longer": {}, - "longer-file": {}, - "longer-file-2": {}, - "master": { - "isMain": true, - "newCode": "NUMBER_OF_DAYS = 30" - } - }, - "name": "Test: Bitbucket Latest - Maven", - "newCodePeriod": "REFERENCE_BRANCH = master", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "qualityGate": "Sonar way w/o coverage", - "qualityProfiles": { - "java": "All rules" - } - }, - "test:cpp-symbolic-exec": { - "name": "Test: C++ symbolic execution", - 
"permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "test_cobol": { - "name": "test_cobol", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team Developers": "codeviewer, user", - "Team Project Admins": "admin, codeviewer, user", - "Team Tech Leads": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "webhooks": { - "JenkinsWeb": { - "secret": "tagada", - "url": "http://foo.fr" - } - } - }, - "training:branches": { - "branches": { - "break": { - "newCode": "REFERENCE_BRANCH = release-1.1" - }, - "develop": { - "isMain": true, - "newCode": "SPECIFIC_ANALYSIS = 2018-06-01T00:00:00+0200" - }, - "release-1.1": { - "keepWhenInactive": true, - "newCode": "REFERENCE_BRANCH = develop" - } - }, - "name": "Training: Branches", - "newCodePeriod": "PREVIOUS_VERSION", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "codeviewer, issueadmin, securityhotspotadmin, user" - } - }, - "sonar.androidLint.reportPaths": "data/android.xml", - "sonar.test.exclusions": "**/test_file_to_exclude.*" - }, - "training:complexity": { - "name": "Training: Cyclomatic vs Cognitive complexity", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - 
"sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "qualityProfiles": { - "java": "Corp way" - } - }, - "training:coverage": { - "branches": { - "issue-on-test-files": {}, - "master": { - "isMain": true - }, - "partial-coverage": {}, - "partial-coverage-2": {} - }, - "name": "Training: Coverage", - "permissions": { - "groups": { - "sonar-administrators": "admin, codeviewer, issueadmin, scan, securityhotspotadmin, user" - } - } - }, - "training:cpp-scan": { - "name": "Training: C++ scan with build wrapper", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - }, - "tags": "accenture" - }, - "training:external-issues": { - "name": "Training: External issues import", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "training:no-scm": { - "name": "Training: Metrics on new code without SCM", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "training:security": { - "links": [ - { - "name": "homepage", - "type": "homepage", - "url": "http://maven.apache.org" - } - ], - "name": "Training: Security", - "permissions": { - "groups": { - "CI Tools": "scan", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - } - } - }, - "vcb-web": { - "binding": { - "key": "GitHub.com", 
- "repository": "okorach/volley-bridoire-web", - "summaryCommentEnabled": true - }, - "branches": { - "fix-issues": {}, - "master": { - "isMain": true - }, - "new-branch": {}, - "prep-new-21-22-season": {} - }, - "name": "Volley Bridoire Web", - "permissions": { - "groups": { - "CI Tools": "scan", - "Executives": "admin", - "Language Experts": "admin", - "Security Auditors": "codeviewer, issueadmin, securityhotspotadmin, user", - "Team 2 Developers": "admin, user", - "Team Developers": "admin, user", - "sonar-administrators": "admin, codeviewer, issueadmin, securityhotspotadmin, user", - "sonar-users": "user" - }, - "users": { - "admin": "admin", - "cameron": "admin", - "jack": "issueadmin, securityhotspotadmin, user", - "joe": "admin, user", - "olivier-korach91357": "admin", - "olivierk": "codeviewer", - "sylvain": "admin", - "yacine": "codeviewer, issueadmin" - } - }, - "qualityProfiles": { - "php": "PHP Olivier" - }, - "sonar.androidLint.reportPaths": "foo/androidLint.xml", - "sonar.dbcleaner.branchesToKeepWhenInactive": "master, develop, trunk, release-.*, main, release-[\\d.].*", - "sonar.dbcleaner.weeksBeforeKeepingOnlyOneSnapshotByMonth": 26, - "sonar.dependencyCheck.skip": true, - "sonar.developerAggregatedInfo.disabled": true, - "sonar.exclusions": "**/*.notcode", - "sonar.governance.report.project.branch.frequency": "Weekly", - "sonar.issues.defaultAssigneeLogin": "sylvain", - "sonar.php.file.suffixes": "php, php3, php4, php5, phtml, inc, phpx", - "sonar.scm.provider": "git", - "sonar.security.config.phpsecurity": "{\n \"S3649\": {\n \"sources\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQuery\"\n }\n ],\n \"passthroughs\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\RawUrl::RawUrl\",\n \"isWhitelist\": true,\n \"args\": [\n 1\n ]\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"mysql_query\",\n \"args\": [\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::execute\",\n \"isMethodPrefix\": true,\n \"args\": [\n 
0,\n 1\n ]\n },\n {\n \"methodId\": \"My\\\\Namespace\\\\SqlStatement::run\",\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n },\n \"S5131\": {\n \"sources\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\ServerRequest::getQueryString\"\n }\n ],\n \"sinks\": [\n {\n \"methodId\": \"My\\\\Namespace\\\\ClassName\\\\Server::write\",\n \"isMethodPrefix\": true,\n \"interval\": {\n \"fromIndex\": 1\n }\n }\n ]\n }\n}", - "tags": "slovakia" - } - }, - "qualityGates": { - "Sonar way": { - "isBuiltIn": true - }, - "Sonar way ++": { - "conditions": [ - "new_security_rating >= A", - "new_reliability_rating >= A", - "new_maintainability_rating >= A", - "new_duplicated_lines_density >= 2", - "new_coverage <= 85", - "new_security_hotspots_reviewed <= 100", - "security_rating >= C", - "reliability_rating >= D" - ], - "isDefault": true, - "permissions": { - "groups": "sonar-users", - "users": "yacine" - } - }, - "Sonar way w/o Hotspots": { - "conditions": [ - "new_security_rating >= A", - "new_reliability_rating >= A", - "new_maintainability_rating >= A", - "new_coverage <= 80", - "new_duplicated_lines_density >= 3" - ], - "permissions": { - "users": "sylvain" - } - }, - "Sonar way w/o coverage": { - "conditions": [ - "new_security_rating >= A", - "new_reliability_rating >= A", - "new_maintainability_rating >= A", - "new_duplicated_lines_density >= 3", - "new_security_hotspots_reviewed <= 100" - ], - "permissions": { - "users": "cameron" - } - } - }, - "qualityProfiles": { - "abap": { - "Sonar way": { - "children": { - "My way": { - "isDefault": true, - "permissions": { - "groups": "Team Tech Leads" - }, - "rules": { - "abap:S100": { - "modified": true, - "params": { - "format": "^([A-Z_]*|[a-z0-9_]*)$" - } - }, - "abap:S1639": "BLOCKER", - "abap:S1668": "MAJOR", - "abap:S1672": "MAJOR", - "abap:S2237": "MAJOR", - "abap:S2241": "MINOR", - "abap:S3923": { - "modified": true, - "severity": "BLOCKER" - }, - "common-abap:FailedUnitTests": "MAJOR" - } - } - }, - 
"isBuiltIn": true - } - }, - "apex": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "c": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "cloudformation": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "cobol": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "cpp": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "cs": { - "Sonar way": { - "children": { - "Unused QP": { - "permissions": { - "users": "simon.brandhof@sonarsource.com" - }, - "rules": { - "common-cs:InsufficientLineCoverage": { - "params": { - "minimumLineCoverageRatio": "65" - }, - "severity": "MAJOR" - }, - "csharpsquid:S3962": "MINOR", - "csharpsquid:S881": "MAJOR" - } - } - }, - "isBuiltIn": true, - "isDefault": true - } - }, - "css": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "flex": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "go": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "java": { - "Sonar way": { - "children": { - "All rules": { - "rules": { - "common-java:FailedUnitTests": "MAJOR", - "common-java:InsufficientBranchCoverage": { - "params": { - "minimumBranchCoverageRatio": "65" - }, - "severity": "MAJOR" - }, - "common-java:InsufficientCommentDensity": { - "params": { - "minimumCommentDensity": "25" - }, - "severity": "MAJOR" - }, - "common-java:InsufficientLineCoverage": { - "params": { - "minimumLineCoverageRatio": "65" - }, - "severity": "MAJOR" - }, - "common-java:SkippedUnitTests": "MAJOR", - "java:Don_t_use_hacker_classes": "INFO", - "java:Don_t_use_offensive_words_in_comments": "MAJOR", - "java:Don_t_use_sun_classes": "INFO", - "java:NoSonar": "MAJOR", - "java:S103": { - "params": { - "maximumLineLength": "120" - }, - "severity": "MAJOR" - }, - "java:S104": { - "params": { - "Max": "750" - }, - "severity": "MAJOR" - }, - "java:S105": "MINOR", - "java:S1067": { - "params": { - "max": "3" - 
}, - "severity": "CRITICAL" - }, - "java:S109": { - "params": { - "Authorized numbers": "-1,0,1" - }, - "severity": "MAJOR" - }, - "java:S1105": "MINOR", - "java:S1106": "MINOR", - "java:S1107": "MINOR", - "java:S1108": "MINOR", - "java:S1109": "MINOR", - "java:S1120": { - "params": { - "indentationLevel": "2" - }, - "severity": "MINOR" - }, - "java:S113": "MINOR", - "java:S1132": "MINOR", - "java:S1142": { - "params": { - "max": "3" - }, - "severity": "MAJOR" - }, - "java:S1147": "BLOCKER", - "java:S1151": { - "params": { - "max": "5" - }, - "severity": "MAJOR" - }, - "java:S1160": "MAJOR", - "java:S1162": "MAJOR", - "java:S1166": { - "params": { - "exceptions": "java.lang.InterruptedException, java.lang.NumberFormatException, java.lang.NoSuchMethodException, java.text.ParseException, java.net.MalformedURLException, java.time.format.DateTimeParseException" - }, - "severity": "MAJOR" - }, - "java:S1176": { - "params": { - "exclusion": "**.internal.**", - "forClasses": "**.api.**" - }, - "severity": "MAJOR" - }, - "java:S118": { - "params": { - "format": "^Abstract[A-Z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "java:S1188": { - "params": { - "Max": "20" - }, - "severity": "MAJOR" - }, - "java:S1194": "MAJOR", - "java:S1200": { - "params": { - "max": "20" - }, - "severity": "MAJOR" - }, - "java:S121": "CRITICAL", - "java:S1213": "MINOR", - "java:S122": "MAJOR", - "java:S1228": "MINOR", - "java:S1244": "MAJOR", - "java:S1258": "MAJOR", - "java:S126": "CRITICAL", - "java:S1309": { - "params": { - "listOfWarnings": "" - }, - "severity": "INFO" - }, - "java:S1310": "MINOR", - "java:S1312": { - "params": { - "format": "LOG(?:GER)?" 
- }, - "severity": "MINOR" - }, - "java:S1314": "BLOCKER", - "java:S1315": "MINOR", - "java:S134": { - "params": { - "max": "3" - }, - "severity": "CRITICAL" - }, - "java:S138": { - "params": { - "max": "75" - }, - "severity": "MAJOR" - }, - "java:S139": { - "params": { - "legalTrailingCommentPattern": "^\\s*+[^\\s]++$" - }, - "severity": "MINOR" - }, - "java:S1448": { - "params": { - "countNonpublicMethods": "true", - "maximumMethodThreshold": "35" - }, - "severity": "MAJOR" - }, - "java:S1449": "MINOR", - "java:S1451": { - "params": { - "headerFormat": "", - "isRegularExpression": "false" - }, - "severity": "BLOCKER" - }, - "java:S1541": { - "params": { - "Threshold": "10" - }, - "severity": "CRITICAL" - }, - "java:S1641": "MINOR", - "java:S1694": "MINOR", - "java:S1695": "MAJOR", - "java:S1696": "MAJOR", - "java:S1698": "MINOR", - "java:S1699": "CRITICAL", - "java:S1711": "MAJOR", - "java:S1774": "MAJOR", - "java:S1820": { - "params": { - "countNonpublicFields": "true", - "maximumFieldThreshold": "20" - }, - "severity": "MAJOR" - }, - "java:S1821": "CRITICAL", - "java:S1939": "MINOR", - "java:S1941": "MINOR", - "java:S1942": "MINOR", - "java:S1943": "MINOR", - "java:S1996": "MAJOR", - "java:S2039": "MINOR", - "java:S2047": "MAJOR", - "java:S2057": "CRITICAL", - "java:S2059": "MINOR", - "java:S2063": "CRITICAL", - "java:S2096": "BLOCKER", - "java:S2141": "MAJOR", - "java:S2143": "MAJOR", - "java:S2148": "MINOR", - "java:S2156": "MINOR", - "java:S2162": "MINOR", - "java:S2164": "MINOR", - "java:S2196": "MINOR", - "java:S2197": "CRITICAL", - "java:S2203": "MINOR", - "java:S2208": "CRITICAL", - "java:S2211": "MAJOR", - "java:S2221": "MINOR", - "java:S2250": "MINOR", - "java:S2260": "MAJOR", - "java:S2301": "MAJOR", - "java:S2308": "MAJOR", - "java:S2309": "MINOR", - "java:S2325": "MINOR", - "java:S2333": "MINOR", - "java:S2384": "MINOR", - "java:S2444": "CRITICAL", - "java:S2658": "CRITICAL", - "java:S2693": "BLOCKER", - "java:S2694": "MAJOR", - "java:S2698": 
"MINOR", - "java:S2701": "MINOR", - "java:S2959": "MINOR", - "java:S2972": { - "params": { - "Max": "25" - }, - "severity": "MAJOR" - }, - "java:S2973": "MAJOR", - "java:S2974": "MINOR", - "java:S3030": { - "params": { - "threshold": "4" - }, - "severity": "MAJOR" - }, - "java:S3032": "MINOR", - "java:S3047": "MINOR", - "java:S3052": "MINOR", - "java:S3242": "MINOR", - "java:S3254": "MINOR", - "java:S3306": "MAJOR", - "java:S3366": "MAJOR", - "java:S3414": "MAJOR", - "java:S3437": "MINOR", - "java:S3553": "MAJOR", - "java:S3578": { - "params": { - "format": "^test[A-Z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "java:S3658": "MINOR", - "java:S3725": "MAJOR", - "java:S3749": { - "params": { - "customInjectionAnnotations": "" - }, - "severity": "CRITICAL" - }, - "java:S3750": "MAJOR", - "java:S3937": "CRITICAL", - "java:S4174": { - "params": { - "format": "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$" - }, - "severity": "MINOR" - }, - "java:S4248": "MAJOR", - "java:S4266": "MINOR", - "java:S4288": "MAJOR", - "java:S4551": "MAJOR", - "java:S4604": "MAJOR", - "java:S4605": "CRITICAL", - "java:S4926": "MINOR", - "java:S5128": "CRITICAL", - "java:S5194": "MINOR", - "java:S5612": { - "params": { - "Max": "10" - }, - "severity": "MAJOR" - }, - "java:S5793": "INFO", - "java:S5867": "MINOR", - "java:S5970": "MAJOR", - "java:S5977": "MAJOR", - "java:S5979": "BLOCKER", - "java:S6073": "MAJOR", - "java:S6212": "INFO", - "java:S818": { - "params": { - "checkOnlyLong": "false" - }, - "severity": "MINOR" - }, - "java:S864": "MAJOR", - "java:S881": "MAJOR", - "java:S888": "CRITICAL", - "java:S923": "INFO", - "xml:AssertJ_3_4_1_forbidden": { - "severity": "MAJOR" - }, - "xml:Forbidden_to_use_Jackson_5_11": { - "severity": "MAJOR" - }, - "xml:S3282": { - "severity": "BLOCKER" - }, - "xml:S3373": { - "severity": "MINOR" - }, - "xml:S3419": { - "severity": "MINOR" - }, - "xml:S3420": { - "severity": "MINOR" - }, - "xml:S3423": { - "severity": "MINOR" - } - } - }, - "All security rules": { - 
"rules": { - "java:Don_t_use_hacker_classes": "INFO", - "java:S2039": "MINOR", - "java:S2077": { - "modified": true, - "severity": "CRITICAL" - }, - "java:S2384": "MINOR", - "java:S2658": "CRITICAL", - "java:S3011": { - "modified": true, - "severity": "CRITICAL" - }, - "java:S3330": { - "modified": true, - "severity": "CRITICAL" - }, - "java:S3725": "MAJOR", - "java:S3749": { - "params": { - "customInjectionAnnotations": "" - }, - "severity": "CRITICAL" - }, - "java:S3751": { - "modified": true, - "severity": "BLOCKER" - }, - "java:S3752": { - "modified": true, - "severity": "BLOCKER" - }, - "java:S4423": { - "modified": true, - "severity": "MAJOR" - }, - "java:S4426": { - "modified": true, - "severity": "BLOCKER" - }, - "java:S4434": { - "modified": true, - "severity": "BLOCKER" - }, - "java:S4507": { - "modified": true, - "severity": "CRITICAL" - }, - "java:S5542": { - "modified": true, - "severity": "BLOCKER" - } - } - }, - "Corp way": { - "children": { - "Corp Android Apps": { - "permissions": { - "users": "simon.brandhof@sonarsource.com" - }, - "rules": { - "java:S1108": "MINOR", - "java:S1166": { - "params": { - "exceptions": "java.lang.InterruptedException, java.lang.NumberFormatException, java.lang.NoSuchMethodException, java.text.ParseException, java.net.MalformedURLException, java.time.format.DateTimeParseException" - }, - "severity": "MAJOR" - }, - "java:S117": { - "modified": true, - "params": { - "format": "^_[a-zA-Z0-9]*$" - } - }, - "java:S139": { - "params": { - "legalTrailingCommentPattern": "^\\s*+[^\\s]++$" - }, - "severity": "MINOR" - }, - "java:S1943": "MINOR", - "java:S2162": "MINOR", - "java:S3030": { - "params": { - "threshold": "4" - }, - "severity": "MAJOR" - }, - "java:S3052": "MINOR" - } - }, - "Corp Server Apps": { - "rules": { - "java:S1941": "MINOR", - "java:S2208": "CRITICAL" - } - } - }, - "isDefault": true, - "rules": { - "java:Don_t_use_offensive_words_in_comments": "MAJOR", - "java:S1109": "MINOR", - "java:S1162": "MAJOR", - 
"java:S1541": { - "params": { - "Threshold": "10" - }, - "severity": "CRITICAL" - }, - "java:S1695": "MAJOR", - "java:S2063": "CRITICAL", - "java:S2203": "MINOR", - "java:S2658": "CRITICAL", - "java:S3242": "MINOR", - "java:S3254": "MINOR", - "java:S888": "CRITICAL" - } - }, - "Olivier way": { - "permissions": { - "groups": "Language Experts", - "users": "olivier-korach91357" - }, - "rules": { - "java:S104": { - "params": { - "Max": "750" - }, - "severity": "MAJOR" - } - } - } - }, - "isBuiltIn": true - } - }, - "js": { - "Bad - unused QP": { - "rules": { - "common-js:DuplicatedBlocks": "MAJOR", - "javascript:S101": { - "params": { - "format": "^[A-Z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "javascript:S103": { - "params": { - "maximumLineLength": "180" - }, - "severity": "MAJOR" - }, - "javascript:S104": { - "params": { - "maximum": "1000" - }, - "severity": "MAJOR" - }, - "javascript:S107": { - "params": { - "maximumFunctionParameters": "7" - }, - "severity": "MAJOR" - }, - "javascript:S108": "MAJOR", - "javascript:S1110": "MAJOR", - "javascript:S1116": "MINOR", - "javascript:S1117": "MAJOR", - "javascript:S1119": "MAJOR", - "javascript:S1121": "MAJOR", - "javascript:S1125": "MINOR", - "javascript:S1126": "MINOR", - "javascript:S1128": "MINOR", - "javascript:S113": "MINOR", - "javascript:S1131": "MINOR", - "javascript:S1134": "MAJOR", - "javascript:S1135": "INFO", - "javascript:S1143": "CRITICAL", - "javascript:S117": { - "params": { - "format": "^[_$A-Za-z][$A-Za-z0-9]*$|^[_$A-Z][_$A-Z0-9]+$" - }, - "severity": "MINOR" - }, - "javascript:S1186": "CRITICAL", - "javascript:S1192": { - "params": { - "threshold": "3" - }, - "severity": "CRITICAL" - }, - "javascript:S121": "CRITICAL", - "javascript:S1219": "BLOCKER", - "javascript:S122": "MAJOR", - "javascript:S1226": "MINOR", - "javascript:S125": "MAJOR", - "javascript:S1264": "MINOR", - "javascript:S128": "BLOCKER", - "javascript:S1301": "MINOR", - "javascript:S1313": "MINOR", - "javascript:S1314": 
"BLOCKER", - "javascript:S1321": "MINOR", - "javascript:S138": { - "params": { - "max": "200" - }, - "severity": "MAJOR" - }, - "javascript:S1439": "MAJOR", - "javascript:S1440": "MAJOR", - "javascript:S1472": "MINOR", - "javascript:S1479": { - "params": { - "maximum": "30" - }, - "severity": "MAJOR" - }, - "javascript:S1481": "MINOR", - "javascript:S1488": "MINOR", - "javascript:S1515": "MAJOR", - "javascript:S1516": "MINOR", - "javascript:S1523": "CRITICAL", - "javascript:S1526": "BLOCKER", - "javascript:S1527": "BLOCKER", - "javascript:S1528": "MINOR", - "javascript:S1529": "MAJOR", - "javascript:S1533": "MINOR", - "javascript:S1534": "MAJOR", - "javascript:S1536": "MAJOR", - "javascript:S1656": "MAJOR", - "javascript:S1751": "MAJOR", - "javascript:S1763": "MAJOR", - "javascript:S1764": "MAJOR", - "javascript:S1788": "MAJOR", - "javascript:S1821": "CRITICAL", - "javascript:S1848": "MAJOR", - "javascript:S1854": "MAJOR", - "javascript:S1862": "MAJOR", - "javascript:S1871": "MAJOR", - "javascript:S1940": "MINOR", - "javascript:S1994": "CRITICAL", - "javascript:S2068": { - "params": { - "credentialWords": "password, pwd, passwd" - }, - "severity": "BLOCKER" - }, - "javascript:S2077": "MAJOR", - "javascript:S2092": "MINOR", - "javascript:S2123": "MAJOR", - "javascript:S2137": "MAJOR", - "javascript:S2189": "BLOCKER", - "javascript:S2201": "MAJOR", - "javascript:S2234": "MAJOR", - "javascript:S2245": "CRITICAL", - "javascript:S2251": "MAJOR", - "javascript:S2259": "MAJOR", - "javascript:S2310": "CRITICAL", - "javascript:S2376": { - "params": { - "getWithoutSet": "false" - }, - "severity": "MAJOR" - }, - "javascript:S2392": "MAJOR", - "javascript:S2424": "MAJOR", - "javascript:S2428": "MINOR", - "javascript:S2432": "MAJOR", - "javascript:S2589": "MAJOR", - "javascript:S2598": "CRITICAL", - "javascript:S2612": "MAJOR", - "javascript:S2681": "MAJOR", - "javascript:S2685": "MAJOR", - "javascript:S2688": "MAJOR", - "javascript:S2692": "MAJOR", - "javascript:S2703": 
"BLOCKER", - "javascript:S2737": "MINOR", - "javascript:S2755": "BLOCKER", - "javascript:S2757": "MAJOR", - "javascript:S2814": "MAJOR", - "javascript:S2819": "CRITICAL", - "javascript:S2870": "MAJOR", - "javascript:S2871": "CRITICAL", - "javascript:S2990": "MINOR", - "javascript:S2999": { - "params": { - "considerJSDoc": "false" - }, - "severity": "MAJOR" - }, - "javascript:S3001": "MINOR", - "javascript:S3003": "MAJOR", - "javascript:S3317": "MINOR", - "javascript:S3330": "MINOR", - "javascript:S3353": "CRITICAL", - "javascript:S3358": "MAJOR", - "javascript:S3403": "MAJOR", - "javascript:S3500": "MAJOR", - "javascript:S3512": "MINOR", - "javascript:S3516": "BLOCKER", - "javascript:S3531": "MAJOR", - "javascript:S3579": "MAJOR", - "javascript:S3616": "MAJOR", - "javascript:S3626": "MINOR", - "javascript:S3686": "MAJOR", - "javascript:S3696": "MAJOR", - "javascript:S3699": "MAJOR", - "javascript:S3735": "CRITICAL", - "javascript:S3757": "MAJOR", - "javascript:S3758": "MAJOR", - "javascript:S3760": "MAJOR", - "javascript:S3776": { - "params": { - "threshold": "15" - }, - "severity": "CRITICAL" - }, - "javascript:S3782": "MAJOR", - "javascript:S3785": "CRITICAL", - "javascript:S3786": "MAJOR", - "javascript:S3796": "BLOCKER", - "javascript:S3799": "MAJOR", - "javascript:S3800": "MAJOR", - "javascript:S3801": "MAJOR", - "javascript:S3812": "CRITICAL", - "javascript:S3834": "CRITICAL", - "javascript:S3854": "CRITICAL", - "javascript:S3863": "MINOR", - "javascript:S3923": "MAJOR", - "javascript:S3972": "CRITICAL", - "javascript:S3973": "CRITICAL", - "javascript:S3981": "MAJOR", - "javascript:S3984": "MAJOR", - "javascript:S4030": "MAJOR", - "javascript:S4043": "MAJOR", - "javascript:S4123": "CRITICAL", - "javascript:S4138": "MINOR", - "javascript:S4139": "MAJOR", - "javascript:S4140": "MAJOR", - "javascript:S4143": "MAJOR", - "javascript:S4144": "MAJOR", - "javascript:S4158": "MINOR", - "javascript:S4165": "MAJOR", - "javascript:S4275": "CRITICAL", - 
"javascript:S4326": "MINOR", - "javascript:S4423": "CRITICAL", - "javascript:S4426": "CRITICAL", - "javascript:S4502": "CRITICAL", - "javascript:S4507": "MINOR", - "javascript:S4524": "CRITICAL", - "javascript:S4619": "MAJOR", - "javascript:S4624": "MAJOR", - "javascript:S4634": "MAJOR", - "javascript:S4721": "MAJOR", - "javascript:S4790": "CRITICAL", - "javascript:S4822": "MAJOR", - "javascript:S4830": "CRITICAL", - "javascript:S5042": "CRITICAL", - "javascript:S5122": "MINOR", - "javascript:S5247": "MAJOR", - "javascript:S5332": "CRITICAL", - "javascript:S5443": "CRITICAL", - "javascript:S5527": "CRITICAL", - "javascript:S5542": "CRITICAL", - "javascript:S5547": "CRITICAL", - "javascript:S5604": { - "params": { - "permissions": "geolocation" - }, - "severity": "MAJOR" - }, - "javascript:S5659": "CRITICAL", - "javascript:S5689": "MINOR", - "javascript:S5691": "MAJOR", - "javascript:S5693": { - "params": { - "fileUploadSizeLimit": "8000000", - "standardSizeLimit": "2000000" - }, - "severity": "MAJOR" - }, - "javascript:S5725": "MINOR", - "javascript:S5728": "MINOR", - "javascript:S5730": "MINOR", - "javascript:S5732": "MINOR", - "javascript:S5734": "MINOR", - "javascript:S5736": "MINOR", - "javascript:S5739": "MINOR", - "javascript:S5742": "MINOR", - "javascript:S5743": "MINOR", - "javascript:S5757": "MINOR", - "javascript:S5759": "MINOR", - "javascript:S5876": "CRITICAL", - "javascript:S878": "MAJOR", - "javascript:S881": "MAJOR", - "javascript:S888": "CRITICAL", - "javascript:S905": "MAJOR", - "javascript:S930": "CRITICAL", - "jssecurity:S2076": "BLOCKER", - "jssecurity:S2083": "BLOCKER", - "jssecurity:S2631": "CRITICAL", - "jssecurity:S3649": "BLOCKER", - "jssecurity:S5131": "BLOCKER", - "jssecurity:S5144": "MAJOR", - "jssecurity:S5146": "BLOCKER", - "jssecurity:S5147": "BLOCKER", - "jssecurity:S5334": "BLOCKER", - "jssecurity:S5696": "BLOCKER", - "jssecurity:S5883": "MINOR", - "jssecurity:S6096": "BLOCKER", - "jssecurity:S6105": "BLOCKER" - } - }, - "Sonar 
way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "json": { - "Sonar way": { - "isBuiltIn": true - }, - "SonarQube Way": { - "isDefault": true, - "rules": {} - } - }, - "jsp": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "kotlin": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - }, - "detekt active": { - "isBuiltIn": true - }, - "detekt all": { - "isBuiltIn": true - } - }, - "neutral": { - "Neutral": { - "isBuiltIn": true, - "isDefault": true - } - }, - "objc": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "php": { - "Drupal": { - "isBuiltIn": true - }, - "PSR-2": { - "isBuiltIn": true - }, - "Sonar way": { - "children": { - "PHP Olivier": { - "rules": { - "common-php:FailedUnitTests": "MAJOR", - "common-php:InsufficientBranchCoverage": { - "params": { - "minimumBranchCoverageRatio": "65" - }, - "severity": "MAJOR" - }, - "common-php:InsufficientCommentDensity": { - "params": { - "minimumCommentDensity": "25" - }, - "severity": "MAJOR" - }, - "common-php:InsufficientLineCoverage": { - "params": { - "minimumLineCoverageRatio": "65" - }, - "severity": "MAJOR" - }, - "common-php:SkippedUnitTests": "MAJOR", - "php:S100": { - "params": { - "format": "^[a-z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "php:S101": { - "modified": true, - "params": { - "format": "^[A-Z][a-z_A-Z0-9]*$" - } - }, - "php:S103": { - "params": { - "maximumLineLength": "120" - }, - "severity": "MINOR" - }, - "php:S104": { - "params": { - "max": "1000" - }, - "severity": "MAJOR" - }, - "php:S105": "MINOR", - "php:S1067": { - "params": { - "max": "3" - }, - "severity": "MAJOR" - }, - "php:S1105": "MINOR", - "php:S1124": "MINOR", - "php:S113": "MINOR", - "php:S1131": "MINOR", - "php:S115": { - "modified": true, - "severity": "MINOR" - }, - "php:S1151": { - "params": { - "max": "10" - }, - "severity": "MAJOR" - }, - "php:S116": { - "params": { - "format": "^[a-z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "php:S117": { 
- "params": { - "format": "^[a-z][a-zA-Z0-9]*$" - }, - "severity": "MINOR" - }, - "php:S1192": { - "modified": true, - "severity": "MINOR" - }, - "php:S1200": { - "params": { - "max": "20" - }, - "severity": "MAJOR" - }, - "php:S121": { - "modified": true, - "severity": "MAJOR" - }, - "php:S122": "MINOR", - "php:S126": "MAJOR", - "php:S128": "CRITICAL", - "php:S131": { - "modified": true, - "severity": "MAJOR" - }, - "php:S1311": { - "params": { - "max": "200" - }, - "severity": "MAJOR" - }, - "php:S134": { - "params": { - "max": "4" - }, - "severity": "MAJOR" - }, - "php:S139": { - "params": { - "legalTrailingCommentPattern": "^(//|#)\\s*+[^\\s]++$" - }, - "severity": "INFO" - }, - "php:S1451": { - "params": { - "headerFormat": "" - }, - "severity": "BLOCKER" - }, - "php:S1481": { - "modified": true, - "severity": "MAJOR" - }, - "php:S1541": { - "params": { - "threshold": "20" - }, - "severity": "MAJOR" - }, - "php:S1578": { - "params": { - "format": "[a-z][A-Za-z0-9]+.php" - }, - "severity": "MINOR" - }, - "php:S1599": { - "modified": true, - "severity": "MAJOR" - }, - "php:S1697": { - "modified": true, - "severity": "BLOCKER" - }, - "php:S1757": "MINOR", - "php:S1764": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S1765": "MINOR", - "php:S1766": "MINOR", - "php:S1779": "MINOR", - "php:S1780": "MINOR", - "php:S1781": "MINOR", - "php:S1784": "MINOR", - "php:S1788": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S1793": "MINOR", - "php:S1799": "CRITICAL", - "php:S1808": { - "params": { - "closing_curly_brace": "true", - "closure_format": "true", - "extends_implements_line": "true", - "foreach_space": "true", - "function_calls_arguments_indentation": "true", - "function_declaration_arguments_indentation": "true", - "interfaces_indentation": "true", - "namespace_blank_line": "true", - "no_space": "true", - "no_space_method_name": "true", - "one_space_after": "true", - "one_space_before": "true", - "one_space_for": "true", - 
"open_curly_brace_classes_functions": "true", - "open_curly_brace_control_structures": "true", - "space_comma": "true", - "use_after_namespace": "true", - "use_blank_line": "true" - }, - "severity": "MINOR" - }, - "php:S1820": { - "params": { - "countNonpublicFields": "true", - "maximumFieldThreshold": "20" - }, - "severity": "MAJOR" - }, - "php:S1848": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S1862": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S1990": "MINOR", - "php:S1996": "MAJOR", - "php:S1997": "MINOR", - "php:S2000": "CRITICAL", - "php:S2001": "MAJOR", - "php:S2002": "MAJOR", - "php:S2003": "CRITICAL", - "php:S2004": { - "modified": true, - "severity": "MAJOR" - }, - "php:S2005": "MINOR", - "php:S2007": "MAJOR", - "php:S2010": { - "modified": true, - "severity": "MAJOR" - }, - "php:S2011": "MAJOR", - "php:S2037": "MAJOR", - "php:S2038": "MINOR", - "php:S2041": { - "modified": true, - "severity": "MAJOR" - }, - "php:S2042": { - "params": { - "maximumLinesThreshold": "200" - }, - "severity": "MAJOR" - }, - "php:S2043": "MAJOR", - "php:S2044": "MINOR", - "php:S2046": "MINOR", - "php:S2047": "MINOR", - "php:S2068": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S2070": "CRITICAL", - "php:S2077": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S2277": "CRITICAL", - "php:S2278": "BLOCKER", - "php:S2681": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S2830": "MAJOR", - "php:S2918": "CRITICAL", - "php:S2964": "CRITICAL", - "php:S3011": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S3330": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S3331": "CRITICAL", - "php:S3332": "CRITICAL", - "php:S3333": "BLOCKER", - "php:S3334": "BLOCKER", - "php:S3335": "MAJOR", - "php:S3336": "BLOCKER", - "php:S3337": "BLOCKER", - "php:S3338": "BLOCKER", - "php:S4423": { - "modified": true, - "severity": "MAJOR" - }, - "php:S4426": { - "modified": true, - "severity": "BLOCKER" - }, - "php:S4507": 
{ - "modified": true, - "severity": "CRITICAL" - }, - "php:S5328": { - "modified": true, - "severity": "CRITICAL" - }, - "php:S5542": { - "modified": true, - "severity": "BLOCKER" - }, - "php:S881": "MAJOR", - "php:S905": { - "modified": true, - "severity": "CRITICAL" - } - } - } - }, - "isBuiltIn": true, - "isDefault": true - } - }, - "pli": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "plsql": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "py": { - "Sonar way": { - "children": { - "Python Olivier": { - "isDefault": true, - "permissions": { - "users": "olivierk" - }, - "rules": { - "common-py:DuplicatedBlocks": "MAJOR", - "common-py:InsufficientCommentDensity": { - "params": { - "minimumCommentDensity": "25" - }, - "severity": "MAJOR" - }, - "python:FunctionComplexity": { - "params": { - "maximumFunctionComplexityThreshold": "15" - }, - "severity": "CRITICAL" - }, - "python:LineLength": { - "params": { - "maximumLineLength": "120" - }, - "severity": "MAJOR" - }, - "python:OneStatementPerLine": "MAJOR", - "python:S100": { - "modified": true, - "params": { - "format": "^[a-z_][a-z0-9_]{2,}$" - } - }, - "python:S101": { - "modified": true, - "params": { - "format": "^[A-Z_][a-zA-Z0-9]+$" - } - }, - "python:S104": { - "params": { - "maximum": "1000" - }, - "severity": "MAJOR" - }, - "python:S107": { - "modified": true, - "params": { - "max": "7" - } - }, - "python:S1192": { - "modified": true, - "params": { - "threshold": "5" - } - }, - "python:S1542": { - "modified": true, - "params": { - "format": "^[a-z_][a-z0-9_]{2,}$" - } - }, - "python:S2077": { - "modified": true, - "severity": "CRITICAL" - }, - "python:S4423": { - "modified": true, - "severity": "MAJOR" - }, - "python:S4426": { - "modified": true, - "severity": "BLOCKER" - }, - "python:S4507": { - "modified": true, - "severity": "CRITICAL" - }, - "python:S5445": { - "modified": true, - "severity": "BLOCKER" - }, - "python:S5542": { - "modified": true, - 
"severity": "BLOCKER" - } - } - } - }, - "isBuiltIn": true - } - }, - "rpg": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "ruby": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "scala": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "swift": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "terraform": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "text": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "ts": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "tsql": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "vb": { - "Olivier way": { - "isDefault": true, - "rules": { - "common-vb:DuplicatedBlocks": "MAJOR", - "vb:ParseError": "MAJOR", - "vb:S103": { - "params": { - "maximumLineLength": "120" - }, - "severity": "MAJOR" - }, - "vb:S105": "MINOR", - "vb:S1067": { - "params": { - "max": "3" - }, - "severity": "CRITICAL" - }, - "vb:S1068": "MAJOR", - "vb:S107": { - "params": { - "max": "7" - }, - "severity": "MAJOR" - }, - "vb:S108": "MAJOR", - "vb:S1116": "MINOR", - "vb:S1125": "MINOR", - "vb:S1126": "MINOR", - "vb:S1131": "MINOR", - "vb:S1134": "MAJOR", - "vb:S1135": "INFO", - "vb:S115": { - "params": { - "format": "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$" - }, - "severity": "CRITICAL" - }, - "vb:S1151": { - "params": { - "max": "5" - }, - "severity": "MAJOR" - }, - "vb:S1186": "CRITICAL", - "vb:S122": "MAJOR", - "vb:S1301": "MINOR", - "vb:S131": "CRITICAL", - "vb:S1313": "MINOR", - "vb:S138": { - "params": { - "max": "100" - }, - "severity": "MAJOR" - }, - "vb:S1479": { - "params": { - "maximum": "30" - }, - "severity": "MAJOR" - }, - "vb:S1541": { - "params": { - "Threshold": "10" - }, - "severity": "CRITICAL" - }, - "vb:S1542": { - "params": { - "format": "^[A-Z][a-zA-Z0-9_]*$" - }, - "severity": "MAJOR" - }, - "vb:S1645": "CRITICAL", - "vb:S1647": "MAJOR", - "vb:S1648": "MAJOR", - 
"vb:S1649": "MAJOR", - "vb:S1650": "MINOR", - "vb:S1651": "MAJOR", - "vb:S1652": "MAJOR", - "vb:S1657": "BLOCKER", - "vb:S1658": "MAJOR", - "vb:S1660": "MAJOR", - "vb:S1702": "MAJOR", - "vb:S1821": "CRITICAL", - "vb:S907": "MAJOR" - } - }, - "Sonar way": { - "isBuiltIn": true - } - }, - "vbnet": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "web": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - }, - "xml": { - "Sonar way": { - "children": { - "Olivier": { - "isDefault": true, - "rules": { - "xml:Check_any_tag": "MAJOR", - "xml:Test_XPATH": "MAJOR", - "xml:Test_no_message": "MAJOR", - "xml:sonarExclusionsForbidden": "BLOCKER" - } - } - }, - "isBuiltIn": true - }, - "Sonar way Apigee": { - "isBuiltIn": true - } - }, - "yaml": { - "Sonar way": { - "isBuiltIn": true, - "isDefault": true - } - } - }, - "rules": { - "extended": { - "Web:S5148": { - "description": "Derivco suggest to do blah blah blah" - }, - "c:S3805": { - "description": "Because import is bad and include is good" - }, - "c:S989": { - "tags": "resilience" - }, - "cpp:S3696": { - "tags": "pci-dss" - }, - "cpp:S3806": { - "tags": "autosar" - }, - "cpp:S985": { - "tags": "pci-dss" - }, - "csharpsquid:S1206": { - "tags": "cwe-786-cbr" - }, - "csharpsquid:S3885": { - "tags": "siemens-love" - }, - "java:S2095": { - "tags": "universal-critical" - }, - "java:S2200": { - "description": "See [Quora thread](https://www.quora.com/How-does-one-effectively-use-the-compareTo-method-in-java-for-checking-string-value)", - "tags": "payment-critical" - }, - "java:S2204": { - "tags": "my-rule-tag,tag1,tag10,tag2,tag3,tag4,tag5,tag6,tag7,tag8,tag9" - }, - "java:S2273": { - "description": "Nilrod comment: This is related to standard XXX" - }, - "java:S2970": { - "tags": "santander-must" - }, - "java:S3752": { - "tags": "murex-cwe-2287" - }, - "php:S3336": { - "tags": "bayer-criticity-1" - }, - "php:S3972": { - "tags": "psr12" - }, - "python:S1763": { - "tags": "dead-code" - }, - 
"python:S2757": { - "description": "*Korach corp additional description*: Simple problem but this has been the source of so many bugs across all languages !" - } - }, - "instantiated": { - "java:Disallow_Sri_classes": { - "params": { - "className": "com\\.sri\\.*" - }, - "severity": "INFO", - "templateKey": "java:S3688" - }, - "java:Don_t_use_hacker_classes": { - "params": { - "className": "org\\.hacker\\..*" - }, - "severity": "INFO", - "templateKey": "java:S3688" - }, - "java:Don_t_use_offensive_words_in_comments": { - "params": { - "message": "Please don't use offensive comments", - "regularExpression": ".*FUCK.*" - }, - "severity": "MAJOR", - "templateKey": "java:S124" - }, - "java:Don_t_use_sun_classes": { - "params": { - "className": "sun\\..*" - }, - "severity": "INFO", - "templateKey": "java:S3688" - }, - "pli:Don_t_use_INSERT": { - "params": { - "message": "//INSERT", - "xpathQuery": "" - }, - "severity": "MAJOR", - "templateKey": "pli:XPath" - }, - "plsql:Dont_use_DROP": { - "params": { - "message": "Please don't drop me", - "xpathQuery": "//DROP" - }, - "severity": "MAJOR", - "templateKey": "plsql:XPathCheckPlSql" - }, - "xml:AssertJ_3_4_1_forbidden": { - "params": { - "dependencyName": "junit:junit", - "version": "4.11*" - }, - "severity": "MAJOR", - "templateKey": "xml:S3417" - }, - "xml:Check_any_tag": { - "params": { - "expression": "//forbidden", - "filePattern": "**/*", - "message": "Found a tag" - }, - "severity": "MAJOR", - "templateKey": "xml:XPathCheck" - }, - "xml:Forbidden_to_use_Jackson_5_11": { - "params": { - "dependencyName": ".*jackson.*", - "version": "5.*" - }, - "severity": "MAJOR", - "templateKey": "xml:S3417" - }, - "xml:Test_XPATH": { - "params": { - "expression": "//TD[@NOWRAP]", - "filePattern": "**", - "message": "No nowrap" - }, - "severity": "MAJOR", - "templateKey": "xml:XPathCheck" - }, - "xml:Test_no_message": { - "params": { - "expression": "/process", - "filePattern": "", - "message": "" - }, - "severity": "MAJOR", - 
"templateKey": "xml:XPathCheck" - }, - "xml:sonarExclusionsForbidden": { - "params": { - "expression": "//NAME[@tokenValue='sonar.exclusions']", - "filePattern": "", - "message": "Using sonar.exclusion in pom.xml is forbidden" - }, - "severity": "MAJOR", - "templateKey": "xml:XPathCheck" - } - } - }, - "users": { - "admin": { - "email": "olivier.korach@admin.com", - "groups": "CI Tools, Team Project Admins, sonar-administrators", - "local": true, - "name": "Administrator" - }, - "ado": { - "groups": "CI Tools", - "local": true, - "name": "Azure DevOps" - }, - "cameron": { - "groups": "Team Developers, Team Tech Leads", - "local": true, - "name": "cameron" - }, - "cli": { - "groups": "CI Tools, sonar-administrators", - "local": true, - "name": "cli" - }, - "gitlab": { - "groups": "CI Tools", - "local": true, - "name": "gitlab" - }, - "gitlab-ci": { - "groups": "CI Tools", - "local": true, - "name": "gitlab-ci" - }, - "issue-syncer": { - "groups": "sonar-administrators", - "local": true, - "name": "issue-syncer" - }, - "jack": { - "groups": "", - "local": true, - "name": "jack" - }, - "jenkins": { - "email": "", - "groups": "CI Tools", - "local": true, - "name": "Jenkins" - }, - "joe": { - "groups": "", - "local": true, - "name": "joe" - }, - "john.doe@sonarsource.com": { - "email": "john.doe@sonarsource.com", - "groups": [ - "Security Auditors", - "comma,group" - ], - "local": true, - "name": "John Doe" - }, - "olivier-korach81134": { - "groups": "Security Auditors", - "name": "Olivier Korach" - }, - "olivier-korach91357": { - "email": "olivier.korach@gmail.com", - "groups": "", - "name": "Olivier Korach" - }, - "olivierk": { - "groups": "CI Tools, Team Project Admins, Team Tech Leads", - "local": true, - "name": "olivierk", - "scmAccounts": "olivier.korach@test.com" - }, - "simon.brandhof@sonarsource.com": { - "email": "simon.brandhof@sonarsource.com", - "groups": "Executives, Language Experts, Team 2 Developers", - "local": true, - "name": "Simon Brandhof" - }, - 
"sylvain": { - "groups": "Language Experts, Team Developers, Team Tech Leads", - "local": true, - "name": "Sylvain Combe" - }, - "syncer": { - "groups": "sonar-administrators", - "local": true, - "name": "syncer" - }, - "tfs": { - "email": "", - "groups": "", - "local": true, - "name": "TFS" - }, - "yacine": { - "groups": "Team Developers", - "local": true, - "name": "yacine" - } - } -} diff --git a/test/unit/test_audit.py b/test/unit/test_audit.py index 102230ee..5c14d281 100644 --- a/test/unit/test_audit.py +++ b/test/unit/test_audit.py @@ -23,6 +23,7 @@ import os, stat from collections.abc import Generator +import pytest import utilities as util from sonar import errcodes, utilities, logging @@ -114,3 +115,23 @@ def test_sif_not_readable(get_json_file: Generator[str]) -> None: os.chmod(unreadable_file, current_permissions & NO_PERMS) util.run_failed_cmd(audit.main, f"{CMD} --{opt.REPORT_FILE} {get_json_file} --sif {unreadable_file}", errcodes.SIF_AUDIT_ERROR) os.chmod(unreadable_file, current_permissions) + + +def test_configure() -> None: + DEFAULT_CONFIG = f"{os.path.expanduser('~')}{os.sep}.sonar-audit.properties" + config_exists = os.path.exists(DEFAULT_CONFIG) + if config_exists: + os.rename(DEFAULT_CONFIG, f"{DEFAULT_CONFIG}.bak") + util.run_success_cmd(audit.main, f"{CMD} --config") + assert os.path.exists(DEFAULT_CONFIG) + if config_exists: + os.rename(f"{DEFAULT_CONFIG}.bak", DEFAULT_CONFIG) + + +def test_configure_stdout() -> None: + DEFAULT_CONFIG = f"{os.path.expanduser('~')}{os.sep}.sonar-audit.properties" + if not os.path.exists(DEFAULT_CONFIG): + pytest.skip("No $HOME config fule") + last_change = os.stat(DEFAULT_CONFIG).st_ctime_ns + util.run_success_cmd(audit.main, f"{CMD} --config") + assert last_change == os.stat(DEFAULT_CONFIG).st_ctime_ns diff --git a/test/unit/test_findings.py b/test/unit/test_findings.py index 9cc3c5cf..0a3e1ffb 100644 --- a/test/unit/test_findings.py +++ b/test/unit/test_findings.py @@ -204,28 +204,31 @@ def 
test_findings_filter_on_resolution() -> None: with pytest.raises(SystemExit): with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.RESOLUTIONS}", "FALSE-POSITIVE,ACCEPTED,SAFE"]): findings_export.main() + if util.SQ.version() < (10, 0, 0): + statuses = ("FALSE-POSITIVE", "WONTFIX", "SAFE") + else: + statuses = ("FALSE-POSITIVE", "ACCEPTED", "SAFE") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) for line in csvreader: - assert line[STATUS_COL] in ("FALSE-POSITIVE", "ACCEPTED", "SAFE") + assert line[STATUS_COL] in statuses util.clean(util.CSV_FILE) def test_findings_filter_on_severity() -> None: - """test_findings_filter_on_resolution""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.SEVERITIES}", "BLOCKER,CRITICAL"]): - findings_export.main() + """test_findings_filter_on_severity""" + util.run_success_cmd(findings_export.main, f"{' '.join(CSV_OPTS)} --{opt.SEVERITIES} BLOCKER,CRITICAL") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) for line in csvreader: if util.SQ.version() < (10, 2, 0): assert line[SEVERITY_COL] in ("BLOCKER", "CRITICAL") - else: + elif util.SQ.version() < (10, 7, 0): assert "HIGH" in line[SECURITY_IMPACT_COL:OTHER_IMPACT_COL] or "MEDIUM" in line[SECURITY_IMPACT_COL:OTHER_IMPACT_COL] + else: + assert "BLOCKER" in line[SECURITY_IMPACT_COL:OTHER_IMPACT_COL] or "HIGH" in line[SECURITY_IMPACT_COL:OTHER_IMPACT_COL] util.clean(util.CSV_FILE) @@ -240,7 +243,10 @@ def test_findings_filter_on_multiple_criteria() -> None: csvreader = csv.reader(fh) next(csvreader) for line in csvreader: - assert line[STATUS_COL] in ("FALSE-POSITIVE", "ACCEPTED") + if util.SQ.version() < (10, 0, 0): + assert line[STATUS_COL] in ("FALSE-POSITIVE", "WONTFIX") + else: + assert line[STATUS_COL] in ("FALSE-POSITIVE", "ACCEPTED") if util.SQ.version() >= (10, 2, 0): assert 
line[MAINTAINABILITY_IMPACT_COL] != "" or line[RELIABILITY_IMPACT_COL] != "" else: @@ -282,11 +288,15 @@ def test_findings_filter_on_multiple_criteria_3() -> None: with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.STATUSES}", "ACCEPTED", f"--{opt.RESOLUTIONS}", "FALSE-POSITIVE"]): findings_export.main() + if util.SQ.version() < (10, 0, 0): + statuses = ("WONTFIX", "FALSE_POSITIVE", "FALSE-POSITIVE") + else: + statuses = ("ACCEPTED", "FALSE_POSITIVE", "FALSE-POSITIVE") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) for line in csvreader: - assert line[STATUS_COL] in ("ACCEPTED", "FALSE_POSITIVE", "FALSE-POSITIVE") + assert line[STATUS_COL] in statuses util.clean(util.CSV_FILE) diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 94691f4d..9c53d97b 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -34,6 +34,10 @@ ISSUE_FP_V9_9 = "AZNT89kklhFmauJ_HQSK" ISSUE_ACCEPTED = "a1fddba4-9e70-46c6-ac95-e815104ead59" ISSUE_ACCEPTED_V9_9 = "AZI6frkTuTfDeRt_hspx" +ISSUE_W_MULTIPLE_CHANGELOGS = "6ae41c3b-c3d2-422f-a505-d355e7b0a268" +CHLOG_ISSUE_DATE = "2019-09-21" +ISSUE_W_MULTIPLE_CHANGELOGS_V9_9 = "AZBKamIoDJWCTq61gxzW" +CHLOG_ISSUE_V9_9_DATE = "2021-01-08" def test_issue() -> None: @@ -140,8 +144,8 @@ def test_changelog() -> None: assert issue.key == issue_key assert str(issue) == f"Issue key '{issue_key}'" assert issue.is_false_positive() - changelog_l = list(issue.changelog().values()) - if tutil.SQ.version() >= (2025, 1, 0) or tutil.SQ.edition() == "community" and tutil.SQ.version() >= (25, 1, 0): + changelog_l = list(issue.changelog(manual_only=False).values()) + if tutil.SQ.version() >= (25, 1, 0): nb_changes = 3 else: nb_changes = 1 @@ -169,8 +173,38 @@ def test_changelog() -> None: assert changelog.author() == "admin" assert not changelog.is_tag() assert changelog.get_tags() is None - (t, _) = changelog.changelog_type() - assert t == "FALSE-POSITIVE" + + +def 
test_multiple_changelogs(): + """test_multiple_changelogs""" + issue_dt = util.string_to_date(CHLOG_ISSUE_V9_9_DATE if tutil.SQ.version() < (10, 0, 0) else CHLOG_ISSUE_DATE) + issues_d = issues.search_by_date( + endpoint=tutil.SQ, params={"project": "pytorch", "timeZone": "Europe/Paris"}, date_start=issue_dt, date_stop=issue_dt + ) + issue_key = ISSUE_W_MULTIPLE_CHANGELOGS if tutil.SQ.version() >= (10, 0, 0) else ISSUE_W_MULTIPLE_CHANGELOGS_V9_9 + assert issue_key in issues_d + issue = issues_d[issue_key] + state_list = ("ACCEPT", "CONFIRM", "UNCONFIRM", "FP", "REOPEN", "SEVERITY", "ASSIGN", "UNASSIGN", "SEVERITY") + results = {s: False for s in state_list} + for cl in issue.changelog().values(): + (t, _) = cl.changelog_type() + assert t is not None + results["ACCEPT"] = results["ACCEPT"] or cl.is_resolve_as_accept() or cl.is_resolve_as_wf() + results["CONFIRM"] = results["CONFIRM"] or cl.is_confirm() + results["UNCONFIRM"] = results["UNCONFIRM"] or cl.is_unconfirm() + if cl.is_resolve_as_fp(): + results["FP"] = True + assert cl.previous_state() in "OPEN", "REOPENED" + if cl.is_assignment(): + results["ASSIGN"] = True + assert len(cl.assignee()) > 0 + results["UNASSIGN"] = results["UNASSIGN"] or cl.is_unassign() + results["SEVERITY"] = results["SEVERITY"] or cl.is_change_severity() + results["REOPEN"] = results["REOPEN"] or cl.is_reopen() + for s in state_list: + if s != "REOPEN" or (s == "REOPEN" and tutil.SQ.version() < (10, 0, 0)): + logging.debug("Checking that changelog %s was found", s) + assert results[s] def test_request_error() -> None: diff --git a/test/unit/test_projects.py b/test/unit/test_projects.py index f4543d98..da6ed9f1 100644 --- a/test/unit/test_projects.py +++ b/test/unit/test_projects.py @@ -25,7 +25,7 @@ import pytest from sonar import projects, exceptions, qualityprofiles, qualitygates, rules -from sonar.audit import config +from sonar.audit import audit_config import utilities as util @@ -66,7 +66,7 @@ def test_create_delete() -> None: def 
test_audit() -> None: """test_audit""" - settings = {k: False for k, v in config.load("sonar-audit").items() if isinstance(v, bool)} + settings = {k: False for k, v in audit_config.load("sonar-audit").items() if isinstance(v, bool)} settings["audit.projects"] = True assert len(projects.audit(util.SQ, settings)) == 0 proj = projects.Project.get_object(endpoint=util.SQ, key=util.LIVE_PROJECT) diff --git a/test/unit/test_rules.py b/test/unit/test_rules.py index 38f4fa6f..549f1b3e 100644 --- a/test/unit/test_rules.py +++ b/test/unit/test_rules.py @@ -27,7 +27,6 @@ import sys import csv from unittest.mock import patch -import pytest import utilities as util from cli import rules_cli import cli.options as opt @@ -52,18 +51,16 @@ def test_rules_json_format() -> None: def test_rules_filter_language() -> None: """Tests that you can export rules for a single or a few languages""" - util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} py,jcl') + langs = ("py", "cs") if util.SQ.edition() == "community" else ("py", "apex") + util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)} --{opt.LANGUAGES} {",".join(langs)}') with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) line = next(csvreader) assert line[0].startswith("# ") line[0] = line[0][2:] - if util.SQ.version() >= (10, 2, 0): - assert line == rules.CSV_EXPORT_FIELDS - else: - assert line == rules.LEGACY_CSV_EXPORT_FIELDS + assert line == (rules.CSV_EXPORT_FIELDS if util.SQ.version() >= (10, 2, 0) else rules.LEGACY_CSV_EXPORT_FIELDS) for line in csvreader: - assert line[LANGUAGE_COL] in ("py", "jcl") + assert line[LANGUAGE_COL] in langs util.clean(util.CSV_FILE) diff --git a/test/unit/utilities.py b/test/unit/utilities.py index c3c174d5..57a156bc 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -141,15 +141,22 @@ def is_url(value: str) -> bool: return value.startswith("http") -def __get_args_and_file(string_arguments: str) -> 
tuple[Optional[str], list[str]]: +def __get_args_and_file(string_arguments: str) -> tuple[Optional[str], list[str], bool]: """Gets the list arguments and output file of a sonar-tools cmd""" args = __split_args(string_arguments) + imp_cmd = False + for option in (f"-{opt.IMPORT_SHORT}", f"--{opt.IMPORT}"): + try: + imp_cmd = args.index(option) is not None + break + except ValueError: + logging.info("%s - ValueError", option) for option in (f"-{opt.REPORT_FILE_SHORT}", f"--{opt.REPORT_FILE}"): try: - return args[args.index(option) + 1], args + return args[args.index(option) + 1], args, imp_cmd except ValueError: pass - return None, args + return None, args, imp_cmd def __split_args(string_arguments: str) -> list[str]: @@ -177,8 +184,9 @@ def __get_redacted_cmd(string_arguments: str) -> str: def run_cmd(func: callable, arguments: str, expected_code: int) -> Optional[str]: """Runs a sonar-tools command, verifies it raises the right exception, and returns the expected code""" logging.info("RUNNING: %s", __get_redacted_cmd(arguments)) - file, args = __get_args_and_file(arguments) - clean(file) + file, args, import_cmd = __get_args_and_file(arguments) + if not import_cmd: + clean(file) with pytest.raises(SystemExit) as e: with patch.object(sys, "argv", args): func() From 74c7af158d2d4a2212ef87fe46d39094e573b445 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 17:27:02 +0200 Subject: [PATCH 14/29] Update 3.10 scope (#1639) * Update 3.10 scope * Add sonar-rules improvement --- doc/what-is-new.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/doc/what-is-new.md b/doc/what-is-new.md index 8be131e7..72fbbb06 100644 --- a/doc/what-is-new.md +++ b/doc/what-is-new.md @@ -1,3 +1,19 @@ +# Version 3.10 + +* `sonar-findings-sync` hardening + - Allow `-O` option for target organization + - Compatibility with MQR mode (credit @lukas-frystak-sonarsource) + - Misc bug fixes +* `sonar-config`: + - Fix bug about not exporting all projects when more 
than 1000 projects +* `sonar-audit`: + - New audit check to avoid using Scanner for .Net 9.2 that has a vulnerability + - Fix incorrect warning when running 2025.1 with JRE 21 (this is supported) + - Fix incorrect warning when SQS is run with JRE 17 (this is supported) +* `sonar-rules`: + - Allow to only export rules of a given quality profile +* `sonar-findings-sync` hardening + # Version 3.9 * Compatibility with SonarQube 2025.1 release From 3c1bb7ceca010349b4b2b6317e987abe4ac0c718 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 17:43:30 +0200 Subject: [PATCH 15/29] Fix groups not exported if they have empty description (#1641) * Fixes #1640 #1603 * Add log --- cli/config.py | 10 +++++++--- sonar/groups.py | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/cli/config.py b/cli/config.py index 4d24a653..3fadf575 100644 --- a/cli/config.py +++ b/cli/config.py @@ -131,7 +131,10 @@ def write_objects(queue: Queue[types.ObjectJsonRepr], fd: TextIO, object_type: s obj_json = queue.get() done = obj_json is utilities.WRITE_END if not done: - obj_json = __prep_json_for_write(obj_json, export_settings) + if object_type == "groups": + obj_json = __prep_json_for_write(obj_json, {**export_settings, EXPORT_EMPTY: True}) + else: + obj_json = __prep_json_for_write(obj_json, export_settings) if object_type in ("projects", "applications", "portfolios", "users"): if object_type == "users": key = obj_json.pop("login", None) @@ -199,19 +202,20 @@ def export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> Non write_q.put(utilities.WRITE_END) write_q.join() print("\n}", file=fd) - remove_empty = False if mode == "MIGRATION" else not kwargs.get(EXPORT_EMPTY, False) - utilities.normalize_json_file(file, remove_empty=remove_empty, remove_none=True) + utilities.normalize_json_file(file, remove_empty=False, remove_none=True) log.info("Exporting %s data from %s completed", mode.lower(), kwargs[options.URL]) def 
__prep_json_for_write(json_data: types.ObjectJsonRepr, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr: """Cleans up the JSON before writing""" + log.debug("Exporting settings %s", utilities.json_dump(export_settings)) json_data = utilities.sort_lists(json_data) if export_settings.get("MODE", "CONFIG") == "MIGRATION": return json_data if not export_settings.get("FULL_EXPORT", False): json_data = utilities.remove_nones(json_data) if not export_settings.get(EXPORT_EMPTY, False): + log.debug("Removing empties") json_data = utilities.remove_empties(json_data) if export_settings.get("INLINE_LISTS", True): json_data = utilities.inline_lists(json_data, exceptions=("conditions",)) diff --git a/sonar/groups.py b/sonar/groups.py index e255bc09..40b7c5d1 100644 --- a/sonar/groups.py +++ b/sonar/groups.py @@ -376,6 +376,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg if not export_settings.get("FULL_EXPORT", False) and g_obj.is_default(): continue g_list[g_name] = "" if g_obj.description is None else g_obj.description + log.info("%s groups to export", len(g_list)) if write_q: write_q.put(g_list) write_q.put(util.WRITE_END) From 19e059d86e326d0924b64e694946f55dd12d376b Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 17:46:48 +0200 Subject: [PATCH 16/29] Bump sonar-tools release (#1642) --- conf/release.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conf/release.Dockerfile b/conf/release.Dockerfile index 17d5597a..2dff1485 100644 --- a/conf/release.Dockerfile +++ b/conf/release.Dockerfile @@ -31,7 +31,7 @@ COPY ./LICENSE . 
COPY ./sonar/audit sonar/audit RUN pip install --upgrade pip \ -&& pip install sonar-tools==3.9 +&& pip install sonar-tools==3.10 USER ${USERNAME} WORKDIR /home/${USERNAME} From e61a9e568f93c17087b17d8bb3e477611c376eb9 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 17:57:57 +0200 Subject: [PATCH 17/29] Add last bug fix info (#1643) --- doc/what-is-new.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/what-is-new.md b/doc/what-is-new.md index 72fbbb06..868b04f1 100644 --- a/doc/what-is-new.md +++ b/doc/what-is-new.md @@ -6,6 +6,7 @@ - Misc bug fixes * `sonar-config`: - Fix bug about not exporting all projects when more than 1000 projects + - Fix bug about not exporting groups that have no description * `sonar-audit`: - New audit check to avoid using Scanner for .Net 9.2 that has a vulnerability - Fix incorrect warning when running 2025.1 with JRE 21 (this is supported) From 852e2342fbd29ccdd33725754eea61aadba2cf5e Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sun, 13 Apr 2025 19:34:41 +0200 Subject: [PATCH 18/29] Bump version (#1644) --- conf/release.Dockerfile | 2 +- sonar/version.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conf/release.Dockerfile b/conf/release.Dockerfile index 2dff1485..1ac33439 100644 --- a/conf/release.Dockerfile +++ b/conf/release.Dockerfile @@ -31,7 +31,7 @@ COPY ./LICENSE . 
COPY ./sonar/audit sonar/audit RUN pip install --upgrade pip \ -&& pip install sonar-tools==3.10 +&& pip install sonar-tools==3.11 USER ${USERNAME} WORKDIR /home/${USERNAME} diff --git a/sonar/version.py b/sonar/version.py index e3a0c06f..a03382f3 100644 --- a/sonar/version.py +++ b/sonar/version.py @@ -24,5 +24,5 @@ """ -PACKAGE_VERSION = "3.10" -MIGRATION_TOOL_VERSION = "0.5" +PACKAGE_VERSION = "3.11" +MIGRATION_TOOL_VERSION = "0.6-snapshot" From 60db86cb8e8859549acfe32fc3bddf10dea0bbe7 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Tue, 15 Apr 2025 09:10:03 +0200 Subject: [PATCH 19/29] Fix-too-many-issues (#1648) * Fixes #1647 #1636 * Remove useless cli parameters from search filters * Removed test where nbr of issues would be >10K and fail (no more failure because search split by files) --- cli/findings_export.py | 3 ++- sonar/issues.py | 41 +++++++++++++++++++++++++++++++++++----- test/unit/test_issues.py | 11 ----------- 3 files changed, 38 insertions(+), 17 deletions(-) diff --git a/cli/findings_export.py b/cli/findings_export.py index dbc7b56b..c34f84a4 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -331,10 +331,11 @@ def __get_component_findings(queue: Queue[tuple[object, ConfigSettings]], write_ def store_findings(components_list: dict[str, object], params: ConfigSettings) -> None: """Export all findings of a given project list""" components_queue = Queue(maxsize=0) + comp_params = {k: v for k, v in params.items() if k not in ("withUrl", "logfile", "datesWithouTine", "file", "format", "sonar")} for comp in components_list.values(): try: log.debug("Queue %s task %s put", str(components_queue), str(comp)) - components_queue.put((comp, params.copy())) + components_queue.put((comp, comp_params.copy())) except (ConnectionError, RequestException) as e: util.handle_error(e, f"exporting issues of {str(comp)}", catch_all=True) diff --git a/sonar/issues.py b/sonar/issues.py index 09260c96..c6bf4277 100644 --- a/sonar/issues.py +++ 
b/sonar/issues.py @@ -93,10 +93,10 @@ "impactSeverities", # 10.4 new filter NEW_STATUS, + "files", + "directories", ) -_FILTERS_10_2_REMAPPING = {"severities": "impactSeverities"} - TYPES = ("BUG", "VULNERABILITY", "CODE_SMELL") SEVERITIES = ("BLOCKER", "CRITICAL", "MAJOR", "MINOR", "INFO") IMPACT_SEVERITIES = ("HIGH", "MEDIUM", "LOW") @@ -592,16 +592,47 @@ def search_by_directory(endpoint: pf.Platform, params: ApiParams) -> dict[str, I new_params = params.copy() if "components" in params: new_params[component_filter(endpoint)] = params["components"] + proj_key = new_params.get("project", new_params.get(component_filter(endpoint), None)) log.info("Splitting search by directories with %s", util.json_dump(new_params)) - facets = _get_facets(endpoint=endpoint, project_key=new_params[component_filter(endpoint)], facets="directories", params=new_params) + facets = _get_facets(endpoint=endpoint, project_key=proj_key, facets="directories", params=new_params) + log.debug("FAcets %s", util.json_dump(facets)) issue_list = {} for d in facets["directories"]: - new_params["directories"] = d["val"] - issue_list.update(search(endpoint=endpoint, params=new_params, raise_error=True)) + try: + new_params["directories"] = d["val"] + issue_list.update(search(endpoint=endpoint, params=new_params, raise_error=True)) + except TooManyIssuesError: + log.info(_TOO_MANY_ISSUES_MSG) + new_params[component_filter(endpoint)] = proj_key + issue_list.update(search_by_file(endpoint=endpoint, params=new_params)) log.debug("Search by directory ALL: %d issues found", len(issue_list)) return issue_list +def search_by_file(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue]: + """Searches issues splitting by directory to avoid exceeding the 10K limit""" + new_params = params.copy() + if "components" in params: + new_params[component_filter(endpoint)] = params["components"] + proj_key = new_params.get("project", new_params.get(component_filter(endpoint), None)) + log.info("Splitting search by 
files with %s", util.json_dump(new_params)) + facets = _get_facets(endpoint=endpoint, project_key=proj_key, facets="files", params=new_params) + log.debug("Facets %s", util.json_dump(facets)) + issue_list = {} + for d in facets["files"]: + try: + new_params["files"] = d["val"] + issue_list.update(search(endpoint=endpoint, params=new_params, raise_error=True)) + except TooManyIssuesError: + log.error("Too many issues (>10000) in file %s, aborting search issue for this file", f'{proj_key}:{d["val"]}') + continue + except exceptions.SonarException as e: + log.error("Error while searching issues in file %s: %s", f'{proj_key}:{d["val"]}', str(e)) + continue + log.debug("Search by files ALL: %d issues found", len(issue_list)) + return issue_list + + def search_by_type(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue]: """Searches issues splitting by type to avoid exceeding the 10K limit""" issue_list = {} diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 9c53d97b..25190cbc 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -267,14 +267,3 @@ def test_search_by_small() -> None: assert list1 == issues.search_by_date(tutil.SQ, params) assert list1 == issues.search_by_directory(tutil.SQ, params) - -def test_search_by_large() -> None: - """Test search_by on large project (more than 10000 issues)""" - assert len(issues.search_by_project(tutil.SQ, "pytorch")) > 10000 - - params = {"components": "pytorch", "project": "pytorch"} - - # Versions below 10.4 did not have enough python rules to break the 10K limit on the pytorch project - if tutil.SQ.version() >= (10, 4, 0): - with pytest.raises(issues.TooManyIssuesError): - issues.search_by_severity(tutil.SQ, params) From 35e94b923239770aed615427879c0123f15f6220 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Tue, 15 Apr 2025 12:59:28 +0200 Subject: [PATCH 20/29] Cache-platform-settings (#1649) * Remove unwanted log * Formatting * Fixes #1646 * Fix typo causing bug on status * 
Quality pass * Rename component filter var * Fix bug on issue URL in PRs * Simplify pre_search_filter() * simplify count_by_rule() * Simplify _get_facets() * Add vars for types, severities * Replace severities by old severities * add search field functions * Fix regression --- cli/findings_export.py | 12 ++-- sonar/components.py | 4 +- sonar/issues.py | 142 ++++++++++++++++++++------------------- sonar/platform.py | 17 +++-- test/unit/test_issues.py | 1 - 5 files changed, 88 insertions(+), 88 deletions(-) diff --git a/cli/findings_export.py b/cli/findings_export.py index c34f84a4..4fd32883 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -119,12 +119,12 @@ def parse_args(desc: str) -> Namespace: parser.add_argument( f"--{options.SEVERITIES}", required=False, - help="Comma separated severities among" + util.list_to_csv(issues.SEVERITIES + hotspots.SEVERITIES), + help="Comma separated severities among" + util.list_to_csv(issues.OLD_SEVERITIES + hotspots.SEVERITIES), ) parser.add_argument( f"--{options.TYPES}", required=False, - help="Comma separated types among " + util.list_to_csv(issues.TYPES + hotspots.TYPES), + help="Comma separated types among " + util.list_to_csv(issues.OLD_TYPES + hotspots.TYPES), ) parser.add_argument(f"--{options.TAGS}", help="Comma separated findings tags", required=False) parser.add_argument( @@ -240,11 +240,11 @@ def __verify_inputs(params: types.ApiParams) -> bool: if diff: util.exit_fatal(f"Statuses {str(diff)} are not legit statuses", errcodes.WRONG_SEARCH_CRITERIA) - diff = util.difference(util.csv_to_list(params.get(options.SEVERITIES, None)), issues.SEVERITIES + hotspots.SEVERITIES) + diff = util.difference(util.csv_to_list(params.get(options.SEVERITIES, None)), issues.OLD_SEVERITIES + hotspots.SEVERITIES) if diff: util.exit_fatal(f"Severities {str(diff)} are not legit severities", errcodes.WRONG_SEARCH_CRITERIA) - diff = util.difference(util.csv_to_list(params.get(options.TYPES, None)), issues.TYPES + 
hotspots.TYPES) + diff = util.difference(util.csv_to_list(params.get(options.TYPES, None)), issues.OLD_TYPES + hotspots.TYPES) if diff: util.exit_fatal(f"Types {str(diff)} are not legit types", errcodes.WRONG_SEARCH_CRITERIA) if len(params[options.CSV_SEPARATOR]) > 1: @@ -265,10 +265,10 @@ def __get_component_findings(queue: Queue[tuple[object, ConfigSettings]], write_ i_resols = util.intersection(resol_list, issues.RESOLUTIONS) h_resols = util.intersection(resol_list, hotspots.RESOLUTIONS) type_list = util.csv_to_list(params.get(options.TYPES, None)) - i_types = util.intersection(type_list, issues.TYPES) + i_types = util.intersection(type_list, issues.OLD_TYPES) h_types = util.intersection(type_list, hotspots.TYPES) sev_list = util.csv_to_list(params.get(options.SEVERITIES, None)) - i_sevs = util.intersection(sev_list, issues.SEVERITIES) + i_sevs = util.intersection(sev_list, issues.OLD_SEVERITIES) h_sevs = util.intersection(sev_list, hotspots.SEVERITIES) if status_list or resol_list or type_list or sev_list or options.LANGUAGES in params: diff --git a/sonar/components.py b/sonar/components.py index a79cbcea..493abfa6 100644 --- a/sonar/components.py +++ b/sonar/components.py @@ -356,11 +356,11 @@ def get_measures_history(self, metrics_list: types.KeyList) -> dict[str, str]: return measures.get_history(self, metrics_list) def api_params(self, op: str = c.LIST) -> types.ApiParams: - from sonar.issues import component_filter + from sonar.issues import component_search_field ops = { c.GET: {"component": self.key}, - c.LIST: {component_filter(self.endpoint): self.key}, + c.LIST: {component_search_field(self.endpoint): self.key}, } return ops[op] if op in ops else ops[c.LIST] diff --git a/sonar/issues.py b/sonar/issues.py index c6bf4277..5098aeee 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -42,20 +42,30 @@ from sonar import users, findings, changelog, projects, rules, config, exceptions import sonar.utilities as util -COMPONENT_FILTER_OLD = "componentKeys" 
-COMPONENT_FILTER = "components" +_OLD_SEARCH_COMPONENT_FIELD = "componentKeys" +_NEW_SEARCH_COMPONENT_FIELD = "components" -OLD_STATUS = "resolutions" -NEW_STATUS = "issueStatuses" +_OLD_SEARCH_STATUS_FIELD = "resolutions" +_NEW_SEARCH_STATUS_FIELD = "issueStatuses" + +_OLD_SEARCH_TYPE_FIELD = "types" +_NEW_SEARCH_TYPE = "impactSoftwareQualities" + +_OLD_SEARCH_SEVERITY_FIELD = "severities" +_NEW_SEARCH_SEVERITY_FIELD = "impactSeverities" OLD_FP = "FALSE-POSITIVE" NEW_FP = "FALSE_POSITIVE" _SEARCH_CRITERIAS = ( - COMPONENT_FILTER_OLD, - COMPONENT_FILTER, - "types", - "severities", + _OLD_SEARCH_COMPONENT_FIELD, + _NEW_SEARCH_COMPONENT_FIELD, + _OLD_SEARCH_TYPE_FIELD, + _NEW_SEARCH_TYPE, + _OLD_SEARCH_SEVERITY_FIELD, + _NEW_SEARCH_SEVERITY_FIELD, + _OLD_SEARCH_STATUS_FIELD, + _NEW_SEARCH_STATUS_FIELD, "createdAfter", "createdBefore", "createdInLast", @@ -85,32 +95,19 @@ "author", "issues", "languages", - OLD_STATUS, "resolved", "rules", "scopes", - # 10.2 new filter - "impactSeverities", - # 10.4 new filter - NEW_STATUS, "files", "directories", ) -TYPES = ("BUG", "VULNERABILITY", "CODE_SMELL") -SEVERITIES = ("BLOCKER", "CRITICAL", "MAJOR", "MINOR", "INFO") -IMPACT_SEVERITIES = ("HIGH", "MEDIUM", "LOW") -IMPACT_SOFTWARE_QUALITIES = ("SECURITY", "RELIABILITY", "MAINTAINABILITY") +OLD_TYPES = ("BUG", "VULNERABILITY", "CODE_SMELL") +NEW_TYPES = ("RELIABILITY", "SECURITY", "MAINTAINABILITY") +OLD_SEVERITIES = ("BLOCKER", "CRITICAL", "MAJOR", "MINOR", "INFO") +NEW_SEVERITIES = ("BLOCKER", "HIGH", "MEDIUM", "LOW", "INFO") STATUSES = ("OPEN", "CONFIRMED", "REOPENED", "RESOLVED", "CLOSED", "ACCEPTED", "FALSE_POSITIVE") RESOLUTIONS = ("FALSE-POSITIVE", "WONTFIX", "FIXED", "REMOVED", "ACCEPTED") -FILTERS_MAP = { - "types": TYPES, - "severities": SEVERITIES, - "impactSoftwareQualities": IMPACT_SOFTWARE_QUALITIES, - "impactSeverities": IMPACT_SEVERITIES, - "statuses": STATUSES, - OLD_STATUS: RESOLUTIONS, -} _TOO_MANY_ISSUES_MSG = "Too many issues, recursing..." 
@@ -166,7 +163,7 @@ def url(self) -> str: if self.branch is not None: branch = f"&branch={requests.utils.quote(self.branch)}" elif self.pull_request is not None: - branch = f"pullRequest={requests.utils.quote(self.pull_request)}&" + branch = f"&pullRequest={requests.utils.quote(self.pull_request)}" return f"{self.endpoint.url}/project/issues?id={self.projectKey}{branch}&issues={self.key}" def debt(self) -> int: @@ -579,20 +576,29 @@ def apply_changelog(self, source_issue: Issue, settings: ConfigSettings) -> bool # ------------------------------- Static methods -------------------------------------- -def component_filter(endpoint: pf.Platform) -> str: +def component_search_field(endpoint: pf.Platform) -> str: """Returns the fields used for issues/search filter by porject key""" - if endpoint.version() >= (10, 2, 0): - return COMPONENT_FILTER - else: - return COMPONENT_FILTER_OLD + return _NEW_SEARCH_COMPONENT_FIELD if endpoint.version() >= (10, 2, 0) else _OLD_SEARCH_COMPONENT_FIELD + + +def type_search_field(endpoint: pf.Platform) -> str: + return _OLD_SEARCH_TYPE_FIELD if endpoint.is_mqr_mode() else _NEW_SEARCH_TYPE + + +def severity_search_field(endpoint: pf.Platform) -> str: + return _OLD_SEARCH_SEVERITY_FIELD if endpoint.is_mqr_mode() else _NEW_SEARCH_SEVERITY_FIELD + + +def status_search_field(endpoint: pf.Platform) -> str: + return _OLD_SEARCH_STATUS_FIELD if endpoint.is_mqr_mode() else _NEW_SEARCH_STATUS_FIELD def search_by_directory(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue]: """Searches issues splitting by directory to avoid exceeding the 10K limit""" new_params = params.copy() if "components" in params: - new_params[component_filter(endpoint)] = params["components"] - proj_key = new_params.get("project", new_params.get(component_filter(endpoint), None)) + new_params[component_search_field(endpoint)] = params["components"] + proj_key = new_params.get("project", new_params.get(component_search_field(endpoint), None)) log.info("Splitting 
search by directories with %s", util.json_dump(new_params)) facets = _get_facets(endpoint=endpoint, project_key=proj_key, facets="directories", params=new_params) log.debug("FAcets %s", util.json_dump(facets)) @@ -603,7 +609,7 @@ def search_by_directory(endpoint: pf.Platform, params: ApiParams) -> dict[str, I issue_list.update(search(endpoint=endpoint, params=new_params, raise_error=True)) except TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) - new_params[component_filter(endpoint)] = proj_key + new_params[component_search_field(endpoint)] = proj_key issue_list.update(search_by_file(endpoint=endpoint, params=new_params)) log.debug("Search by directory ALL: %d issues found", len(issue_list)) return issue_list @@ -613,8 +619,8 @@ def search_by_file(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue] """Searches issues splitting by directory to avoid exceeding the 10K limit""" new_params = params.copy() if "components" in params: - new_params[component_filter(endpoint)] = params["components"] - proj_key = new_params.get("project", new_params.get(component_filter(endpoint), None)) + new_params[component_search_field(endpoint)] = params["components"] + proj_key = new_params.get("project", new_params.get(component_search_field(endpoint), None)) log.info("Splitting search by files with %s", util.json_dump(new_params)) facets = _get_facets(endpoint=endpoint, project_key=proj_key, facets="files", params=new_params) log.debug("Facets %s", util.json_dump(facets)) @@ -638,9 +644,10 @@ def search_by_type(endpoint: pf.Platform, params: ApiParams) -> dict[str, Issue] issue_list = {} new_params = params.copy() log.info("Splitting search by issue types") - for issue_type in ("BUG", "VULNERABILITY", "CODE_SMELL"): + types = NEW_TYPES if endpoint.is_mqr_mode() else OLD_TYPES + for issue_type in types: try: - new_params["types"] = [issue_type] + new_params[type_search_field(endpoint)] = [issue_type] issue_list.update(search(endpoint=endpoint, params=new_params)) except 
TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) @@ -654,9 +661,10 @@ def search_by_severity(endpoint: pf.Platform, params: ApiParams) -> dict[str, Is issue_list = {} new_params = params.copy() log.info("Splitting search by severities") - for sev in ("BLOCKER", "CRITICAL", "MAJOR", "MINOR", "INFO"): + severities = NEW_SEVERITIES if endpoint.is_mqr_mode() else OLD_SEVERITIES + for sev in severities: try: - new_params["severities"] = [sev] + new_params[severity_search_field(endpoint)] = [sev] issue_list.update(search(endpoint=endpoint, params=new_params)) except TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) @@ -771,7 +779,7 @@ def search_all(endpoint: pf.Platform, params: ApiParams = None) -> dict[str, Iss issue_list = search(endpoint=endpoint, params=new_params.copy()) except TooManyIssuesError: log.info(_TOO_MANY_ISSUES_MSG) - comp_filter = component_filter(endpoint) + comp_filter = component_search_field(endpoint) if params and "project" in params: key_list = util.csv_to_list(params["project"]) elif params and comp_filter in params: @@ -866,15 +874,10 @@ def _get_facets(endpoint: pf.Platform, project_key: str, facets: str = "director """Returns the facets of a search""" if not params: params = {} - params.update({component_filter(endpoint): project_key, "facets": facets, "ps": Issue.MAX_PAGE_SIZE, "additionalFields": "comments"}) + params.update({component_search_field(endpoint): project_key, "facets": facets, "ps": Issue.MAX_PAGE_SIZE, "additionalFields": "comments"}) filters = pre_search_filters(endpoint=endpoint, params=params) data = json.loads(endpoint.get(Issue.API[c.SEARCH], params=filters).text) - l = {} - facets_list = util.csv_to_list(facets) - for f in data["facets"]: - if f["property"] in facets_list: - l[f["property"]] = f["values"] - return l + return {f["property"]: f["values"] for f in data["facets"] if f["property"] in util.csv_to_list(facets)} def __get_one_issue_date(endpoint: pf.Platform, asc_sort: str = "false", params: ApiParams = 
None) -> Optional[datetime]: @@ -918,12 +921,9 @@ def count_by_rule(endpoint: pf.Platform, **kwargs) -> dict[str, int]: params["rules"] = ",".join(ruleset[i * SLICE_SIZE : min((i + 1) * SLICE_SIZE - 1, len(ruleset))]) try: data = json.loads(endpoint.get(Issue.API[c.SEARCH], params=params).text)["facets"][0]["values"] - for d in data: - if d["val"] not in ruleset: - continue - if d["val"] not in rulecount: - rulecount[d["val"]] = 0 - rulecount[d["val"]] += d["count"] + added_count = {d["val"]: d["count"] for d in data if d["val"] in ruleset} + for k, v in added_count.items(): + rulecount[k] = rulecount.get(k, 0) + v except Exception as e: log.error("%s while counting issues per rule, count may be incomplete", util.error_msg(e)) return rulecount @@ -942,26 +942,28 @@ def pre_search_filters(endpoint: pf.Platform, params: ApiParams) -> ApiParams: if not params: return {} log.debug("Sanitizing issue search filters %s", str(params)) - version = endpoint.version() - comp_filter = component_filter(endpoint) - filters = util.dict_remap(original_dict=params.copy(), remapping={"project": comp_filter, "application": comp_filter, "portfolio": comp_filter}) + comp_filter = component_search_field(endpoint) + filters = util.dict_remap(original_dict=params, remapping={"project": comp_filter, "application": comp_filter, "portfolio": comp_filter}) filters = util.dict_subset(util.remove_nones(filters), _SEARCH_CRITERIAS) - types = filters.pop("types", []) + filters.pop("impactSoftwareQualities", []) - severities = filters.pop("severities", []) + filters.pop("impactSeverities", []) - statuses = filters.pop("statuses", []) + filters.pop("NEW_STATUS", []) + filters.pop(OLD_STATUS, []) if endpoint.is_mqr_mode(): - log.debug("MAP Type = %s", str(config.get_issues_map("impactSoftwareQualities"))) - filters["impactSoftwareQualities"] = util.list_remap(types, config.get_issues_map("types")) - filters["impactSeverities"] = util.list_remap(severities, config.get_issues_map("severities")) - 
filters[NEW_STATUS] = util.list_remap(statuses, mapping=config.get_issues_map(OLD_STATUS)) + mapping = { + _NEW_SEARCH_TYPE: _OLD_SEARCH_TYPE_FIELD, + _NEW_SEARCH_SEVERITY_FIELD: _OLD_SEARCH_SEVERITY_FIELD, + _NEW_SEARCH_STATUS_FIELD: _OLD_SEARCH_STATUS_FIELD, + } else: - filters["types"] = util.list_remap(types, config.get_issues_map("impactSoftwareQualities")) - filters["severities"] = util.list_remap(severities, config.get_issues_map("impactSeverities")) - filters[OLD_STATUS] = util.list_remap(statuses, mapping=config.get_issues_map(NEW_STATUS)) - - if version < (10, 2, 0): + mapping = { + _OLD_SEARCH_TYPE_FIELD: _NEW_SEARCH_TYPE, + _OLD_SEARCH_SEVERITY_FIELD: _NEW_SEARCH_SEVERITY_FIELD, + _OLD_SEARCH_STATUS_FIELD: _NEW_SEARCH_STATUS_FIELD, + } + for new, old in mapping.items(): + crit = filters.pop(old, []) + filters.pop(new, []) + filters[new] = util.list_remap(crit, config.get_issues_map(old)) + + if endpoint.version() < (10, 2, 0): # Starting from 10.2 - "componentKeys" was renamed "components" - filters = util.dict_remap(original_dict=filters, remapping={COMPONENT_FILTER: COMPONENT_FILTER_OLD}) + filters = util.dict_remap(original_dict=filters, remapping={_NEW_SEARCH_COMPONENT_FIELD: _OLD_SEARCH_COMPONENT_FIELD}) filters = {k: v for k, v in filters.items() if v is not None and (not isinstance(v, (list, set, str, tuple)) or len(v) > 0)} for field in filters: diff --git a/sonar/platform.py b/sonar/platform.py index 1d73450a..bd9edbb7 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -358,19 +358,18 @@ def plugins(self) -> dict[str, str]: return sysinfo["Statistics"]["plugins"] return sysinfo["Plugins"] - def get_settings(self, settings_list: list[str] = None) -> dict[str, any]: + def get_settings(self, settings_list: Optional[list[str]] = None) -> dict[str, any]: """Returns a list of (or all) platform global settings value from their key :return: the list of settings values :rtype: dict{: , ...} """ - params = util.remove_nones({"keys": 
util.list_to_csv(settings_list)}) - resp = self.get(settings.Setting.API[c.GET], params=params) - json_s = json.loads(resp.text) + if settings_list is None: + settings_dict = settings.get_bulk(endpoint=self) + else: + settings_dict = {k: settings.get_object(endpoint=self, key=k) for k in settings_list} platform_settings = {} - for s in json_s["settings"]: - for setting_key in "value", "values", "fieldValues": - if setting_key in s: - platform_settings[s["key"]] = s[setting_key] + for v in settings_dict.values(): + platform_settings |= v.to_json() return platform_settings def __settings(self, settings_list: types.KeyList = None, include_not_set: bool = False) -> dict[str, settings.Setting]: @@ -387,7 +386,7 @@ def get_setting(self, key: str) -> any: :param key: Setting key :return: the setting value """ - return self.get_settings(key).get(key, None) + return settings.get_object(endpoint=self, key=key).to_json().get(key, None) def reset_setting(self, key: str) -> bool: """Resets a platform global setting to the SonarQube internal default value diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 25190cbc..155d2503 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -266,4 +266,3 @@ def test_search_by_small() -> None: assert list1 == issues.search_by_severity(tutil.SQ, params) assert list1 == issues.search_by_date(tutil.SQ, params) assert list1 == issues.search_by_directory(tutil.SQ, params) - From 292a7115ce1d5ed3a3a9f5a2b7ff6bceaba4050d Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 16 Apr 2025 14:18:12 +0200 Subject: [PATCH 21/29] Multithread-sync (#1651) * Fix #1629 * Fixes #1629 * Fixes #1650 * Add sync logs * Add name for finding sync threads * Update tests path * File retrieval is different for issues and hotspots * For some reason some identical issues have different hash. 
Adjusting match to that * Fix min score for approx match * Confusing src finding when approx match, should be tgt finding * Better logs of final summary (issues+hotspots) * Add file() method * Quality pass * Make nbr of threads configurable --- cli/findings_sync.py | 12 +++---- cli/options.py | 6 ++++ sonar-project.properties | 2 +- sonar/changelog.py | 2 +- sonar/findings.py | 52 +++++++++++------------------- sonar/hotspots.py | 25 ++++++++++++--- sonar/issues.py | 20 ++++++++++++ sonar/syncer.py | 69 +++++++++++++++++++++++++--------------- 8 files changed, 116 insertions(+), 72 deletions(-) diff --git a/cli/findings_sync.py b/cli/findings_sync.py index 801ecf5f..6e6c37c2 100755 --- a/cli/findings_sync.py +++ b/cli/findings_sync.py @@ -198,12 +198,12 @@ def main() -> None: (report, counters) = src_project.sync(tgt_project, sync_settings=settings) __dump_report(report, args.file) - log.info("%d issues needed to be synchronized", counters.get("nb_to_sync", 0)) - log.info("%d issues were synchronized successfully", counters.get("nb_applies", 0)) - log.info("%d issues could not be synchronized because no match was found in target", counters.get("nb_no_match", 0)) - log.info("%d issues could not be synchronized because there were multiple matches", counters.get("nb_multiple_matches", 0)) - log.info("%d issues could not be synchronized because the match was approximate", counters.get("nb_approx_match", 0)) - log.info("%d issues could not be synchronized because target issue already had a changelog", counters.get("nb_tgt_has_changelog", 0)) + log.info("%d issues+hotspots needed to be synchronized", counters.get("nb_to_sync", 0)) + log.info("%d issues+hotspots were synchronized successfully", counters.get("nb_applies", 0)) + log.info("%d issues+hotspots could not be synchronized because no match was found in target", counters.get("nb_no_match", 0)) + log.info("%d issues+hotspots could not be synchronized because there were multiple matches", 
counters.get("nb_multiple_matches", 0)) + log.info("%d issues+hotspots could not be synchronized because the match was approximate", counters.get("nb_approx_match", 0)) + log.info("%d issues+hotspots could not be synchronized because target issue already had a changelog", counters.get("nb_tgt_has_changelog", 0)) except (exceptions.SonarException, options.ArgumentsError) as e: util.exit_fatal(e.message, e.errcode) diff --git a/cli/options.py b/cli/options.py index 8bd379f7..67b4ab80 100644 --- a/cli/options.py +++ b/cli/options.py @@ -212,6 +212,12 @@ def parse_and_check(parser: ArgumentParser, logger_name: str = None, verify_toke kwargs.pop(SKIP_VERSION_CHECK, None) if utilities.is_sonarcloud_url(kwargs[URL]) and kwargs[ORG] is None: raise ArgumentsError(f"Organization (-{ORG_SHORT}) option is mandatory for SonarCloud") + if URL_TARGET in kwargs and kwargs[URL_TARGET] is None: + kwargs[URL_TARGET] = kwargs[URL] + if TOKEN_TARGET in kwargs and kwargs[TOKEN_TARGET] is None: + kwargs[TOKEN_TARGET] = kwargs[TOKEN] + if ORG_TARGET in kwargs and kwargs[ORG_TARGET] is None: + kwargs[ORG_TARGET] = kwargs[ORG] if URL_TARGET in kwargs and utilities.is_sonarcloud_url(kwargs[URL_TARGET]) and kwargs[ORG_TARGET] is None: raise ArgumentsError(f"Organization (-{ORG_TARGET_SHORT}) option is mandatory for SonarCloud") if verify_token: diff --git a/sonar-project.properties b/sonar-project.properties index 25c6adb2..92fac825 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -15,7 +15,7 @@ sonar.sarifReportPaths=build/results_sarif.sarif # sonar.externalIssuesReportPaths=build/shellcheck.json,build/trivy.json # sonar.python.bandit.reportPaths=build/bandit-report.json -sonar.tests=test/latest, test/lts, test/9, test/9-ce +sonar.tests=test/latest, test/lts, test/9, test/9-ce, test/cb sonar.coverage.exclusions=setup*.py, test/**/*, conf/*2sonar.py, cli/cust_measures.py, sonar/custom_measures.py, cli/support.py, cli/projects_export.py, cli/projects_import.py 
sonar.cpd.exclusions=setup*.py diff --git a/sonar/changelog.py b/sonar/changelog.py index 18f2c7ba..13d4b6c8 100644 --- a/sonar/changelog.py +++ b/sonar/changelog.py @@ -185,7 +185,7 @@ def is_assignment(self) -> bool: def is_unassign(self) -> bool: """Returns whether the changelog item is an unassign""" - return any(d.get("key", "") == "assignee" and "newValue" not in d for d in self.sq_json["diffs"]) + return any(d.get("key", "") == "assignee" and "newValue" not in d for d in self.sq_json["diffs"]) and len(self.sq_json["diffs"]) == 1 def assignee(self, new: bool = True) -> Optional[str]: """Returns the new assignee of a change assignment changelog""" diff --git a/sonar/findings.py b/sonar/findings.py index ce511e2d..11db11b2 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -172,7 +172,7 @@ def _load_common(self, jsondata: types.ApiPayload) -> None: if "vulnerabilityProbability" in jsondata: self.impacts = {QUALITY_SECURITY: jsondata["vulnerabilityProbability"] + "(HOTSPOT)"} elif self.endpoint.version() >= (10, 2, 0): - self.impacts = {i["softwareQuality"]: i["severity"] for i in jsondata["impacts"]} + self.impacts = {i["softwareQuality"]: i["severity"] for i in jsondata.get("impacts", {})} else: self.impacts = {TYPE_QUALITY_MAPPING[jsondata.get("type", TYPE_NONE)]: SEVERITY_MAPPING[jsondata.get("severity", SEVERITY_NONE)]} self.type = jsondata.get("type", TYPE_NONE) @@ -193,7 +193,10 @@ def _load_common(self, jsondata: types.ApiPayload) -> None: def _load_from_search(self, jsondata: types.ApiPayload) -> None: self._load_common(jsondata) - self.projectKey = jsondata["project"] + if isinstance(jsondata["project"], str): + self.projectKey = jsondata["project"] + else: + self.projectKey = jsondata["project"]["key"] self.creation_date = util.string_to_date(jsondata["creationDate"]) self.modification_date = util.string_to_date(jsondata["updateDate"]) self.hash = jsondata.get("hash", None) @@ -216,27 +219,9 @@ def url(self) -> str: # Must be implemented in sub 
classes raise NotImplementedError() - def file(self) -> Union[str, None]: - """ - :return: The finding full file path, relative to the rpoject root directory - :rtype: str or None if not found - """ - if "component" in self.sq_json: - comp = self.sq_json["component"] - # Hack: Fix to adapt to the ugly component structure on branches and PR - # "component": "src:sonar/hot.py:BRANCH:somebranch" - m = re.search("(^.*):BRANCH:", comp) - if m: - comp = m.group(1) - m = re.search("(^.*):PULL_REQUEST:", comp) - if m: - comp = m.group(1) - return comp.split(":")[-1] - elif "path" in self.sq_json: - return self.sq_json["path"] - else: - log.warning("Can't find file name for %s", str(self)) - return None + def file(self) -> str: + # Must be implemented in sub classes + raise NotImplementedError() def language(self) -> str: """Returns the finding language""" @@ -391,19 +376,15 @@ def strictly_identical_to(self, another_finding: Finding, ignore_component: bool :meta private: """ if self.key == another_finding.key: + log.debug("%s and %s are the same issue, they have the same key %s", str(self), str(another_finding), self.key) return True prelim_check = True if self.rule in ("python:S6540"): try: - col1 = self.sq_json["textRange"]["startOffset"] - col2 = another_finding.sq_json["textRange"]["startOffset"] - prelim_check = col1 == col2 + prelim_check = self.sq_json["textRange"]["startOffset"] == another_finding.sq_json["textRange"]["startOffset"] except KeyError: pass - if self.key == "444f6f46-9571-42e1-8ee4-d1171d8b497e": - log.info("Source: %s / %s / %s / %s ", self.rule, self.hash, self.file(), self.message) - log.info("Target: %s / %s / %s / %s ", another_finding.rule, another_finding.hash, another_finding.file(), another_finding.message) - return ( + identical = ( self.rule == another_finding.rule and self.hash == another_finding.hash and self.message == another_finding.message @@ -411,12 +392,14 @@ def strictly_identical_to(self, another_finding: Finding, 
ignore_component: bool and (self.component == another_finding.component or ignore_component) and prelim_check ) + log.debug("%s vs %s - identical = %s hash = %s/%s", str(self), str(another_finding), str(identical), self.hash, another_finding.hash) + return identical def almost_identical_to(self, another_finding: Finding, ignore_component: bool = False, **kwargs) -> bool: """ :meta private: """ - if self.rule != another_finding.rule or self.hash != another_finding.hash: + if self.rule != another_finding.rule: return False score = 0 match_msg = " Match" @@ -447,7 +430,9 @@ def almost_identical_to(self, another_finding: Finding, ignore_component: bool = log.debug("%s vs %s - %s score = %d", str(self), str(another_finding), match_msg, score) # Need at least 7 / 8 to consider it's a match - return score >= 7 + # for some reason, rarely the hash may not be the same for 2 issues that are identical + # In this case we match if the rest of the score is perfectly identical + return score == 8 or score >= 7 and self.hash == another_finding.hash def search_siblings( self, findings_list: list[Finding], allowed_users: bool = None, ignore_component: bool = False, **kwargs @@ -461,6 +446,7 @@ def search_siblings( log.info("Searching for an exact match of %s", str(self)) for finding in findings_list: if self is finding: + log.debug("%s and %s are the same issue", str(self), str(finding)) continue if finding.strictly_identical_to(self, ignore_component, **kwargs): if finding.can_be_synced(allowed_users): @@ -470,8 +456,6 @@ def search_siblings( log.info("%s and %s are exact match but target already has changes, cannot be synced", str(self), str(finding)) match_but_modified.append(finding) return exact_matches, approx_matches, match_but_modified - # else: - # log.debug("%s and %s are not identical", str(self), str(finding)) log.info("No exact match, searching for an approximate match of %s", str(self)) for finding in findings_list: diff --git a/sonar/hotspots.py b/sonar/hotspots.py 
index c87334ed..877320ca 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -23,6 +23,7 @@ import json import re +from typing import Optional from http import HTTPStatus from requests import RequestException import requests.utils @@ -147,15 +148,31 @@ def to_json(self, without_time: bool = False) -> types.ObjectJsonRepr: data.pop("type", None) return data + def file(self) -> Optional[str]: + """ + :return: The hotspot full file path, relative to the project root directory, or None if not found + """ + try: + f = self.sq_json["component"]["path"] + except KeyError: + f = None + if not f: + log.warning("Can't find file name for %s", str(self)) + return f + def refresh(self) -> bool: """Refreshes and reads hotspots details in SonarQube :return: The hotspot details :rtype: Whether ther operation succeeded """ - resp = self.get(Hotspot.API[c.GET], {"hotspot": self.key}) - if resp.ok: - self.__details = json.loads(resp.text) - return resp.ok + try: + resp = self.get(Hotspot.API[c.GET], {"hotspot": self.key}) + if resp.ok: + self.__details = json.loads(resp.text) + self._load(self.__details) + return resp.ok + except (ConnectionError, RequestException): + return False def __mark_as(self, resolution: str, comment: str = None) -> bool: params = {"hotspot": self.key, "status": "REVIEWED", "resolution": resolution} diff --git a/sonar/issues.py b/sonar/issues.py index 5098aeee..f0e93ed5 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -166,6 +166,26 @@ def url(self) -> str: branch = f"&pullRequest={requests.utils.quote(self.pull_request)}" return f"{self.endpoint.url}/project/issues?id={self.projectKey}{branch}&issues={self.key}" + def file(self) -> Optional[str]: + """ + :return: The issue full file path, relative to the project root directory, or None if not found + """ + if "component" in self.sq_json: + comp = self.sq_json["component"] + # Hack: Fix to adapt to the ugly component structure on branches and PR + # "component": "src:sonar/hot.py:BRANCH:somebranch" 
+ for prefix in ("BRANCH", "PULL_REQUEST"): + m = re.search(rf"(^.*):{prefix}:", comp) + if m: + comp = m.group(1) + break + return comp.split(":")[-1] + elif "path" in self.sq_json: + return self.sq_json["path"] + else: + log.warning("Can't find file name for %s", str(self)) + return None + def debt(self) -> int: """ :return: The remediation effort of the issue, in minutes diff --git a/sonar/syncer.py b/sonar/syncer.py index 1da3d039..17c5adf7 100644 --- a/sonar/syncer.py +++ b/sonar/syncer.py @@ -20,6 +20,8 @@ """Findings syncer""" +import concurrent.futures + import sonar.logging as log import sonar.utilities as util from sonar.util import types @@ -43,10 +45,16 @@ SYNC_SINCE_DATE = "syncSinceDate" SYNC_THREADS = "threads" +EXACT_MATCH = "nb_applies" +MULTIPLE_MATCHES = "nb_multiple_matches" +APPROX_MATCH = "nb_approx_match" +MODIFIED_MATCH = "nb_tgt_has_changelog" +NO_MATCH = "nb_no_match" + def __get_findings(findings_list: list[findings.Finding]) -> list[dict[str, str]]: """Returns a list of finding keys and their URLS""" - return [{SRC_KEY: f.key, SRC_URL: f.url()} for f in findings_list] + return [{TGT_KEY: f.key, TGT_URL: f.url()} for f in findings_list] def __process_exact_sibling(finding: findings.Finding, sibling: findings.Finding, settings: types.ConfigSettings) -> dict[str, str]: @@ -126,39 +134,48 @@ def __process_modified_siblings(finding: findings.Finding, siblings: list[findin } +def __sync_one_finding( + src_finding: findings.Finding, tgt_findings: list[findings.Finding], settings: types.ConfigSettings +) -> tuple[int, dict[str, str]]: + """Syncs one finding""" + (exact_siblings, approx_siblings, modified_siblings) = src_finding.search_siblings( + tgt_findings, + allowed_users=settings[SYNC_SERVICE_ACCOUNTS], + ignore_component=settings[SYNC_IGNORE_COMPONENTS], + ) + if len(exact_siblings) == 1: + return EXACT_MATCH, __process_exact_sibling(src_finding, exact_siblings[0], settings) + elif len(exact_siblings) > 1: + return MULTIPLE_MATCHES, 
__process_multiple_exact_siblings(src_finding, exact_siblings) + elif approx_siblings: + return APPROX_MATCH, __process_approx_siblings(src_finding, approx_siblings) + elif modified_siblings: + return MODIFIED_MATCH, __process_modified_siblings(src_finding, modified_siblings) + + return NO_MATCH, __process_no_match(src_finding) + + def __sync_curated_list( src_findings: list[findings.Finding], tgt_findings: list[findings.Finding], settings: types.ConfigSettings ) -> tuple[list[dict[str, str]], dict[str, int]]: """Syncs 2 list of findings""" - counters = {k: 0 for k in ("nb_applies", "nb_approx_match", "nb_tgt_has_changelog", "nb_multiple_matches")} + counters = {k: 0 for k in (EXACT_MATCH, APPROX_MATCH, MODIFIED_MATCH, MULTIPLE_MATCHES, NO_MATCH)} counters["nb_to_sync"] = len(src_findings) name = "finding" if len(src_findings) == 0 else util.class_name(src_findings[0]).lower() report = [] log.info("%d %ss to sync, %d %ss in target", len(src_findings), name, len(tgt_findings), name) - for finding in src_findings: - log.debug("Searching sibling for %s", str(finding)) - (exact_siblings, approx_siblings, modified_siblings) = finding.search_siblings( - tgt_findings, - allowed_users=settings[SYNC_SERVICE_ACCOUNTS], - ignore_component=settings[SYNC_IGNORE_COMPONENTS], - ) - if len(exact_siblings) == 1: - report.append(__process_exact_sibling(finding, exact_siblings[0], settings)) - counters["nb_applies"] += 1 - elif len(exact_siblings) > 1: - report.append(__process_multiple_exact_siblings(finding, exact_siblings)) - counters["nb_multiple_matches"] += 1 - elif approx_siblings: - report.append(__process_approx_siblings(finding, approx_siblings)) - counters["nb_approx_match"] += 1 - elif modified_siblings: - counters["nb_tgt_has_changelog"] += 1 - report.append(__process_modified_siblings(finding, modified_siblings)) - else: # No match - report.append(__process_no_match(finding)) - counters["nb_no_match"] = counters["nb_to_sync"] - ( - counters["nb_applies"] + 
counters["nb_tgt_has_changelog"] + counters["nb_multiple_matches"] + counters["nb_approx_match"] - ) + + with concurrent.futures.ThreadPoolExecutor(max_workers=settings.get(SYNC_THREADS, 8), thread_name_prefix="FindingSync") as executor: + futures = [executor.submit(__sync_one_finding, finding, tgt_findings, settings) for finding in src_findings] + for future in concurrent.futures.as_completed(futures): + try: + match_type, result = future.result() # Retrieve result or raise an exception + log.info("Result: %s", str(result)) + report.append(result) + counters[match_type] += 1 + except Exception as e: + log.error(f"Task raised an exception: {e}") + return (report, counters) From 0f3e2e6e3fc3ceb4fe2d3010c13f5b4e23b5588a Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 16 Apr 2025 20:47:28 +0200 Subject: [PATCH 22/29] Support unassign of issues and hotspots (#1653) * Control nbr of sync threads * Update doc * Fixes #1638 * Fix assign() signature * Fix unassign() arguments --- doc/what-is-new.md | 7 +++++++ sonar/findings.py | 11 +++++++++++ sonar/hotspots.py | 31 +++++++++++++++++++++---------- sonar/issues.py | 13 ++++++------- sonar/syncer.py | 4 +++- 5 files changed, 48 insertions(+), 18 deletions(-) diff --git a/doc/what-is-new.md b/doc/what-is-new.md index 868b04f1..6316d190 100644 --- a/doc/what-is-new.md +++ b/doc/what-is-new.md @@ -1,3 +1,10 @@ +# Version 3.11 + +* `sonar-findings-sync` + - Fixed major sync regression + - Added sync multithreading to significantly accelerate sync of large projects with many findings to sync + - Covered support for several additional corner cases to increase number of issues that can be matched, and sync them + # Version 3.10 * `sonar-findings-sync` hardening diff --git a/sonar/findings.py b/sonar/findings.py index 11db11b2..6ad8ef5c 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -223,6 +223,10 @@ def file(self) -> str: # Must be implemented in sub classes raise NotImplementedError() + def assign(self, 
assignee: Optional[str] = None) -> str: + # Must be implemented in sub classes + raise NotImplementedError() + def language(self) -> str: """Returns the finding language""" return rules.get_object(endpoint=self.endpoint, key=self.rule).language @@ -327,6 +331,13 @@ def comments(self) -> dict[str, str]: # Implemented in subclasses, should not reach this raise NotImplementedError() + def unassign(self) -> bool: + """Unassigns an issue + + :return: Whether the operation succeeded + """ + return self.assign(None) + def has_changelog(self, added_after: Optional[datetime.datetime] = None, manual_only: bool = True) -> bool: """ :param manual_only: Whether to check only manual changes diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 877320ca..698c4911 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -234,20 +234,30 @@ def add_comment(self, comment: str) -> bool: params = {"hotspot": self.key, "comment": comment} return self.post("hotspots/add_comment", params=params).ok - def assign(self, assignee: str, comment: str = None) -> bool: + def assign(self, assignee: Optional[str] = None) -> bool: """Assigns a hotspot (and optionally comment) - :param assignee: User login to assign the hotspot - :type assignee: str - :param comment: Comment to add, in markdown format, defaults to None - :type comment: str, optional + :param str assignee: User login to assign the hotspot + :return: Whether the operation succeeded + """ + try: + params = util.remove_nones({"hotspot": self.key, "assignee": assignee}) + log.debug("Assigning %s to '%s'", str(self), str(assignee)) + r = self.post("hotspots/assign", params) + if r.ok: + self.assignee = assignee + except (ConnectionError, requests.RequestException) as e: + util.handle_error(e, "assigning hotspot", catch_all=True) + return False + return r.ok + + def unassign(self) -> bool: + """Unassigns a hotspot (and optionally comment) + :return: Whether the operation succeeded :rtype: bool """ - params = {"hotspot": self.key, 
"assignee": assignee} - if comment is not None: - params["comment"] = comment - return self.post("hotspots/assign", params=params) + return self.assign(assignee=None) def __apply_event(self, event: object, settings: types.ConfigSettings) -> bool: """Applies a changelog event (transition, comment, assign) to the hotspot""" @@ -273,7 +283,8 @@ def __apply_event(self, event: object, settings: types.ConfigSettings) -> bool: u = settings[syncer.SYNC_SERVICE_ACCOUNTS][0] self.assign(u) # self.add_comment(f"Hotspot assigned assigned {origin}", settings[SYNC_ADD_COMMENTS]) - + elif event_type == "UNASSIGN": + self.unassign() elif event_type == "INTERNAL": log.info("Changelog %s is internal, it will not be applied...", str(event)) # self.add_comment(f"Change of issue type {origin}", settings[SYNC_ADD_COMMENTS]) diff --git a/sonar/issues.py b/sonar/issues.py index f0e93ed5..81e51b23 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -314,16 +314,16 @@ def set_severity(self, severity: str) -> bool: return False return r.ok - def assign(self, assignee: str) -> bool: + def assign(self, assignee: Optional[str] = None) -> bool: """Assigns an issue to a user - :param str assignee: The user login + :param str assignee: The user login, set to None to unassign the issue :return: Whether the operation succeeded - :rtype: bool """ try: - log.debug("Assigning %s to '%s'", str(self), assignee) - r = self.post("issues/assign", {"issue": self.key, "assignee": assignee}) + params = util.remove_nones({"issue": self.key, "assignee": assignee}) + log.debug("Assigning %s to '%s'", str(self), str(assignee)) + r = self.post("issues/assign", params) if r.ok: self.assignee = assignee except (ConnectionError, requests.RequestException) as e: @@ -530,8 +530,7 @@ def __apply_event(self, event: changelog.Changelog, settings: ConfigSettings) -> self.assign(u) # self.add_comment(f"Issue assigned {origin}", settings[SYNC_ADD_COMMENTS]) elif event_type == "UNASSIGN": - # TODO: Handle uassign - return 
False + self.unassign() elif event_type == "TAG": self.set_tags(data) # self.add_comment(f"Tag change {origin}", settings[SYNC_ADD_COMMENTS]) diff --git a/sonar/syncer.py b/sonar/syncer.py index 17c5adf7..1b0d3cb1 100644 --- a/sonar/syncer.py +++ b/sonar/syncer.py @@ -169,10 +169,12 @@ def __sync_curated_list( futures = [executor.submit(__sync_one_finding, finding, tgt_findings, settings) for finding in src_findings] for future in concurrent.futures.as_completed(futures): try: - match_type, result = future.result() # Retrieve result or raise an exception + match_type, result = future.result(timeout=60) # Retrieve result or raise an exception log.info("Result: %s", str(result)) report.append(result) counters[match_type] += 1 + except TimeoutError: + log.error(f"Finding sync timed out after 60 seconds for {str(future)}, sync killed.") except Exception as e: log.error(f"Task raised an exception: {e}") From 60df9db2c3a2c447bec3cd452d7be8dd7c022a51 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Wed, 16 Apr 2025 20:48:33 +0200 Subject: [PATCH 23/29] Control-nbr-of-sync-threads (#1652) * Control nbr of sync threads * Update doc From e0a5d6edd457541ec21d76d51d6dbd5ab29a7dd1 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Tue, 29 Apr 2025 12:40:28 +0200 Subject: [PATCH 24/29] Fix-branch-of-hotspots-search (#1657) * Fix branch retrieval for hotspots * Create a _load() separate for issues and hotspots * Fix collection of branch/pr * Fix collection of branch/pr * Remove useless log * Simplify tests using run_xxx_cmd() * Add apps component type option constant * Add project component type constant * fix tests * fix typo * Remove duplicate stmt * Fix setting set/get * Split long and short tests * Fix set/reset settings tests * Quality pass * Fix rule collection * Fix rule collection * Fix rule collection * Fix QG hashing * Fix sync src is okorach-sonar-tools * Test for apps audit disable * Clean up temp files after test * fix tests * Fix test --- cli/findings_export.py 
| 2 +- cli/options.py | 7 +- sonar/applications.py | 3 + sonar/findings.py | 79 ++++--------- sonar/hotspots.py | 57 ++++----- sonar/issues.py | 59 ++++++---- sonar/qualitygates.py | 20 ++-- sonar/settings.py | 37 ++++-- test/unit/test_audit.py | 7 +- test/unit/test_config.py | 52 ++------- test/unit/test_findings.py | 194 +++++++++++-------------------- test/unit/test_findings_sync.py | 4 +- test/unit/test_migration.py | 2 +- test/unit/test_platform.py | 12 +- test/unit/test_project_export.py | 4 +- test/unit/test_rules.py | 4 +- test/unit/utilities.py | 8 +- 17 files changed, 227 insertions(+), 324 deletions(-) diff --git a/cli/findings_export.py b/cli/findings_export.py index 4fd32883..7e936468 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -331,7 +331,7 @@ def __get_component_findings(queue: Queue[tuple[object, ConfigSettings]], write_ def store_findings(components_list: dict[str, object], params: ConfigSettings) -> None: """Export all findings of a given project list""" components_queue = Queue(maxsize=0) - comp_params = {k: v for k, v in params.items() if k not in ("withUrl", "logfile", "datesWithouTine", "file", "format", "sonar")} + comp_params = {k: v for k, v in params.items() if k not in ("withUrl", "logfile", "datesWithoutTime", "file", "format", "sonar")} for comp in components_list.values(): try: log.debug("Queue %s task %s put", str(components_queue), str(comp)) diff --git a/cli/options.py b/cli/options.py index 67b4ab80..7885e5d6 100644 --- a/cli/options.py +++ b/cli/options.py @@ -93,8 +93,6 @@ LANGUAGES = "languages" QP = "qualityProfiles" -PORTFOLIOS = "portfolios" - FORMAT = "format" WITH_URL = "withURL" WITH_NAME_SHORT = "n" @@ -150,7 +148,10 @@ MULTI_VALUED_OPTS = (KEYS, METRIC_KEYS, RESOLUTIONS, SEVERITIES, STATUSES, TYPES, TAGS, BRANCHES, PULL_REQUESTS, WHAT) COMPONENT_TYPE = "compType" -COMPONENT_TYPES = ("projects", "apps", "portfolios") +PROJECTS = "projects" +PORTFOLIOS = "portfolios" +APPS = "apps" +COMPONENT_TYPES 
= (PROJECTS, APPS, PORTFOLIOS) class ArgumentsError(exceptions.SonarException): diff --git a/sonar/applications.py b/sonar/applications.py index 95ec84e6..6637ab43 100644 --- a/sonar/applications.py +++ b/sonar/applications.py @@ -331,6 +331,9 @@ def audit(self, audit_settings: types.ConfigSettings, **kwargs) -> list[problem. :return: list of problems found :rtype: list [Problem] """ + if not audit_settings.get("audit.applications", True): + log.debug("Auditing applications is disabled, skipping...") + return [] log.info("Auditing %s", str(self)) problems = ( super().audit(audit_settings) diff --git a/sonar/findings.py b/sonar/findings.py index 6ad8ef5c..427ae730 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -98,31 +98,6 @@ "legacySeverity", ) -SEVERITY_NONE = "NONE" - -TYPE_VULN = "VULNERABILITY" -TYPE_BUG = "BUG" -TYPE_CODE_SMELL = "CODE_SMELL" -TYPE_HOTSPOT = "SECURITY_HOTSPOT" -TYPE_NONE = "NONE" - -QUALITY_SECURITY = "SECURITY" -QUALITY_RELIABILITY = "RELIABILITY" -QUALITY_MAINTAINABILITY = "MAINTAINABILITY" -QUALITY_NONE = "NONE" - -# Mapping between old issues type and new software qualities -TYPE_QUALITY_MAPPING = { - TYPE_CODE_SMELL: QUALITY_MAINTAINABILITY, - TYPE_BUG: QUALITY_RELIABILITY, - TYPE_VULN: QUALITY_SECURITY, - TYPE_HOTSPOT: QUALITY_SECURITY, - TYPE_NONE: QUALITY_NONE, -} - -# Mapping between old and new severities -SEVERITY_MAPPING = {"BLOCKER": "BLOCKER", "CRITICAL": "HIGH", "MAJOR": "MEDIUM", "MINOR": "LOW", "INFO": "INFO", "NONE": "NONE"} - STATUS_MAPPING = {"WONTFIX": "ACCEPTED", "REOPENED": "OPEN", "REMOVED": "CLOSED", "FIXED": "CLOSED"} @@ -146,6 +121,7 @@ def __init__(self, endpoint: pf.Platform, key: str, data: types.ApiPayload = Non self.projectKey = None #: Project key (str) self._changelog = None self._comments = None + self.file = None #: File (str) self.line = None #: Line (int) self.component = None self.message = None #: Message @@ -169,19 +145,11 @@ def _load_common(self, jsondata: types.ApiPayload) -> None: else: 
self.sq_json.update(jsondata) self.author = jsondata.get("author", None) - if "vulnerabilityProbability" in jsondata: - self.impacts = {QUALITY_SECURITY: jsondata["vulnerabilityProbability"] + "(HOTSPOT)"} - elif self.endpoint.version() >= (10, 2, 0): - self.impacts = {i["softwareQuality"]: i["severity"] for i in jsondata.get("impacts", {})} - else: - self.impacts = {TYPE_QUALITY_MAPPING[jsondata.get("type", TYPE_NONE)]: SEVERITY_MAPPING[jsondata.get("severity", SEVERITY_NONE)]} - self.type = jsondata.get("type", TYPE_NONE) - self.severity = jsondata.get("severity", SEVERITY_NONE) - self.message = jsondata.get("message", None) self.status = jsondata["status"] self.resolution = jsondata.get("resolution", None) - self.rule = jsondata.get("rule", jsondata.get("ruleReference", None)) + if not self.rule: + self.rule = jsondata.get("rule", jsondata.get("ruleReference", None)) self.line = jsondata.get("line", jsondata.get("lineNumber", None)) if self.line == "null": self.line = None @@ -193,21 +161,15 @@ def _load_common(self, jsondata: types.ApiPayload) -> None: def _load_from_search(self, jsondata: types.ApiPayload) -> None: self._load_common(jsondata) - if isinstance(jsondata["project"], str): - self.projectKey = jsondata["project"] - else: - self.projectKey = jsondata["project"]["key"] + self.projectKey = jsondata.get("project", None) + self.component = jsondata.get("component", None) + self.line = jsondata.get("line", None) + self.status = jsondata.get("status", None) + self.message = jsondata.get("message", None) + if self.component: + self.file = self.component.replace(f"{self.projectKey}:", "", 1) self.creation_date = util.string_to_date(jsondata["creationDate"]) self.modification_date = util.string_to_date(jsondata["updateDate"]) - self.hash = jsondata.get("hash", None) - self.component = jsondata.get("component", None) - self.pull_request = jsondata.get("pullRequest", None) - if self.pull_request is None: - self.branch = jsondata.get("branch", None) - if 
self.branch is None: - self.branch = projects.Project.get_object(self.endpoint, self.projectKey).main_branch_name() - else: - self.branch = re.sub("^BRANCH:", "", self.branch) def _load_from_export(self, jsondata: types.ObjectJsonRepr) -> None: self._load_common(jsondata) @@ -219,10 +181,6 @@ def url(self) -> str: # Must be implemented in sub classes raise NotImplementedError() - def file(self) -> str: - # Must be implemented in sub classes - raise NotImplementedError() - def assign(self, assignee: Optional[str] = None) -> str: # Must be implemented in sub classes raise NotImplementedError() @@ -260,7 +218,7 @@ def to_json(self, without_time: bool = False) -> types.ObjectJsonRepr: for old_name, new_name in _JSON_FIELDS_REMAPPED: data[new_name] = data.pop(old_name, None) - data["file"] = self.file() + data["file"] = self.file data["creationDate"] = self.creation_date.strftime(fmt) data["updateDate"] = self.modification_date.strftime(fmt) data["language"] = self.language() @@ -288,7 +246,7 @@ def to_sarif(self, full: bool = True) -> dict[str, str]: data["locations"] = [ { "physicalLocation": { - "artifactLocation": {"uri": f"file:///{self.file()}", "index": 0}, + "artifactLocation": {"uri": f"file:///{self.file}", "index": 0}, "region": { "startLine": max(int(rg["startLine"]), 1), "startColumn": max(int(rg["startOffset"]), 1), @@ -399,7 +357,7 @@ def strictly_identical_to(self, another_finding: Finding, ignore_component: bool self.rule == another_finding.rule and self.hash == another_finding.hash and self.message == another_finding.message - and self.file() == another_finding.file() + and self.file == another_finding.file and (self.component == another_finding.component or ignore_component) and prelim_check ) @@ -420,7 +378,7 @@ def almost_identical_to(self, another_finding: Finding, ignore_component: bool = elif Levenshtein.distance(self.message, another_finding.message, score_cutoff=6) <= 5: score += 1 match_msg += " message +1" - if self.file() == 
another_finding.file(): + if self.file == another_finding.file: score += 1 match_msg += " file +1" if self.line == another_finding.line or kwargs.get("ignore_line", False): @@ -490,6 +448,15 @@ def do_transition(self, transition: str) -> bool: util.handle_error(e, f"applying transition {transition}") return False + def get_branch_and_pr(self, data: types.ApiPayload) -> tuple[Optional[str], Optional[str]]: + """ + :param data: The data to extract the branch and pull request from + :return: The branch name or pull request id + """ + pr = data.get("pullRequest", None) + branch = None if pr else data.get("branch", projects.Project.get_object(self.endpoint, key=self.projectKey).main_branch_name()) + return branch, pr + def export_findings(endpoint: pf.Platform, project_key: str, branch: str = None, pull_request: str = None) -> dict[str, Finding]: """Export all findings of a given project diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 698c4911..1c5d27bf 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -35,7 +35,7 @@ from sonar.util import types, cache, constants as c from sonar import syncer, users -from sonar import findings, rules, changelog +from sonar import findings, rules, changelog, projects PROJECT_FILTER = "project" PROJECT_FILTER_OLD = "projectKey" @@ -94,37 +94,20 @@ class Hotspot(findings.Finding): def __init__(self, endpoint: pf.Platform, key: str, data: types.ApiPayload = None, from_export: bool = False) -> None: """Constructor""" super().__init__(endpoint=endpoint, key=key, data=data, from_export=from_export) - self.vulnerabilityProbability = None #: - self.category = data["securityCategory"] #: - self.vulnerabilityProbability = data["vulnerabilityProbability"] #: - self.securityCategory = None #: self.type = "SECURITY_HOTSPOT" self.__details = None - - # FIXME: Ugly hack to fix how hotspot branches are managed - m = re.match(r"^(.*):BRANCH:(.*)$", self.projectKey) - if m: - self.projectKey = m.group(1) - self.branch = m.group(2) - m = 
re.match(r"^(.*):PULL_REQUEST:(.*)$", self.projectKey) - if m: - self.projectKey = m.group(1) - self.branch = m.group(2) Hotspot.CACHE.put(self) - if self.rule is None and self.refresh(): - self.rule = self.__details["rule"]["key"] + self.refresh() def __str__(self) -> str: """ - :return: String representation of the hotspot - :rtype: str + :return: String representation of the object """ return f"Hotspot key '{self.key}'" def url(self) -> str: """ :return: Permalink URL to the hotspot in the SonarQube platform - :rtype: str """ branch = "" if self.branch is not None: @@ -138,27 +121,27 @@ def to_json(self, without_time: bool = False) -> types.ObjectJsonRepr: :return: JSON representation of the hotspot :rtype: dict """ - if self.endpoint.version() >= (10, 2, 0): - if "vulnerabilityProbability" in self.sq_json: - self.impacts = {findings.QUALITY_SECURITY: self.sq_json["vulnerabilityProbability"] + "(HOTSPOT)"} - else: - self.impacts = {findings.QUALITY_SECURITY: "UNDEFINED(HOTSPOT)"} data = super().to_json(without_time) if self.endpoint.version() >= (10, 2, 0): data.pop("type", None) return data - def file(self) -> Optional[str]: - """ - :return: The hotspot full file path, relative to the project root directory, or None if not found - """ - try: - f = self.sq_json["component"]["path"] - except KeyError: - f = None - if not f: - log.warning("Can't find file name for %s", str(self)) - return f + def _load(self, data: types.ApiPayload, from_export: bool = False) -> None: + """Loads the hotspot details from the provided data (coming from api/hotspots/search)""" + super()._load(data, from_export) + if not self.rule: + self.rule = data.get("ruleKey", None) + self.severity = data.get("vulnerabilityProbability", "UNDEFINED") + "(HOTSPOT)" + self.impacts = {"SECURITY": self.severity} + + def _load_details(self, data: dict[str, any]) -> None: + self.file = data["component"]["path"] + self.branch, self.pull_request = self.get_branch_and_pr(data["project"]) + self.severity = 
data["rule"].get("vulnerabilityProbability", "UNDEFINED") + "(HOTSPOT)" + self.impacts = {"SECURITY": self.severity} + if not self.rule: + self.rule = data["rule"]["key"] + self.assignee = data.get("assignee", None) def refresh(self) -> bool: """Refreshes and reads hotspots details in SonarQube @@ -169,7 +152,7 @@ def refresh(self) -> bool: resp = self.get(Hotspot.API[c.GET], {"hotspot": self.key}) if resp.ok: self.__details = json.loads(resp.text) - self._load(self.__details) + self._load_details(self.__details) return resp.ok except (ConnectionError, RequestException): return False diff --git a/sonar/issues.py b/sonar/issues.py index 81e51b23..d14303cb 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -111,6 +111,31 @@ _TOO_MANY_ISSUES_MSG = "Too many issues, recursing..." +SEVERITY_NONE = "NONE" + +TYPE_VULN = "VULNERABILITY" +TYPE_BUG = "BUG" +TYPE_CODE_SMELL = "CODE_SMELL" +TYPE_HOTSPOT = "SECURITY_HOTSPOT" +TYPE_NONE = "NONE" + +QUALITY_SECURITY = "SECURITY" +QUALITY_RELIABILITY = "RELIABILITY" +QUALITY_MAINTAINABILITY = "MAINTAINABILITY" +QUALITY_NONE = "NONE" + +# Mapping between old issues type and new software qualities +TYPE_QUALITY_MAPPING = { + TYPE_CODE_SMELL: QUALITY_MAINTAINABILITY, + TYPE_BUG: QUALITY_RELIABILITY, + TYPE_VULN: QUALITY_SECURITY, + TYPE_HOTSPOT: QUALITY_SECURITY, + TYPE_NONE: QUALITY_NONE, +} + +# Mapping between old and new severities +SEVERITY_MAPPING = {"BLOCKER": "BLOCKER", "CRITICAL": "HIGH", "MAJOR": "MEDIUM", "MINOR": "LOW", "INFO": "INFO", "NONE": "NONE"} + class TooManyIssuesError(Exception): """When a call to api/issues/search returns too many issues.""" @@ -166,26 +191,6 @@ def url(self) -> str: branch = f"&pullRequest={requests.utils.quote(self.pull_request)}" return f"{self.endpoint.url}/project/issues?id={self.projectKey}{branch}&issues={self.key}" - def file(self) -> Optional[str]: - """ - :return: The issue full file path, relative to the project root directory, or None if not found - """ - if "component" in 
self.sq_json: - comp = self.sq_json["component"] - # Hack: Fix to adapt to the ugly component structure on branches and PR - # "component": "src:sonar/hot.py:BRANCH:somebranch" - for prefix in ("BRANCH", "PULL_REQUEST"): - m = re.search(rf"(^.*):{prefix}:", comp) - if m: - comp = m.group(1) - break - return comp.split(":")[-1] - elif "path" in self.sq_json: - return self.sq_json["path"] - else: - log.warning("Can't find file name for %s", str(self)) - return None - def debt(self) -> int: """ :return: The remediation effort of the issue, in minutes @@ -235,6 +240,20 @@ def refresh(self) -> bool: self._load(json.loads(resp.text)["issues"][0]) return resp.ok + def _load(self, data: ApiPayload, from_export: bool = False) -> None: + """Loads the issue from a JSON payload""" + super()._load(data, from_export) + self.hash = data.get("hash", None) + self.severity = data.get("severity", None) + if not self.rule: + self.rule = data.get("rule", None) + self.type = data.get("type", None) + self.branch, self.pull_request = self.get_branch_and_pr(data) + if self.endpoint.version() >= (10, 2, 0): + self.impacts = {i["softwareQuality"]: i["severity"] for i in data.get("impacts", {})} + else: + self.impacts = {TYPE_QUALITY_MAPPING[data.get("type", TYPE_NONE)]: SEVERITY_MAPPING[data.get("severity", SEVERITY_NONE)]} + def changelog(self, manual_only: bool = True) -> dict[str, str]: """ :param bool manual_only: Whether the only manual changes should be returned or all changes diff --git a/sonar/qualitygates.py b/sonar/qualitygates.py index a2eb44bc..1023fa73 100644 --- a/sonar/qualitygates.py +++ b/sonar/qualitygates.py @@ -99,6 +99,7 @@ def __init__(self, endpoint: pf.Platform, name: str, data: types.ApiPayload) -> self.is_built_in = data.get("isBuiltIn", False) self.conditions() self.permissions() + log.debug("Created %s with uuid %d id %x", str(self), hash(self), id(self)) QualityGate.CACHE.put(self) @classmethod @@ -149,6 +150,10 @@ def __str__(self) -> str: """ return f"quality 
gate '{self.name}'" + def __hash__(self) -> int: + """Default UUID for SQ objects""" + return hash((self.name, self.endpoint.url)) + def url(self) -> str: """ :return: The object permalink @@ -266,16 +271,15 @@ def set_as_default(self) -> bool: :return: Whether setting as default quality gate was successful :rtype: bool """ - if self.endpoint.is_sonarcloud(): - r = self.post("qualitygates/set_as_default", params={"id": self.key}) - else: - r = self.post("qualitygates/set_as_default", params={"name": self.name}) - if r.ok: - self.is_default = True + params = {"id": self.key} if self.endpoint.is_sonarcloud() else {"name": self.name} + try: + r = self.post("qualitygates/set_as_default", params=params) # Turn off default for all other quality gates except the current one for qg in get_list(self.endpoint).values(): - if qg.name != self.name: - qg.is_default = False + qg.is_default = qg.name == self.name + except (ConnectionError, RequestException) as e: + util.handle_error(e, f"setting {str(self)} as default quality gate") + return False return r.ok def update(self, **data) -> bool: diff --git a/sonar/settings.py b/sonar/settings.py index 6ad1f096..1706af25 100644 --- a/sonar/settings.py +++ b/sonar/settings.py @@ -256,19 +256,37 @@ def set(self, value: any) -> bool: log.debug("Setting %s to value '%s'", str(self), str(value)) params = {"key": self.key, "component": self.component.key if self.component else None} + untransformed_value = value if isinstance(value, list): if isinstance(value[0], str): params["values"] = value else: params["fieldValues"] = [json.dumps(v) for v in value] + elif isinstance(value, bool): + value = str(value).lower() else: - if isinstance(value, bool): - value = "true" if value else "false" - if self.multi_valued: - params["values"] = value - else: - params["value"] = value - return self.post(Setting.API[c.CREATE], params=params).ok + pname = "values" if self.multi_valued else "value" + params[pname] = value + try: + r = 
self.post(Setting.API[c.CREATE], params=params) + self.value = untransformed_value + return r.ok + except (ConnectionError, RequestException) as e: + util.handle_error(e, f"setting setting '{self.key}' of {str(self.component)}", catch_all=True) + return False + + def reset(self) -> bool: + log.info("Resetting %s", str(self)) + params = {"keys": self.key} + if self.component: + params["component"] = self.component.key + try: + r = self.post("settings/reset", params=params) + self.value = None + return r.ok + except (ConnectionError, RequestException) as e: + util.handle_error(e, f"resetting setting '{self.key}' of {str(self.component)}", catch_all=True) + return False def to_json(self, list_as_csv: bool = True) -> types.ObjectJsonRepr: val = self.value @@ -545,10 +563,9 @@ def decode(setting_key: str, setting_value: any) -> any: return setting_value -def reset_setting(endpoint: pf.Platform, setting_key: str, project_key: str = None) -> bool: +def reset_setting(endpoint: pf.Platform, setting_key: str, project: Optional[object] = None) -> bool: """Resets a setting to its default""" - log.info("Resetting setting '%s", setting_key) - return endpoint.post("settings/reset", params={"keys": setting_key, "component": project_key}).ok + return get_object(endpoint=endpoint, key=setting_key, component=project).reset() def get_component_params(component: object, name: str = "component") -> types.ApiParamss: diff --git a/test/unit/test_audit.py b/test/unit/test_audit.py index 5c14d281..0f0aa8ec 100644 --- a/test/unit/test_audit.py +++ b/test/unit/test_audit.py @@ -64,21 +64,22 @@ def test_audit(get_csv_file: Generator[str]) -> None: line = fd.readline() assert line not in lines lines.append(line) + util.clean(file) def test_audit_stdout() -> None: """test_audit_stdout""" - util.run_success_cmd(audit.main, CMD) + util.run_success_cmd(audit.main, CMD, True) def test_audit_json(get_json_file: Generator[str]) -> None: """test_audit_json""" - util.run_success_cmd(audit.main, f"{CMD} 
--{opt.REPORT_FILE} {get_json_file}") + util.run_success_cmd(audit.main, f"{CMD} --{opt.REPORT_FILE} {get_json_file}", True) def test_audit_proj_key(get_csv_file: Generator[str]) -> None: """test_audit_proj_key""" - util.run_success_cmd(audit.main, f"{CMD} --{opt.REPORT_FILE} {get_csv_file} --{opt.WHAT} projects --{opt.KEYS} okorach_sonar-tools") + util.run_success_cmd(audit.main, f"{CMD} --{opt.REPORT_FILE} {get_csv_file} --{opt.WHAT} projects --{opt.KEYS} okorach_sonar-tools", True) def test_audit_proj_non_existing_key() -> None: diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 6456079f..10b204ba 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -27,7 +27,6 @@ import json from unittest.mock import patch -import pytest import utilities as util from sonar import errcodes, portfolios @@ -36,70 +35,44 @@ from cli import config CMD = "config.py" -OPTS = [CMD] + util.STD_OPTS + ["-e", f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] +LIST_OPTS = [CMD] + util.STD_OPTS + ["-e", f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] +OPTS = " ".join(LIST_OPTS) OPTS_IMPORT = [CMD] + util.TEST_OPTS + ["-i", f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] -def __test_config_cmd(arguments: list[str]) -> None: - """Runs a test command""" - outputfile = arguments[arguments.index(f"-{opt.REPORT_FILE_SHORT}") + 1] - util.clean(outputfile) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", arguments): - config.main() - assert int(str(e.value)) == errcodes.OK - assert util.file_not_empty(outputfile) - util.clean(outputfile) - - def test_config_export_full() -> None: """test_config_export_full""" - __test_config_cmd(OPTS + ["--fullExport"]) + util.run_success_cmd(config.main, f"{OPTS} --fullExport", True) def test_config_export_partial_2() -> None: """test_config_export_partial_2""" - __test_config_cmd(OPTS + ["-w", "settings,portfolios,users"]) + util.run_success_cmd(config.main, f"{OPTS} -w settings,portfolios,users", True) def 
test_config_export_partial_3() -> None: """test_config_export_partial_3""" - __test_config_cmd(OPTS + ["-w", "projects", f"-{opt.KEYS_SHORT}", "okorach_sonar-tools"]) + util.run_success_cmd(config.main, f"{OPTS} -w projects -{opt.KEYS_SHORT} okorach_sonar-tools", True) def test_config_export_yaml() -> None: - """test_config_export_partial_3""" - __test_config_cmd([CMD] + util.STD_OPTS + ["-e", f"-{opt.REPORT_FILE_SHORT}", util.YAML_FILE]) + """test_config_export_yaml""" + util.run_success_cmd(config.main, f"{OPTS} -{opt.REPORT_FILE_SHORT} {util.YAML_FILE}", True) def test_config_export_wrong() -> None: """test_config_export_wrong""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", OPTS + ["-w", "settings,wrong,users"]): - config.main() - assert int(str(e.value)) == errcodes.ARGS_ERROR - assert not os.path.isfile(util.JSON_FILE) - util.clean(util.JSON_FILE) + util.run_failed_cmd(config.main, f"{OPTS} -w settings,wrong,users", errcodes.ARGS_ERROR) def test_config_non_existing_project() -> None: """test_config_non_existing_project""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", OPTS + [f"-{opt.KEYS_SHORT}", "okorach_sonar-tools,bad_project"]): - config.main() - assert int(str(e.value)) == errcodes.NO_SUCH_KEY - assert not os.path.isfile(util.JSON_FILE) - util.clean(util.JSON_FILE) + util.run_failed_cmd(config.main, f"{OPTS} -{opt.KEYS_SHORT} okorach_sonar-tools,bad_project", errcodes.NO_SUCH_KEY) def test_config_inline_lists() -> None: """test_config_inline_commas""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", OPTS): - config.main() + util.run_success_cmd(config.main, OPTS) with open(file=util.JSON_FILE, mode="r", encoding="utf-8") as fh: json_config = json.loads(fh.read()) assert isinstance(json_config["globalSettings"]["languages"]["javascript"]["sonar.javascript.file.suffixes"], str) @@ -118,10 +91,7 @@ def 
test_config_inline_lists() -> None: def test_config_dont_inline_lists() -> None: """test_config_no_inline_commas""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", OPTS + ["--dontInlineLists"]): - config.main() + util.run_success_cmd(config.main, f"{OPTS} --dontInlineLists") with open(file=util.JSON_FILE, mode="r", encoding="utf-8") as fh: json_config = json.loads(fh.read()) assert isinstance(json_config["globalSettings"]["languages"]["javascript"]["sonar.javascript.file.suffixes"], list) diff --git a/test/unit/test_findings.py b/test/unit/test_findings.py index 0a3e1ffb..4cc41b75 100644 --- a/test/unit/test_findings.py +++ b/test/unit/test_findings.py @@ -38,7 +38,10 @@ CMD = "sonar-findings-export.py" SARIF_FILE = "issues.sarif" CSV_OPTS = [CMD] + util.STD_OPTS + [f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE] +CSV_OPTS_STR = " ".join(CSV_OPTS) JSON_OPTS = [CMD] + util.STD_OPTS + [f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] +JSON_OPTS_STR = " ".join(JSON_OPTS) +LIVE_PROJ_KEY = f"--{opt.KEYS} {util.LIVE_PROJECT}" RULE_COL = 1 LANG_COL = 2 @@ -67,51 +70,50 @@ PR_COL = fields.index("pullRequest") __GOOD_OPTS = [ - [f"--{opt.FORMAT}", "json", f"--{opt.NBR_THREADS}", "16", f"-{opt.LOGFILE_SHORT}", "sonar-tools.log", f"--{opt.VERBOSE}", "DEBUG"], [f"--{opt.FORMAT}", "json", f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1},{util.PROJECT_2}", f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE], - [f"--{opt.WITH_URL}", f"--{opt.NBR_THREADS}", "16", f"--{opt.REPORT_FILE}", util.CSV_FILE], [f"--{opt.CSV_SEPARATOR}", ";", "-d", f"--{opt.TAGS}", "cwe,convention", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], - [f"--{opt.STATUSES}", "OPEN,CLOSED", f"--{opt.REPORT_FILE}", util.CSV_FILE], - [f"--{opt.STATUSES}", "OPEN,CLOSED", f"--{opt.SEVERITIES}", "MINOR,MAJOR,CRITICAL", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], - [f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1}", f"-{opt.WITH_BRANCHES_SHORT}", "*", f"--{opt.REPORT_FILE}", util.CSV_FILE], + 
[f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1}", f"-{opt.WITH_BRANCHES_SHORT}", '"*"', f"--{opt.REPORT_FILE}", util.CSV_FILE], [f"--{opt.KEYS}", "training:security", f"-{opt.WITH_BRANCHES_SHORT}", "main", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], [f"--{opt.USE_FINDINGS}", f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1},{util.PROJECT_2}", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], - ["--apps", f"-{opt.KEYS_SHORT}", "APP_TEST", f"--{opt.BRANCHES}", "*", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], - ["--portfolios", f"-{opt.KEYS_SHORT}", "Banking", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], + [f"--{opt.APPS}", f"-{opt.KEYS_SHORT}", "APP_TEST", f"--{opt.BRANCHES}", '"*"', f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], + [f"--{opt.PORTFOLIOS}", f"-{opt.KEYS_SHORT}", "Banking", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], + [f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1}", f"-{opt.WITH_BRANCHES_SHORT}", '"*"', f"--{opt.REPORT_FILE}", util.CSV_FILE], + [f"--{opt.STATUSES}", "OPEN,CLOSED", f"--{opt.SEVERITIES}", "BLOCKER,CRITICAL", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], +] + +__GOOD_OPTS_LONG = [ + [f"--{opt.FORMAT}", "json", f"--{opt.NBR_THREADS}", "16", f"-{opt.LOGFILE_SHORT}", "sonar-tools.log", f"--{opt.VERBOSE}", "DEBUG"], + [f"--{opt.WITH_URL}", f"--{opt.NBR_THREADS}", "16", f"--{opt.REPORT_FILE}", util.CSV_FILE], + [f"--{opt.STATUSES}", "OPEN,CLOSED", f"--{opt.REPORT_FILE}", util.CSV_FILE], ] + __WRONG_FILTER_OPTS = [ - [f"--{opt.STATUSES}", "OPEN,NOT_OPEN"], - [f"--{opt.RESOLUTIONS}", "ACCEPTED,SAFE,DO_FIX,WONTFIX"], - [f"--{opt.TYPES}", "BUG,VULN"], - [f"--{opt.SEVERITIES}", "HIGH,SUPER_HIGH"], - [f"--{opt.CSV_SEPARATOR}", "';'", "-d", f"--{opt.TAGS}", "cwe,convention", f"-{opt.REPORT_FILE_SHORT}", util.CSV_FILE], + f"--{opt.STATUSES} OPEN,NOT_OPEN", + f"--{opt.RESOLUTIONS} ACCEPTED,SAFE,DO_FIX,WONTFIX", + f"--{opt.TYPES} BUG,VULN", + f"--{opt.SEVERITIES} HIGH,SUPER_HIGH", + f"--{opt.CSV_SEPARATOR} ';' -d --{opt.TAGS} cwe,convention -{opt.REPORT_FILE_SHORT} 
{util.CSV_FILE}", ] __WRONG_OPTS = [ [f"-{opt.KEYS_SHORT}", "non-existing-project-key"], - ["--apps", f"-{opt.KEYS_SHORT}", util.LIVE_PROJECT], - ["--portfolios", f"-{opt.KEYS_SHORT}", util.LIVE_PROJECT], + [f"--{opt.APPS}", f"-{opt.KEYS_SHORT}", util.LIVE_PROJECT], + [f"--{opt.PORTFOLIOS}", f"-{opt.KEYS_SHORT}", util.LIVE_PROJECT], ] def test_findings_export_sarif_explicit() -> None: """Test SARIF export""" util.clean(util.JSON_FILE) - util.run_success_cmd( - findings_export.main, f"{CMD} {util.SQS_OPTS} --{opt.REPORT_FILE} {util.JSON_FILE} --{opt.KEYS} {util.LIVE_PROJECT} --{opt.FORMAT} sarif" - ) + util.run_success_cmd(findings_export.main, f"{CMD} {util.SQS_OPTS} --{opt.REPORT_FILE} {util.JSON_FILE} {LIVE_PROJ_KEY} --{opt.FORMAT} sarif") assert util.file_contains(util.JSON_FILE, "schemas/json/sarif-2.1.0-rtm.4") util.clean(util.JSON_FILE) def test_findings_export_sarif_implicit() -> None: """Test SARIF export for a single project and implicit format""" - util.clean(SARIF_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", JSON_OPTS + [f"-{opt.KEYS_SHORT}", f"{util.LIVE_PROJECT}", f"-{opt.REPORT_FILE_SHORT}", SARIF_FILE]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd(findings_export.main, f"{CMD} {util.SQS_OPTS} -{opt.KEYS_SHORT} {util.LIVE_PROJECT} -{opt.REPORT_FILE_SHORT} {SARIF_FILE}") assert util.file_contains(SARIF_FILE, "schemas/json/sarif-2.1.0-rtm.4") util.clean(SARIF_FILE) @@ -120,10 +122,7 @@ def test_wrong_filters() -> None: """test_wrong_filters""" util.clean(util.CSV_FILE, util.JSON_FILE) for bad_opts in __WRONG_FILTER_OPTS: - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + bad_opts): - findings_export.main() - assert int(str(e.value)) == errcodes.WRONG_SEARCH_CRITERIA + util.run_failed_cmd(findings_export.main, f"{CSV_OPTS_STR} {bad_opts}", errcodes.WRONG_SEARCH_CRITERIA) assert not os.path.isfile(util.CSV_FILE) assert not 
os.path.isfile(util.JSON_FILE) @@ -152,11 +151,7 @@ def test_findings_export_non_existing_branch() -> None: def test_findings_filter_on_date_after() -> None: """test_findings_filter_on_type""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"-{opt.KEYS_SHORT}", f"{util.LIVE_PROJECT}", f"--{opt.DATE_AFTER}", "2023-05-01"]): - findings_export.main() - + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} -{opt.KEYS_SHORT} {util.LIVE_PROJECT} --{opt.DATE_AFTER} 2023-05-01") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -167,11 +162,7 @@ def test_findings_filter_on_date_after() -> None: def test_findings_filter_on_date_before() -> None: """test_findings_filter_on_type""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"-{opt.KEYS_SHORT}", f"{util.LIVE_PROJECT}", f"--{opt.DATE_BEFORE}", "2024-05-01"]): - findings_export.main() - + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} -{opt.KEYS_SHORT} {util.LIVE_PROJECT} --{opt.DATE_BEFORE} 2024-05-01") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -182,11 +173,7 @@ def test_findings_filter_on_date_before() -> None: def test_findings_filter_on_type() -> None: """test_findings_filter_on_type""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.TYPES}", "VULNERABILITY,BUG"]): - findings_export.main() - + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.TYPES} VULNERABILITY,BUG") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -200,10 +187,7 @@ def test_findings_filter_on_type() -> None: def test_findings_filter_on_resolution() -> None: """test_findings_filter_on_resolution""" - util.clean(util.CSV_FILE) - with 
pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.RESOLUTIONS}", "FALSE-POSITIVE,ACCEPTED,SAFE"]): - findings_export.main() + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.RESOLUTIONS} FALSE-POSITIVE,ACCEPTED,SAFE") if util.SQ.version() < (10, 0, 0): statuses = ("FALSE-POSITIVE", "WONTFIX", "SAFE") else: @@ -218,7 +202,7 @@ def test_findings_filter_on_resolution() -> None: def test_findings_filter_on_severity() -> None: """test_findings_filter_on_severity""" - util.run_success_cmd(findings_export.main, f"{' '.join(CSV_OPTS)} --{opt.SEVERITIES} BLOCKER,CRITICAL") + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.SEVERITIES} BLOCKER,CRITICAL") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -234,11 +218,7 @@ def test_findings_filter_on_severity() -> None: def test_findings_filter_on_multiple_criteria() -> None: """test_findings_filter_on_multiple_criteria""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.RESOLUTIONS}", "FALSE-POSITIVE,ACCEPTED", f"--{opt.TYPES}", "BUG,CODE_SMELL"]): - findings_export.main() - + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.RESOLUTIONS} FALSE-POSITIVE,ACCEPTED --{opt.TYPES} BUG,CODE_SMELL") with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -256,13 +236,9 @@ def test_findings_filter_on_multiple_criteria() -> None: def test_findings_filter_on_multiple_criteria_2() -> None: """test_findings_filter_on_multiple_criteria_2""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object( - sys, "argv", CSV_OPTS + [f"--{opt.DATE_AFTER}", "2020-01-10", f"--{opt.DATE_BEFORE}", "2020-12-31", f"--{opt.TYPES}", "SECURITY_HOTSPOT"] - ): - findings_export.main() - + util.run_success_cmd( + findings_export.main, f"{CSV_OPTS_STR} --{opt.DATE_AFTER} 
2020-01-10 --{opt.DATE_BEFORE} 2020-12-31 --{opt.TYPES} SECURITY_HOTSPOT" + ) with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -283,11 +259,7 @@ def test_findings_filter_on_multiple_criteria_2() -> None: def test_findings_filter_on_multiple_criteria_3() -> None: """test_findings_filter_on_multiple_criteria_3""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.STATUSES}", "ACCEPTED", f"--{opt.RESOLUTIONS}", "FALSE-POSITIVE"]): - findings_export.main() - + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.STATUSES} ACCEPTED --{opt.RESOLUTIONS} FALSE-POSITIVE") if util.SQ.version() < (10, 0, 0): statuses = ("WONTFIX", "FALSE_POSITIVE", "FALSE-POSITIVE") else: @@ -302,13 +274,9 @@ def test_findings_filter_on_multiple_criteria_3() -> None: def test_findings_filter_on_hotspots_multi_1() -> None: """test_findings_filter_on_hotspots_multi_1""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object( - sys, "argv", CSV_OPTS + [f"--{opt.RESOLUTIONS}", "ACKNOWLEDGED, SAFE", f"-{opt.KEYS_SHORT}", f"{util.PROJECT_1},{util.PROJECT_2}"] - ): - findings_export.main() - + util.run_success_cmd( + findings_export.main, f'{CSV_OPTS_STR} --{opt.RESOLUTIONS} "ACKNOWLEDGED, SAFE" -{opt.KEYS_SHORT} {util.PROJECT_1},{util.PROJECT_2}' + ) with open(file=util.CSV_FILE, mode="r", encoding="utf-8") as fh: csvreader = csv.reader(fh) next(csvreader) @@ -320,38 +288,31 @@ def test_findings_filter_on_hotspots_multi_1() -> None: def test_findings_filter_on_lang() -> None: """test_findings_filter_hotspot_on_lang""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.LANGUAGES}", "java,js"]): - findings_export.main() - util.clean(util.CSV_FILE) + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} --{opt.LANGUAGES} java,js", True) def test_findings_export() -> 
None: """test_findings_export""" + util.start_logging() for opts in __GOOD_OPTS: - if (util.SQ.edition() == "community" and ("--apps" in opts or "--portfolios" in opts)) or ( - util.SQ.edition() == "developer" and "--portfolios" in opts + fullcmd = " ".join([CMD] + util.STD_OPTS + opts) + if (util.SQ.edition() == "community" and (f"--{opt.APPS}" in opts or f"--{opt.PORTFOLIOS}" in opts)) or ( + util.SQ.edition() == "developer" and f"--{opt.PORTFOLIOS}" in opts ): - with pytest.raises(SystemExit) as e: - fullcmd = [CMD] + util.STD_OPTS + opts - with patch.object(sys, "argv", fullcmd): - findings_export.main() - assert int(str(e.value)) == errcodes.UNSUPPORTED_OPERATION + util.run_failed_cmd(findings_export.main, fullcmd, errcodes.UNSUPPORTED_OPERATION) else: - util.clean(util.CSV_FILE, util.JSON_FILE) - with pytest.raises(SystemExit) as e: - fullcmd = [CMD] + util.STD_OPTS + opts - log.info("Running %s", " ".join(fullcmd)) - with patch.object(sys, "argv", fullcmd): - findings_export.main() - assert int(str(e.value)) == errcodes.OK - if util.CSV_FILE in opts: - assert util.file_not_empty(util.CSV_FILE) - elif util.JSON_FILE in opts: - assert util.file_not_empty(util.JSON_FILE) - log.info("SUCCESS running: %s", " ".join(fullcmd)) - util.clean(util.CSV_FILE, util.JSON_FILE) + log.info("Running %s", fullcmd) + util.run_success_cmd(findings_export.main, fullcmd, True) + log.info("SUCCESS running: %s", fullcmd) + + +def test_findings_export_long() -> None: + """test_findings_export_long""" + for opts in __GOOD_OPTS_LONG: + fullcmd = " ".join([CMD] + util.STD_OPTS + opts) + log.info("Running %s", fullcmd) + util.run_success_cmd(findings_export.main, fullcmd, True) + log.info("SUCCESS running: %s", fullcmd) def test_issues_count_0() -> None: @@ -394,11 +355,7 @@ def test_search_too_many_issues() -> None: def test_output_format_sarif() -> None: """test_output_format_sarif""" - util.clean(SARIF_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", 
[CMD] + util.STD_OPTS + [f"--{opt.REPORT_FILE}", SARIF_FILE, f"--{opt.KEYS}", util.LIVE_PROJECT]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd(findings_export.main, f"{CMD} {' '.join(util.STD_OPTS)} --{opt.REPORT_FILE} {SARIF_FILE} {LIVE_PROJ_KEY}") with open(SARIF_FILE, encoding="utf-8") as fh: sarif_json = json.loads(fh.read()) assert sarif_json["$schema"] == "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.4.json" @@ -425,12 +382,7 @@ def test_output_format_sarif() -> None: def test_output_format_json() -> None: """test_output_format_json""" - util.clean(util.JSON_FILE) - log.set_debug_level("INFO") - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", JSON_OPTS + [f"--{opt.KEYS}", util.LIVE_PROJECT]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd(findings_export.main, f"{JSON_OPTS_STR} {LIVE_PROJ_KEY}") with open(util.JSON_FILE, encoding="utf-8") as fh: json_data = json.loads(fh.read()) for issue in json_data: @@ -452,11 +404,7 @@ def test_output_format_json() -> None: def test_output_format_csv() -> None: """test_output_format_csv""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.KEYS}", util.LIVE_PROJECT]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} {LIVE_PROJ_KEY}") with open(util.CSV_FILE, encoding="utf-8") as fd: reader = csv.reader(fd) row = next(reader) @@ -469,11 +417,7 @@ def test_output_format_csv() -> None: def test_output_format_branch() -> None: """test_output_format_branch""" for br in "develop", "master,develop": - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.KEYS}", util.LIVE_PROJECT, f"--{opt.BRANCHES}", br]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + 
util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} {LIVE_PROJ_KEY} --{opt.BRANCHES} {br}") br_list = utilities.csv_to_list(br) with open(util.CSV_FILE, encoding="utf-8") as fd: reader = csv.reader(fd) @@ -482,16 +426,12 @@ def test_output_format_branch() -> None: assert line[BRANCH_COL] in br_list assert line[PR_COL] == "" assert line[PROJECT_COL] == util.LIVE_PROJECT - util.clean(util.CSV_FILE) + util.clean(util.CSV_FILE) def test_all_prs() -> None: """Tests that findings extport for all PRs of a project works""" - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.KEYS}", util.LIVE_PROJECT, f"--{opt.PULL_REQUESTS}", "*"]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + util.run_success_cmd(findings_export.main, f'{CSV_OPTS_STR} {LIVE_PROJ_KEY} --{opt.PULL_REQUESTS} "*"') with open(util.CSV_FILE, encoding="utf-8") as fd: reader = csv.reader(fd) try: @@ -509,12 +449,8 @@ def test_all_prs() -> None: def test_one_pr() -> None: """Tests that findings extport for a single name PR of a project works""" proj = projects.Project.get_object(endpoint=util.SQ, key=util.LIVE_PROJECT) - for pr in proj.pull_requests().keys(): - util.clean(util.CSV_FILE) - with pytest.raises(SystemExit) as e: - with patch.object(sys, "argv", CSV_OPTS + [f"--{opt.KEYS}", util.LIVE_PROJECT, f"--{opt.PULL_REQUESTS}", pr]): - findings_export.main() - assert int(str(e.value)) == errcodes.OK + for pr in list(proj.pull_requests().keys()): + util.run_success_cmd(findings_export.main, f"{CSV_OPTS_STR} {LIVE_PROJ_KEY} -{opt.PULL_REQUESTS_SHORT} {pr}") with open(util.CSV_FILE, encoding="utf-8") as fd: reader = csv.reader(fd) try: @@ -526,4 +462,4 @@ def test_one_pr() -> None: assert line[PROJECT_COL] == util.LIVE_PROJECT except StopIteration: pass - util.clean(util.CSV_FILE) + util.clean(util.CSV_FILE) diff --git a/test/unit/test_findings_sync.py b/test/unit/test_findings_sync.py index f7e92f7f..408d6c99 
100644 --- a/test/unit/test_findings_sync.py +++ b/test/unit/test_findings_sync.py @@ -42,7 +42,7 @@ "-T", os.getenv("SONAR_TOKEN_SYNC_USER"), ] -SYNC_OPTS = ["--login", "syncer", f"-{opt.KEYS_SHORT}", "TESTSYNC", "-K", "TESTSYNC"] +SYNC_OPTS = ["--login", "syncer", f"-{opt.KEYS_SHORT}", "okorach_sonar-tools", "-K", "TESTSYNC"] ALL_OPTS = [CMD] + PLAT_OPTS + SYNC_OPTS + [f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] @@ -58,4 +58,4 @@ def test_sync_help() -> None: def test_sync(get_json_file: callable) -> None: """test_sync""" file = get_json_file - util.run_success_cmd(findings_sync.main, " ".join([CMD] + PLAT_OPTS + SYNC_OPTS) + f" -{opt.REPORT_FILE_SHORT} {file}") + util.run_success_cmd(findings_sync.main, " ".join([CMD] + PLAT_OPTS + SYNC_OPTS) + f" -{opt.REPORT_FILE_SHORT} {file}", True) diff --git a/test/unit/test_migration.py b/test/unit/test_migration.py index 05bf72c0..c9bc29b7 100644 --- a/test/unit/test_migration.py +++ b/test/unit/test_migration.py @@ -104,7 +104,7 @@ def test_migration(get_json_file: Generator[str]) -> None: assert json_config["projects"]["demo:gitlab-ci-maven"]["detectedCi"] == "Gitlab CI" assert json_config["projects"]["demo:github-actions-cli"]["detectedCi"] == "Github Actions" if util.SQ.edition() != "community": - assert p["branches"]["main"]["issues"]["thirdParty"] > 0 + assert sum([v for v in p["branches"]["main"]["issues"]["thirdParty"].values()]) > 0 for p in json_config["portfolios"].values(): assert "projects" in p diff --git a/test/unit/test_platform.py b/test/unit/test_platform.py index 389a28ee..89cc3edd 100644 --- a/test/unit/test_platform.py +++ b/test/unit/test_platform.py @@ -47,16 +47,16 @@ def test_plugins() -> None: def test_get_set_reset_settings() -> None: # util.start_logging() assert util.SQ.reset_setting("sonar.exclusions") - assert util.SQ.get_setting("sonar.exclusions") is None + assert util.SQ.get_setting("sonar.exclusions") == "" - assert util.SQ.set_setting("sonar.exclusions", "**/*.foo") - assert 
util.SQ.get_setting("sonar.exclusions") == ["**/*.foo"] + assert util.SQ.set_setting("sonar.exclusions", ["**/*.foo"]) + assert util.SQ.get_setting("sonar.exclusions") == "**/*.foo" - assert util.SQ.set_setting("sonar.exclusions", "**/*.foo,**/*.bar") - assert util.SQ.get_setting("sonar.exclusions") == ["**/*.foo", "**/*.bar"] + assert util.SQ.set_setting("sonar.exclusions", ["**/*.foo", "**/*.bar"]) + assert util.SQ.get_setting("sonar.exclusions") == "**/*.foo, **/*.bar" assert util.SQ.reset_setting("sonar.exclusions") - assert util.SQ.get_setting("sonar.exclusions") is None + assert util.SQ.get_setting("sonar.exclusions") == "" def test_import() -> None: diff --git a/test/unit/test_project_export.py b/test/unit/test_project_export.py index 11d69698..02676650 100644 --- a/test/unit/test_project_export.py +++ b/test/unit/test_project_export.py @@ -51,13 +51,13 @@ def test_export_single_proj(get_json_file: Generator[str]) -> None: def test_export_timeout(get_json_file: Generator[str]) -> None: """test_export_timeout""" cmd = f"{OPTS} --{opt.EXPORT} --{opt.REPORT_FILE} {get_json_file} --{opt.KEYS} okorach_sonar-tools --exportTimeout 10" - util.run_success_cmd(projects_cli.main, cmd) + util.run_success_cmd(projects_cli.main, cmd, True) def test_export_no_file(get_json_file: Generator[str]) -> None: """test_export_timeout""" cmd = f"{OPTS} --{opt.EXPORT} -{opt.KEYS_SHORT} okorach_sonar-tools" - util.run_success_cmd(projects_cli.main, cmd) + util.run_success_cmd(projects_cli.main, cmd, True) def test_export_non_existing_project(get_json_file: Generator[str]) -> None: diff --git a/test/unit/test_rules.py b/test/unit/test_rules.py index 549f1b3e..634ec661 100644 --- a/test/unit/test_rules.py +++ b/test/unit/test_rules.py @@ -41,12 +41,12 @@ def test_rules() -> None: """test_rules""" - util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)}') + util.run_success_cmd(rules_cli.main, f'{" ".join(CSV_OPTS)}', True) def test_rules_json_format() -> None: 
"""test_rules_json_format""" - util.run_success_cmd(rules_cli.main, f'{" ".join(JSON_OPTS)}') + util.run_success_cmd(rules_cli.main, f'{" ".join(JSON_OPTS)}', True) def test_rules_filter_language() -> None: diff --git a/test/unit/utilities.py b/test/unit/utilities.py index 57a156bc..1939234c 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -45,7 +45,7 @@ LTA = "http://localhost:8000" LTS = LTA -LATEST_TEST = "http://localhost:10010" +LATEST_TEST = "http://localhost:20010" CB = "http://localhost:7000" @@ -150,7 +150,7 @@ def __get_args_and_file(string_arguments: str) -> tuple[Optional[str], list[str] imp_cmd = args.index(option) is not None break except ValueError: - logging.info("%s - ValueError", option) + pass for option in (f"-{opt.REPORT_FILE_SHORT}", f"--{opt.REPORT_FILE}"): try: return args[args.index(option) + 1], args, imp_cmd @@ -194,11 +194,13 @@ def run_cmd(func: callable, arguments: str, expected_code: int) -> Optional[str] return file -def run_success_cmd(func: callable, arguments: str) -> None: +def run_success_cmd(func: callable, arguments: str, post_cleanup: bool = False) -> None: """Runs a command that's suppose to end in success""" file = run_cmd(func, arguments, errcodes.OK) if file: assert file_not_empty(file) + if post_cleanup: + clean(file) def run_failed_cmd(func: callable, arguments: str, expected_code: int) -> None: From a71e23c25e96b6a50c2515a9108688ccb234c399 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Thu, 1 May 2025 11:08:44 +0200 Subject: [PATCH 25/29] Further improve tests (#1660) * Simplify help test * Fixes #1659 * Fix test export yaml to not pass json adn yaml files --- cli/config.py | 13 +++++++++++-- test/unit/test_config.py | 2 +- test/unit/test_findings_sync.py | 6 +----- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/cli/config.py b/cli/config.py index 3fadf575..3a5b4cb7 100644 --- a/cli/config.py +++ b/cli/config.py @@ -43,7 +43,7 @@ EXPORT_EMPTY = "exportEmpty" _EXPORT_CALLS = { - 
c.CONFIG_KEY_PLATFORM: [c.CONFIG_KEY_PLATFORM, platform.basics, None], + c.CONFIG_KEY_PLATFORM: [c.CONFIG_KEY_PLATFORM, platform.basics, platform.convert_for_yaml], options.WHAT_SETTINGS: [c.CONFIG_KEY_SETTINGS, platform.export, platform.convert_for_yaml], options.WHAT_RULES: [c.CONFIG_KEY_RULES, rules.export, rules.convert_for_yaml], options.WHAT_PROFILES: [c.CONFIG_KEY_PROFILES, qualityprofiles.export, qualityprofiles.convert_for_yaml], @@ -202,7 +202,16 @@ def export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> Non write_q.put(utilities.WRITE_END) write_q.join() print("\n}", file=fd) - utilities.normalize_json_file(file, remove_empty=False, remove_none=True) + if kwargs[options.FORMAT] == "yaml": + try: + with utilities.open_file(file, mode="r") as fd: + json_data = json.loads(fd.read()) + with utilities.open_file(file, mode="w") as fd: + print(yaml.dump(__convert_for_yaml(json_data), sort_keys=False), file=fd) + except json.decoder.JSONDecodeError: + log.warning("JSON Decode error while converting JSON file '%s' to YAML, is file complete?", file) + else: + utilities.normalize_json_file(file, remove_empty=False, remove_none=True) log.info("Exporting %s data from %s completed", mode.lower(), kwargs[options.URL]) diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 10b204ba..593f689e 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -57,7 +57,7 @@ def test_config_export_partial_3() -> None: def test_config_export_yaml() -> None: """test_config_export_yaml""" - util.run_success_cmd(config.main, f"{OPTS} -{opt.REPORT_FILE_SHORT} {util.YAML_FILE}", True) + util.run_success_cmd(config.main, f"{CMD} {util.SQS_OPTS} --{opt.EXPORT} -{opt.REPORT_FILE_SHORT} {util.YAML_FILE}", True) def test_config_export_wrong() -> None: diff --git a/test/unit/test_findings_sync.py b/test/unit/test_findings_sync.py index 408d6c99..6b96b34e 100644 --- a/test/unit/test_findings_sync.py +++ b/test/unit/test_findings_sync.py @@ 
-48,11 +48,7 @@ def test_sync_help() -> None: """test_sync""" - util.clean(util.JSON_FILE) - with pytest.raises(SystemExit): - with patch.object(sys, "argv", [CMD, "-h"]): - findings_sync.main() - assert not os.path.isfile(util.JSON_FILE) + util.run_failed_cmd(findings_sync.main, f"{CMD} -h", errcodes.ARGS_ERROR) def test_sync(get_json_file: callable) -> None: From 113b99fa90925717cb256b118fc288b99b69f833 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Thu, 1 May 2025 12:28:44 +0200 Subject: [PATCH 26/29] Fixes-sarif-format (#1656) * Fixes #1655 * cleanup --- cli/findings_export.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/cli/findings_export.py b/cli/findings_export.py index 7e936468..3bfa4ea9 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -73,6 +73,8 @@ options.LANGUAGES, ) +_SARIF_NO_CUSTOM_PROPERTIES = "sarifNoCustomProperties" + def parse_args(desc: str) -> Namespace: """Sets CLI parameters and parses them""" @@ -131,7 +133,7 @@ def parse_args(desc: str) -> Namespace: f"--{options.USE_FINDINGS}", required=False, default=False, action="store_true", help="Use export_findings() whenever possible" ) parser.add_argument( - "--sarifNoCustomProperties", + f"--{_SARIF_NO_CUSTOM_PROPERTIES}", required=False, default=False, action="store_true", @@ -177,7 +179,7 @@ def __write_json_findings(findings_list: dict[str, findings.Finding], fd: TextIO if kwargs[options.FORMAT] == "json": json_data = finding.to_json(DATES_WITHOUT_TIME) else: - json_data = finding.to_sarif(kwargs.get("full", True)) + json_data = finding.to_sarif(not kwargs.get(_SARIF_NO_CUSTOM_PROPERTIES, True)) if not kwargs[options.WITH_URL]: json_data.pop("url", None) print(f"{util.json_dump(json_data, indent=1)}{comma}", file=fd) @@ -285,7 +287,7 @@ def __get_component_findings(queue: Queue[tuple[object, ConfigSettings]], write_ else: new_params = params.copy() for p in ( - "sarifNoCustomProperties", + _SARIF_NO_CUSTOM_PROPERTIES, options.NBR_THREADS, 
options.CSV_SEPARATOR, options.COMPONENT_TYPE, From a7c72132e41f2e56216e2babe4ff956c93dd88e1 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Thu, 1 May 2025 12:38:41 +0200 Subject: [PATCH 27/29] Fixes #1662 (#1663) --- sonar/app_branches.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sonar/app_branches.py b/sonar/app_branches.py index 77cc979a..7ea05f16 100644 --- a/sonar/app_branches.py +++ b/sonar/app_branches.py @@ -175,12 +175,12 @@ def update(self, name: str, project_branches: list[Branch]) -> bool: :raises ObjectNotFound: If ApplicationBranch not found in SonarQube :return: whether the operation succeeded """ - if not name and not project_branches: + if not name: + name = self.name + if not project_branches or len(project_branches) == 0: return False params = self.api_params() - params["name"] = name - if len(project_branches) > 0: - params.update({"project": [], "projectBranch": []}) + params.update({"name": name, "project": [], "projectBranch": []}) for branch in project_branches: params["project"].append(branch.concerned_object.key) br_name = "" if branch.is_main() else branch.name From 7bfc00828f1b2440fb62d15274eb60b285985a35 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Fri, 2 May 2025 08:30:14 +0200 Subject: [PATCH 28/29] More-robust-sync-tests (#1664) * More robust test and fix source project branch * Fix typo * Remove useless imports --- test/unit/test_findings_sync.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/test/unit/test_findings_sync.py b/test/unit/test_findings_sync.py index 6b96b34e..ecedc023 100644 --- a/test/unit/test_findings_sync.py +++ b/test/unit/test_findings_sync.py @@ -20,30 +20,21 @@ # """ - sonar-findings-export tests + sonar-findings-sync tests """ import os -import sys from unittest.mock import patch -import pytest import utilities as util -import sonar.logging as log from sonar import errcodes from cli import findings_sync import cli.options as opt 
CMD = "sonar-findings-sync.py" -PLAT_OPTS = [f"--{opt.URL}", os.getenv("SONAR_HOST_URL"), f"--{opt.TOKEN}", os.getenv("SONAR_TOKEN_ADMIN_USER")] + [ - "-U", - os.getenv("SONAR_HOST_URL_TEST"), - "-T", - os.getenv("SONAR_TOKEN_SYNC_USER"), -] -SYNC_OPTS = ["--login", "syncer", f"-{opt.KEYS_SHORT}", "okorach_sonar-tools", "-K", "TESTSYNC"] -ALL_OPTS = [CMD] + PLAT_OPTS + SYNC_OPTS + [f"-{opt.REPORT_FILE_SHORT}", util.JSON_FILE] +PLAT_OPTS = f"--{opt.URL} {os.getenv('SONAR_HOST_URL')} --{opt.TOKEN} {os.getenv('SONAR_TOKEN_ADMIN_USER')} -U {os.getenv('SONAR_HOST_URL_TEST')} -T {os.getenv('SONAR_TOKEN_SYNC_USER')}" +SYNC_OPTS = f"--login syncer -{opt.KEYS_SHORT} {util.LIVE_PROJECT} -K TESTSYNC -b master -B main" def test_sync_help() -> None: @@ -53,5 +44,4 @@ def test_sync_help() -> None: def test_sync(get_json_file: callable) -> None: """test_sync""" - file = get_json_file - util.run_success_cmd(findings_sync.main, " ".join([CMD] + PLAT_OPTS + SYNC_OPTS) + f" -{opt.REPORT_FILE_SHORT} {file}", True) + util.run_success_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -{opt.REPORT_FILE_SHORT} {get_json_file}", True) From 31d33db66f2a8bc5f27bd158e42a34766e81a587 Mon Sep 17 00:00:00 2001 From: Olivier Korach Date: Sat, 3 May 2025 11:26:29 +0200 Subject: [PATCH 29/29] Fix-1665 (#1666) * Test for sonarcloud * Separate local and external URL * URL member name change * Introduce base_url() * use base_url() and local_url * Ajust tests to local_url and external_url * Fix typo * Fixes * Fix launch of v9.9 vs LTS vs LATEST --- cli/findings_export.py | 4 +-- cli/rules_cli.py | 2 +- sonar/app_branches.py | 8 ++--- sonar/applications.py | 4 +-- sonar/branches.py | 12 ++++---- sonar/devops.py | 4 +-- sonar/groups.py | 10 +++--- sonar/hotspots.py | 4 +-- sonar/issues.py | 4 +-- sonar/languages.py | 4 +-- sonar/metrics.py | 2 +- sonar/organizations.py | 4 +-- sonar/permissions/permission_templates.py | 4 +-- sonar/platform.py | 37 +++++++++++++---------- 
sonar/portfolio_reference.py | 2 +- sonar/portfolios.py | 4 +-- sonar/projects.py | 6 ++-- sonar/pull_requests.py | 6 ++-- sonar/qualitygates.py | 12 ++++---- sonar/qualityprofiles.py | 10 +++--- sonar/rules.py | 6 ++-- sonar/settings.py | 12 ++++---- sonar/sif.py | 2 +- sonar/sqobject.py | 8 +++-- sonar/tasks.py | 2 +- sonar/users.py | 6 ++-- sonar/webhooks.py | 8 ++--- test/integration/it.sh | 13 +++++--- test/unit/conftest.py | 4 +-- test/unit/test_groups.py | 2 +- test/unit/test_issues.py | 2 +- test/unit/test_platform.py | 2 +- test/unit/test_qg.py | 4 +-- test/unit/test_qp.py | 2 +- test/unit/test_sif.py | 4 +-- test/unit/test_tasks.py | 2 +- test/unit/test_users.py | 2 +- 37 files changed, 118 insertions(+), 106 deletions(-) diff --git a/cli/findings_export.py b/cli/findings_export.py index 3bfa4ea9..f3aff7ed 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -367,7 +367,7 @@ def __turn_off_use_findings_if_needed(endpoint: object, params: dict[str, str]) """Turn off use-findings option if some incompatible options (issue filters) are used""" if not params[options.USE_FINDINGS]: return params - if util.is_sonarcloud_url(endpoint.url): + if endpoint.is_sonarcloud(): log.warning("--%s option is not available with SonarCloud, disabling the option to proceed", options.USE_FINDINGS) params[options.USE_FINDINGS] = False return params @@ -424,7 +424,7 @@ def main() -> None: store_findings(components_list, params=params) except (PermissionError, FileNotFoundError) as e: util.exit_fatal(f"OS error while exporting findings: {e}", exit_code=errcodes.OS_ERROR) - log.info("%d returned findings from %s", TOTAL_FINDINGS, sqenv.url) + log.info("%d returned findings from %s", TOTAL_FINDINGS, sqenv.local_url) util.stop_clock(start_time) sys.exit(0) diff --git a/cli/rules_cli.py b/cli/rules_cli.py index 2915c0fb..165f7323 100755 --- a/cli/rules_cli.py +++ b/cli/rules_cli.py @@ -99,7 +99,7 @@ def main() -> int: else: __write_rules_json(file=file, 
rule_list=rule_list) - log.info("%d rules exported from %s", len(rule_list), endpoint.url) + log.info("%d rules exported from %s", len(rule_list), endpoint.local_url) util.stop_clock(start_time) sys.exit(0) except exceptions.SonarException as e: diff --git a/sonar/app_branches.py b/sonar/app_branches.py index 7ea05f16..438bc369 100644 --- a/sonar/app_branches.py +++ b/sonar/app_branches.py @@ -78,12 +78,12 @@ def get_object(cls, app: object, branch_name: str) -> ApplicationBranch: """ if app.endpoint.edition() == "community": raise exceptions.UnsupportedOperation(_NOT_SUPPORTED) - o = ApplicationBranch.CACHE.get(app.key, branch_name, app.endpoint.url) + o = ApplicationBranch.CACHE.get(app.key, branch_name, app.base_url()) if o: return o app.refresh() app.branches() - o = ApplicationBranch.CACHE.get(app.key, branch_name, app.endpoint.url) + o = ApplicationBranch.CACHE.get(app.key, branch_name, app.base_url()) if o: return o raise exceptions.ObjectNotFound(app.key, f"Application key '{app.key}' branch '{branch_name}' not found") @@ -126,7 +126,7 @@ def __str__(self) -> str: def __hash__(self) -> int: """Returns the object UUID""" - return hash((self.concerned_object.key, self.name, self.endpoint.url)) + return hash((self.concerned_object.key, self.name, self.base_url())) def is_main(self) -> bool: """Returns whether app branch is main""" @@ -231,7 +231,7 @@ def component_data(self) -> types.Obj: def url(self) -> str: """Returns the URL of the Application Branch""" - return f"{self.endpoint.url}/dashboard?id={self.concerned_object.key}&branch={quote(self.name)}" + return f"{self.base_url(local=False)}/dashboard?id={self.concerned_object.key}&branch={quote(self.name)}" def exists(app: object, branch: str) -> bool: diff --git a/sonar/applications.py b/sonar/applications.py index 6637ab43..9e783009 100644 --- a/sonar/applications.py +++ b/sonar/applications.py @@ -89,7 +89,7 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Application: :rtype: Application """ 
check_supported(endpoint) - o = Application.CACHE.get(key, endpoint.url) + o = Application.CACHE.get(key, endpoint.local_url) if o: return o try: @@ -112,7 +112,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> Application: :rtype: Application """ check_supported(endpoint) - o = Application.CACHE.get(data["key"], endpoint.url) + o = Application.CACHE.get(data["key"], endpoint.local_url) if not o: o = cls(endpoint, data["key"], data["name"]) o.reload(data) diff --git a/sonar/branches.py b/sonar/branches.py index 92cdb3ce..1affd31f 100644 --- a/sonar/branches.py +++ b/sonar/branches.py @@ -86,7 +86,7 @@ def get_object(cls, concerned_object: projects.Project, branch_name: str) -> Bra :rtype: Branch """ branch_name = unquote(branch_name) - o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.endpoint.url) + o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) if o: return o try: @@ -113,7 +113,7 @@ def load(cls, concerned_object: projects.Project, branch_name: str, data: types. 
:rtype: Branch """ branch_name = unquote(branch_name) - o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.endpoint.url) + o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) if not o: o = cls(concerned_object, branch_name) o._load(data) @@ -124,7 +124,7 @@ def __str__(self) -> str: def __hash__(self) -> int: """Computes a uuid for the branch that can serve as index""" - return hash((self.concerned_object.key, self.name, self.endpoint.url)) + return hash((self.concerned_object.key, self.name, self.base_url())) def project(self) -> projects.Project: """Returns the project key""" @@ -245,7 +245,7 @@ def url(self) -> str: :return: The branch URL in SonarQube as permalink :rtype: str """ - return f"{self.endpoint.url}/dashboard?id={self.concerned_object.key}&branch={requests.utils.quote(self.name)}" + return f"{self.base_url(local=False)}/dashboard?id={self.concerned_object.key}&branch={requests.utils.quote(self.name)}" def rename(self, new_name: str) -> bool: """Renames a branch @@ -321,7 +321,7 @@ def sync(self, another_branch: Branch, sync_settings: types.ConfigSettings) -> t from sonar.syncer import sync_lists report, counters = [], {} - log.info("Syncing %s (%s) and %s (%s) issues", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url) + log.info("Syncing %s (%s) and %s (%s) issues", str(self), self.base_url(), str(another_branch), another_branch.endpoint.local_url) (report, counters) = sync_lists( list(self.get_issues().values()), list(another_branch.get_issues().values()), @@ -329,7 +329,7 @@ def sync(self, another_branch: Branch, sync_settings: types.ConfigSettings) -> t another_branch, sync_settings=sync_settings, ) - log.info("Syncing %s (%s) and %s (%s) hotspots", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url) + log.info("Syncing %s (%s) and %s (%s) hotspots", str(self), self.base_url(), str(another_branch), another_branch.endpoint.local_url) 
(tmp_report, tmp_counts) = sync_lists( list(self.get_hotspots().values()), list(another_branch.get_hotspots().values()), diff --git a/sonar/devops.py b/sonar/devops.py index 5d5c0712..3c9e16c1 100644 --- a/sonar/devops.py +++ b/sonar/devops.py @@ -66,7 +66,7 @@ def __init__(self, endpoint: platform.Platform, key: str, platform_type: str) -> @classmethod def read(cls, endpoint: platform.Platform, key: str) -> DevopsPlatform: """Reads a devops platform object in Sonar instance""" - o = DevopsPlatform.CACHE.get(key, endpoint.url) + o = DevopsPlatform.CACHE.get(key, endpoint.local_url) if o: return o data = json.loads(endpoint.get(DevopsPlatform.API[c.LIST]).text) @@ -80,7 +80,7 @@ def read(cls, endpoint: platform.Platform, key: str) -> DevopsPlatform: def load(cls, endpoint: platform.Platform, plt_type: str, data: types.ApiPayload) -> DevopsPlatform: """Finds a devops platform object and loads it with data""" key = data["key"] - o = DevopsPlatform.CACHE.get(key, endpoint.url) + o = DevopsPlatform.CACHE.get(key, endpoint.local_url) if not o: o = DevopsPlatform(endpoint=endpoint, key=key, platform_type=plt_type) return o._load(data) diff --git a/sonar/groups.py b/sonar/groups.py index 40b7c5d1..aeab7d31 100644 --- a/sonar/groups.py +++ b/sonar/groups.py @@ -92,14 +92,14 @@ def read(cls, endpoint: pf.Platform, name: str) -> Group: :return: The group object """ log.debug("Reading group '%s'", name) - o = Group.CACHE.get(name, endpoint.url) + o = Group.CACHE.get(name, endpoint.local_url) if o: return o data = util.search_by_name(endpoint, name, Group.api_for(c.SEARCH, endpoint), "groups") if data is None: raise exceptions.ObjectNotFound(name, f"Group '{name}' not found.") # SonarQube 10 compatibility: "id" field is dropped, use "name" instead - o = Group.CACHE.get(data.get("id", data["name"]), endpoint.url) + o = Group.CACHE.get(data.get("id", data["name"]), endpoint.local_url) if o: return o return cls(endpoint, name, data=data) @@ -150,10 +150,10 @@ def get_object(cls, 
endpoint: pf.Platform, name: str) -> Group: :param str name: group name :return: The group """ - o = Group.CACHE.get(name, endpoint.url) + o = Group.CACHE.get(name, endpoint.local_url) if not o: get_list(endpoint) - o = Group.CACHE.get(name, endpoint.url) + o = Group.CACHE.get(name, endpoint.local_url) if not o: raise exceptions.ObjectNotFound(name, message=f"Group '{name}' not found") return o @@ -217,7 +217,7 @@ def url(self) -> str: :return: the SonarQube permalink URL to the group, actually the global groups page only since this is as close as we can get to the precise group definition """ - return f"{self.endpoint.url}/admin/groups" + return f"{self.base_url(local=False)}/admin/groups" def add_user(self, user: object) -> bool: """Adds an user to the group diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 1c5d27bf..db3d774c 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -114,7 +114,7 @@ def url(self) -> str: branch = f"branch={requests.utils.quote(self.branch)}&" elif self.pull_request is not None: branch = f"pullRequest={requests.utils.quote(self.pull_request)}&" - return f"{self.endpoint.url}/security_hotspots?{branch}id={self.projectKey}&hotspots={self.key}" + return f"{self.base_url(local=False)}/security_hotspots?{branch}id={self.projectKey}&hotspots={self.key}" def to_json(self, without_time: bool = False) -> types.ObjectJsonRepr: """ @@ -449,7 +449,7 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str, def get_object(endpoint: pf.Platform, key: str, data: dict[str] = None, from_export: bool = False) -> Hotspot: """Returns a hotspot from its key""" - o = Hotspot.CACHE.get(key, endpoint.url) + o = Hotspot.CACHE.get(key, endpoint.local_url) if not o: o = Hotspot(key=key, data=data, endpoint=endpoint, from_export=from_export) return o diff --git a/sonar/issues.py b/sonar/issues.py index d14303cb..85237324 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -189,7 +189,7 @@ def url(self) -> str: branch = 
f"&branch={requests.utils.quote(self.branch)}" elif self.pull_request is not None: branch = f"&pullRequest={requests.utils.quote(self.pull_request)}" - return f"{self.endpoint.url}/project/issues?id={self.projectKey}{branch}&issues={self.key}" + return f"{self.base_url(local=False)}/project/issues?id={self.projectKey}{branch}&issues={self.key}" def debt(self) -> int: """ @@ -969,7 +969,7 @@ def count_by_rule(endpoint: pf.Platform, **kwargs) -> dict[str, int]: def get_object(endpoint: pf.Platform, key: str, data: ApiPayload = None, from_export: bool = False) -> Issue: """Returns an issue from its key""" - o = Issue.CACHE.get(key, endpoint.url) + o = Issue.CACHE.get(key, endpoint.local_url) if not o: o = Issue(endpoint=endpoint, key=key, data=data, from_export=from_export) return o diff --git a/sonar/languages.py b/sonar/languages.py index 7db971b8..178af192 100644 --- a/sonar/languages.py +++ b/sonar/languages.py @@ -52,7 +52,7 @@ def __init__(self, endpoint: pf.Platform, key: str, name: str) -> None: @classmethod def load(cls, endpoint: pf.Platform, data: ApiPayload) -> Language: - o = Language.CACHE.get(data["key"], endpoint.url) + o = Language.CACHE.get(data["key"], endpoint.local_url) if not o: o = cls(endpoint=endpoint, key=data["key"], name=data["name"]) return o @@ -64,7 +64,7 @@ def read(cls, endpoint: pf.Platform, key: str) -> Language: :rtype: Language or None if not found """ get_list(endpoint) - return Language.CACHE.get(key, endpoint.url) + return Language.CACHE.get(key, endpoint.local_url) def number_of_rules(self, rule_type: str = None) -> int: """Count rules in the language, optionally filtering on rule type diff --git a/sonar/metrics.py b/sonar/metrics.py index 348307ab..3a0558df 100644 --- a/sonar/metrics.py +++ b/sonar/metrics.py @@ -99,7 +99,7 @@ def __init__(self, endpoint: pf.Platform, key: str, data: ApiPayload = None) -> @classmethod def get_object(cls, endpoint: pf.Platform, key: str) -> Metric: search(endpoint=endpoint) - o = 
Metric.CACHE.get(key, endpoint.url) + o = Metric.CACHE.get(key, endpoint.local_url) if not o: raise exceptions.ObjectNotFound(key, f"Metric key '{key}' not found") return o diff --git a/sonar/organizations.py b/sonar/organizations.py index 3b78b9b7..a8869292 100644 --- a/sonar/organizations.py +++ b/sonar/organizations.py @@ -73,7 +73,7 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Organization: """ if not endpoint.is_sonarcloud(): raise exceptions.UnsupportedOperation(_NOT_SUPPORTED) - o = Organization.CACHE.get(key, endpoint.url) + o = Organization.CACHE.get(key, endpoint.local_url) if o: return o try: @@ -99,7 +99,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> Organization: """ if not endpoint.is_sonarcloud(): raise exceptions.UnsupportedOperation(_NOT_SUPPORTED) - o = Organization.CACHE.get(data["key"], endpoint.url) + o = Organization.CACHE.get(data["key"], endpoint.local_url) if not o: o = cls(endpoint, data["key"], data["name"]) o.sq_json = data diff --git a/sonar/permissions/permission_templates.py b/sonar/permissions/permission_templates.py index 9fe1753f..ce77b2bc 100644 --- a/sonar/permissions/permission_templates.py +++ b/sonar/permissions/permission_templates.py @@ -87,7 +87,7 @@ def __str__(self) -> str: def __hash__(self) -> int: """Returns object unique id""" - return hash((self.name.lower(), self.endpoint.url)) + return hash((self.name.lower(), self.base_url())) def is_default_for(self, qualifier: str) -> bool: """Returns whether a template is the default for a type of qualifier""" @@ -205,7 +205,7 @@ def get_object(endpoint: pf.Platform, name: str) -> PermissionTemplate: """Returns Perm Template object corresponding to name""" if len(PermissionTemplate.CACHE) == 0: get_list(endpoint) - return PermissionTemplate.CACHE.get(name.lower(), endpoint.url) + return PermissionTemplate.CACHE.get(name.lower(), endpoint.local_url) def create_or_update(endpoint: pf.Platform, name: str, data: types.ObjectJsonRepr) -> 
PermissionTemplate: diff --git a/sonar/platform.py b/sonar/platform.py index bd9edbb7..e0f054fd 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -75,7 +75,8 @@ def __init__(self, url: str, token: str, org: str = None, cert_file: Optional[st :return: the SonarQube object :rtype: Platform """ - self.url = url.rstrip("/").lower() #: SonarQube URL + self.local_url = url.rstrip("/").lower() #: SonarQube URL + self.external_url = self.local_url self.__token = token self.__cert_file = cert_file self.__user_data = None @@ -93,15 +94,19 @@ def __str__(self) -> str: """ Returns the string representation of the SonarQube connection, with the token recognizable but largely redacted """ - return f"{util.redacted_token(self.__token)}@{self.url}" + return f"{util.redacted_token(self.__token)}@{self.local_url}" def __credentials(self) -> tuple[str, str]: return self.__token, "" def verify_connection(self) -> None: try: - log.info("Connecting to %s", self.url) + log.info("Connecting to %s", self.local_url) self.get("server/version") + if not self.is_sonarcloud(): + s = self.get_setting(key="sonar.core.serverBaseURL") + if s not in (None, ""): + self.external_url = s except (ConnectionError, RequestException) as e: util.handle_error(e, "verifying connection", catch_all=True) raise exceptions.ConnectionError(util.sonar_error(e.response)) @@ -157,7 +162,7 @@ def is_sonarcloud(self) -> bool: """ Returns whether the target platform is SonarCloud """ - return util.is_sonarcloud_url(self.url) + return util.is_sonarcloud_url(self.local_url) def basics(self) -> dict[str, str]: """ @@ -167,7 +172,7 @@ def basics(self) -> dict[str, str]: url = self.get_setting(key="sonar.core.serverBaseURL") if url in (None, ""): - url = self.url + url = self.local_url data = {"edition": self.edition(), "url": url} if self.is_sonarcloud(): return {**data, "organization": self.organization} @@ -248,7 +253,7 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N while 
retry: start = time.perf_counter_ns() r = request( - url=self.url + api, + url=self.local_url + api, auth=self.__credentials(), verify=self.__cert_file, params=params, @@ -258,7 +263,7 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N (retry, new_url) = _check_for_retry(r) log.debug("%s: %s took %d ms", req_type, url, (time.perf_counter_ns() - start) // 1000000) if retry: - self.url = new_url + self.local_url = new_url r.raise_for_status() except HTTPError as e: lvl = log.DEBUG if r.status_code in mute else log.ERROR @@ -530,7 +535,7 @@ def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: log.info("--- Auditing global settings ---") problems = [] platform_settings = self.get_settings() - settings_url = f"{self.url}/admin/settings" + settings_url = f"{self.local_url}/admin/settings" for key in audit_settings: if key.startswith("audit.globalSettings.range"): problems += _audit_setting_in_range(key, platform_settings, audit_settings, self.version(), settings_url) @@ -600,12 +605,12 @@ def _audit_logs(self, audit_settings: types.ConfigSettings) -> list[Problem]: log.warning("Warning found in %s: %s", logfile, line) rule = get_rule(RuleId.WARNING_IN_LOGS) if rule is not None: - problems.append(Problem(rule, f"{self.url}/admin/system", logfile, line)) + problems.append(Problem(rule, f"{self.local_url}/admin/system", logfile, line)) logs = self.get("system/logs", params={"name": "deprecation"}).text nb_deprecation = len(logs.splitlines()) if nb_deprecation > 0: rule = get_rule(RuleId.DEPRECATION_WARNINGS) - problems.append(Problem(rule, f"{self.url}/admin/system", nb_deprecation)) + problems.append(Problem(rule, f"{self.local_url}/admin/system", nb_deprecation)) return problems def _audit_project_default_visibility(self, audit_settings: types.ConfigSettings) -> list[Problem]: @@ -624,17 +629,17 @@ def _audit_project_default_visibility(self, audit_settings: types.ConfigSettings log.info("Project default visibility is 
'%s'", visi) if audit_settings.get("audit.globalSettings.defaultProjectVisibility", "private") != visi: rule = get_rule(RuleId.SETTING_PROJ_DEFAULT_VISIBILITY) - problems.append(Problem(rule, f"{self.url}/admin/projects_management", visi)) + problems.append(Problem(rule, f"{self.local_url}/admin/projects_management", visi)) return problems def _audit_admin_password(self) -> list[Problem]: log.info("Auditing admin password") problems = [] try: - r = requests.get(url=self.url + "/api/authentication/validate", auth=("admin", "admin"), timeout=self.http_timeout) + r = requests.get(url=self.local_url + "/api/authentication/validate", auth=("admin", "admin"), timeout=self.http_timeout) data = json.loads(r.text) if data.get("valid", False): - problems.append(Problem(get_rule(RuleId.DEFAULT_ADMIN_PASSWORD), self.url)) + problems.append(Problem(get_rule(RuleId.DEFAULT_ADMIN_PASSWORD), self.local_url)) else: log.info("User 'admin' default password has been changed") except requests.RequestException as e: @@ -644,7 +649,7 @@ def _audit_admin_password(self) -> list[Problem]: def __audit_group_permissions(self) -> list[Problem]: log.info("Auditing group global permissions") problems = [] - perms_url = f"{self.url}/admin/permissions" + perms_url = f"{self.local_url}/admin/permissions" groups = self.global_permissions().groups() if len(groups) > 10: problems.append(Problem(get_rule(rule_id=RuleId.RISKY_GLOBAL_PERMISSIONS), perms_url, len(groups))) @@ -668,7 +673,7 @@ def __audit_group_permissions(self) -> list[Problem]: def __audit_user_permissions(self) -> list[Problem]: log.info("Auditing users global permissions") problems = [] - perms_url = f"{self.url}/admin/permissions" + perms_url = f"{self.local_url}/admin/permissions" users = self.global_permissions().users() if len(users) > 10: msg = f"Too many ({len(users)}) users with direct global permissions, use groups instead" @@ -702,7 +707,7 @@ def _audit_lta_latest(self) -> list[Problem]: if not v: return [] # pylint: 
disable-next=E0606 - return [Problem(rule, self.url, ".".join([str(n) for n in sq_vers]), ".".join([str(n) for n in v]))] + return [Problem(rule, self.local_url, ".".join([str(n) for n in sq_vers]), ".".join([str(n) for n in v]))] def is_mqr_mode(self) -> bool: """Returns whether the platform is in MQR mode""" diff --git a/sonar/portfolio_reference.py b/sonar/portfolio_reference.py index e6c8219b..8684df06 100644 --- a/sonar/portfolio_reference.py +++ b/sonar/portfolio_reference.py @@ -56,7 +56,7 @@ def get_object(cls, endpoint: pf.Platform, key: str, parent_key: str) -> Portfol """Gets a subportfolio by reference object from its key and parent""" check_supported(endpoint) log.info("Getting subportfolio by ref key '%s:%s'", parent_key, key) - o = PortfolioReference.CACHE.get(f"{parent_key}:{key}", endpoint.url) + o = PortfolioReference.CACHE.get(f"{parent_key}:{key}", endpoint.local_url) if not o: raise exceptions.ObjectNotFound return o diff --git a/sonar/portfolios.py b/sonar/portfolios.py index 791e7596..87e43802 100644 --- a/sonar/portfolios.py +++ b/sonar/portfolios.py @@ -118,7 +118,7 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Portfolio: """Gets a portfolio object from its key""" check_supported(endpoint) log.debug("Getting portfolio object key '%s'", key) - o = Portfolio.CACHE.get(key, endpoint.url) + o = Portfolio.CACHE.get(key, endpoint.local_url) if o: log.debug("%s is in cache", str(o)) return o @@ -222,7 +222,7 @@ def refresh(self) -> None: def url(self) -> str: """Returns the object permalink""" - return f"{self.endpoint.url}/portfolio?id={self.key}" + return f"{self.base_url(local=False)}/portfolio?id={self.key}" def projects(self) -> Optional[dict[str, str]]: """Returns list of projects and their branches if selection mode is manual, None otherwise""" diff --git a/sonar/projects.py b/sonar/projects.py index 3a20645b..6db32bde 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -167,7 +167,7 @@ def get_object(cls, endpoint: 
pf.Platform, key: str) -> Project: :return: The Project :rtype: Project """ - o = Project.CACHE.get(key, endpoint.url) + o = Project.CACHE.get(key, endpoint.local_url) if o: return o try: @@ -194,7 +194,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> Project: :rtype: Project """ key = data["key"] - o = Project.CACHE.get(key, endpoint.url) + o = Project.CACHE.get(key, endpoint.local_url) if not o: o = cls(endpoint, key) o.reload(data) @@ -271,7 +271,7 @@ def url(self) -> str: :return: the SonarQube permalink to the project :rtype: str """ - return f"{self.endpoint.url}/dashboard?id={self.key}" + return f"{self.base_url(local=False)}/dashboard?id={self.key}" def last_analysis(self, include_branches: bool = False) -> datetime: """ diff --git a/sonar/pull_requests.py b/sonar/pull_requests.py index a363c986..17534242 100644 --- a/sonar/pull_requests.py +++ b/sonar/pull_requests.py @@ -63,11 +63,11 @@ def __str__(self) -> str: def __hash__(self) -> int: """Returns a PR unique ID""" - return hash((self.project().key, self.key, self.endpoint.url)) + return hash((self.project().key, self.key, self.base_url())) def url(self) -> str: """Returns the PR permalink (until PR is purged)""" - return f"{self.endpoint.url}/dashboard?id={self.concerned_object.key}&pullRequest={requests.utils.quote(self.key)}" + return f"{self.base_url(local=False)}/dashboard?id={self.concerned_object.key}&pullRequest={requests.utils.quote(self.key)}" def project(self) -> object: """Returns the project""" @@ -101,7 +101,7 @@ def get_object(pull_request_key: str, project: object, data: types.ApiPayload = if project.endpoint.edition() == "community": log.debug("Pull requests not available in Community Edition") return None - o = PullRequest.CACHE.get(project.key, pull_request_key, project.endpoint.url) + o = PullRequest.CACHE.get(project.key, pull_request_key, project.base_url()) if not o: o = PullRequest(project, pull_request_key, data=data) return o diff --git 
a/sonar/qualitygates.py b/sonar/qualitygates.py index 1023fa73..4f8e1b5b 100644 --- a/sonar/qualitygates.py +++ b/sonar/qualitygates.py @@ -110,7 +110,7 @@ def get_object(cls, endpoint: pf.Platform, name: str) -> QualityGate: :param name: Quality gate :return: the QualityGate object or None if not found """ - o = QualityGate.CACHE.get(name, endpoint.url) + o = QualityGate.CACHE.get(name, endpoint.local_url) if o: return o data = search_by_name(endpoint, name) @@ -124,7 +124,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> QualityGate: :return: the QualityGate object """ # SonarQube 10 compatibility: "id" field dropped, replaced by "name" - o = QualityGate.CACHE.get(data["name"], endpoint.url) + o = QualityGate.CACHE.get(data["name"], endpoint.local_url) if not o: o = cls(endpoint, data["name"], data=data) log.debug("Loading 2 %s QG from %s", o.name, util.json_dump(data)) @@ -152,14 +152,14 @@ def __str__(self) -> str: def __hash__(self) -> int: """Default UUID for SQ objects""" - return hash((self.name, self.endpoint.url)) + return hash((self.name, self.base_url())) def url(self) -> str: """ :return: The object permalink :rtype: str """ - return f"{self.endpoint.url}/quality_gates/show/{self.key}" + return f"{self.base_url(local=False)}/quality_gates/show/{self.key}" def projects(self) -> dict[str, projects.Project]: """ @@ -367,7 +367,7 @@ def audit(endpoint: pf.Platform = None, audit_settings: types.ConfigSettings = N nb_qg = len(quality_gates_list) log.debug("Auditing that there are no more than %s quality gates", str(max_qg)) if nb_qg > max_qg: - problems.append(Problem(get_rule(RuleId.QG_TOO_MANY_GATES), f"{endpoint.url}/quality_gates", nb_qg, 5)) + problems.append(Problem(get_rule(RuleId.QG_TOO_MANY_GATES), f"{endpoint.external_url}/quality_gates", nb_qg, 5)) for qg in quality_gates_list.values(): problems += qg.audit(audit_settings) if "write_q" in kwargs: @@ -384,7 +384,7 @@ def get_list(endpoint: pf.Platform) -> dict[str, QualityGate]: 
data = json.loads(endpoint.get(QualityGate.API[c.LIST]).text) qg_list = {} for qg in data["qualitygates"]: - qg_obj = QualityGate.CACHE.get(qg["name"], endpoint.url) + qg_obj = QualityGate.CACHE.get(qg["name"], endpoint.local_url) if qg_obj is None: qg_obj = QualityGate(endpoint=endpoint, name=qg["name"], data=qg.copy()) if endpoint.version() < (7, 9, 0) and "default" in data and data["default"] == qg["id"]: diff --git a/sonar/qualityprofiles.py b/sonar/qualityprofiles.py index 5b54c768..5059d908 100644 --- a/sonar/qualityprofiles.py +++ b/sonar/qualityprofiles.py @@ -109,7 +109,7 @@ def read(cls, endpoint: pf.Platform, name: str, language: str) -> Optional[Quali log.error("Language '%s' does not exist, quality profile creation aborted", language) return None log.debug("Reading quality profile '%s' of language '%s'", name, language) - o = QualityProfile.CACHE.get(name, language, endpoint.url) + o = QualityProfile.CACHE.get(name, language, endpoint.local_url) if o: return o data = util.search_by_name( @@ -179,14 +179,14 @@ def __str__(self) -> str: return f"quality profile '{self.name}' of language '{self.language}'" def __hash__(self) -> int: - return hash((self.name, self.language, self.endpoint.url)) + return hash((self.name, self.language, self.base_url())) def url(self) -> str: """ :return: the SonarQube permalink URL to the quality profile :rtype: str """ - return f"{self.endpoint.url}/profiles/show?language={self.language}&name={requests.utils.quote(self.name)}" + return f"{self.base_url(local=False)}/profiles/show?language={self.language}&name={requests.utils.quote(self.name)}" def last_use(self) -> datetime: """ @@ -628,7 +628,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings = None, ** for lang, nb_qp in langs.items(): if nb_qp > 5: rule = get_rule(RuleId.QP_TOO_MANY_QP) - problems.append(Problem(rule, f"{endpoint.url}/profiles?language={lang}", nb_qp, lang, 5)) + problems.append(Problem(rule, 
f"{endpoint.external_url}/profiles?language={lang}", nb_qp, lang, 5)) if "write_q" in kwargs: kwargs["write_q"].put(problems) return problems @@ -731,7 +731,7 @@ def get_object(endpoint: pf.Platform, name: str, language: str) -> Optional[Qual :return: The quality profile object, of None if not found """ get_list(endpoint) - o = QualityProfile.CACHE.get(name, language, endpoint.url) + o = QualityProfile.CACHE.get(name, language, endpoint.local_url) if not o: raise exceptions.ObjectNotFound(name, message=f"Quality Profile '{language}:{name}' not found") return o diff --git a/sonar/rules.py b/sonar/rules.py index a75e1cf5..a08c3e33 100644 --- a/sonar/rules.py +++ b/sonar/rules.py @@ -193,7 +193,7 @@ def __init__(self, endpoint: platform.Platform, key: str, data: types.ApiPayload @classmethod def get_object(cls, endpoint: platform.Platform, key: str) -> Rule: """Returns a rule object from the cache or from the platform itself""" - o = Rule.CACHE.get(key, endpoint.url) + o = Rule.CACHE.get(key, endpoint.local_url) if o: return o try: @@ -218,7 +218,7 @@ def create(cls, endpoint: platform.Platform, key: str, **kwargs) -> Optional[Rul @classmethod def load(cls, endpoint: platform.Platform, key: str, data: types.ApiPayload) -> Rule: """Loads a rule object""" - o = Rule.CACHE.get(key, endpoint.url) + o = Rule.CACHE.get(key, endpoint.local_url) if o: o.sq_json.update(data) return o @@ -378,7 +378,7 @@ def get_object(endpoint: platform.Platform, key: str) -> Optional[Rule]: :param str key: The rule key :rtype: Rule or None """ - o = Rule.CACHE.get(key, endpoint.url) + o = Rule.CACHE.get(key, endpoint.local_url) if o: return o try: diff --git a/sonar/settings.py b/sonar/settings.py index 1706af25..19eedd27 100644 --- a/sonar/settings.py +++ b/sonar/settings.py @@ -150,7 +150,7 @@ def __init__(self, endpoint: pf.Platform, key: str, component: object = None, da def read(cls, key: str, endpoint: pf.Platform, component: object = None) -> Setting: """Reads a setting from the 
platform""" log.debug("Reading setting '%s' for %s", key, str(component)) - o = Setting.CACHE.get(key, component, endpoint.url) + o = Setting.CACHE.get(key, component, endpoint.local_url) if o: return o if key == NEW_CODE_PERIOD and not endpoint.is_sonarcloud(): @@ -182,7 +182,7 @@ def create(cls, key: str, endpoint: pf.Platform, value: any = None, component: o def load(cls, key: str, endpoint: pf.Platform, data: types.ApiPayload, component: object = None) -> Setting: """Loads a setting with JSON data""" log.debug("Loading setting '%s' of component '%s' with data %s", key, str(component), str(data)) - o = Setting.CACHE.get(key, component, endpoint.url) + o = Setting.CACHE.get(key, component, endpoint.local_url) if not o: o = cls(key=key, endpoint=endpoint, data=data, component=component) o.reload(data) @@ -226,7 +226,7 @@ def reload(self, data: types.ApiPayload) -> None: def __hash__(self) -> int: """Returns object unique ID""" - return hash((self.key, self.component.key if self.component else None, self.endpoint.url)) + return hash((self.key, self.component.key if self.component else None, self.base_url())) def __str__(self) -> str: if self.component is None: @@ -386,10 +386,10 @@ def category(self) -> tuple[str, str]: def get_object(endpoint: pf.Platform, key: str, component: object = None) -> Setting: """Returns a Setting object from its key and, optionally, component""" - o = Setting.CACHE.get(key, component, endpoint.url) + o = Setting.CACHE.get(key, component, endpoint.local_url) if not o: get_all(endpoint, component) - return Setting.CACHE.get(key, component, endpoint.url) + return Setting.CACHE.get(key, component, endpoint.local_url) def __get_settings(endpoint: pf.Platform, data: types.ApiPayload, component: object = None) -> dict[str, Setting]: @@ -498,7 +498,7 @@ def set_new_code_period(endpoint: pf.Platform, nc_type: str, nc_value: str, proj def get_visibility(endpoint: pf.Platform, component: object) -> str: """Returns the platform global or component 
visibility""" key = COMPONENT_VISIBILITY if component else PROJECT_DEFAULT_VISIBILITY - o = Setting.CACHE.get(key, component, endpoint.url) + o = Setting.CACHE.get(key, component, endpoint.local_url) if o: return o if component: diff --git a/sonar/sif.py b/sonar/sif.py index b8ce8f22..57de1248 100644 --- a/sonar/sif.py +++ b/sonar/sif.py @@ -75,7 +75,7 @@ def url(self) -> str: """Returns the SQ URL of the SQ instance represented by the SIF""" if not self._url: if self.concerned_object: - self._url = self.concerned_object.url + self._url = self.concerned_object.external_url else: self._url = self.json.get("Settings", {}).get("sonar.core.serverBaseURL", "") return self._url diff --git a/sonar/sqobject.py b/sonar/sqobject.py index 51ffb179..4eff3250 100644 --- a/sonar/sqobject.py +++ b/sonar/sqobject.py @@ -51,7 +51,7 @@ def __init__(self, endpoint: object, key: str) -> None: def __hash__(self) -> int: """Default UUID for SQ objects""" - return hash((self.key, self.endpoint.url)) + return hash((self.key, self.base_url())) def __eq__(self, another: object) -> bool: if type(self) is type(another): @@ -80,7 +80,7 @@ def clear_cache(cls, endpoint: Optional[object] = None) -> None: if not endpoint: cls.CACHE.clear() else: - _ = [cls.CACHE.pop(o) for o in cls.CACHE.values().copy() if o.endpoint.url != endpoint.url] + _ = [cls.CACHE.pop(o) for o in cls.CACHE.values().copy() if o.base_url() != endpoint.local_url] except AttributeError: pass @@ -91,6 +91,10 @@ def reload(self, data: types.ObjectJsonRepr) -> None: else: self.sq_json.update(data) + def base_url(self, local: bool = True) -> str: + """Returns the platform base URL""" + return self.endpoint.local_url if local or self.endpoint.external_url in (None, "") else self.endpoint.external_url + def get( self, api: str, diff --git a/sonar/tasks.py b/sonar/tasks.py index 491d8a8f..bb92d280 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -85,7 +85,7 @@ def url(self) -> str: :return: the SonarQube permalink URL to the 
background task :rtype: str """ - u = f"{self.endpoint.url}/project/background_tasks" + u = f"{self.base_url(local=False)}/project/background_tasks" if self.component_key: u += f"?id={self.component_key}" return u diff --git a/sonar/users.py b/sonar/users.py index 0b699e29..c79429fe 100644 --- a/sonar/users.py +++ b/sonar/users.py @@ -140,7 +140,7 @@ def get_object(cls, endpoint: pf.Platform, login: str) -> User: :return: The user object :rtype: User """ - o = User.CACHE.get(login, endpoint.url) + o = User.CACHE.get(login, endpoint.local_url) if o: return o log.debug("Getting user '%s'", login) @@ -253,7 +253,7 @@ def url(self) -> str: since this is as close as we can get to the precise user definition :rtype: str """ - return f"{self.endpoint.url}/admin/users" + return f"{self.base_url(local=False)}/admin/users" def tokens(self, **kwargs) -> list[tokens.UserToken]: """ @@ -295,7 +295,7 @@ def update(self, **kwargs) -> User: self.set_scm_accounts(kwargs["scmAccounts"]) if "login" in kwargs: new_login = kwargs["login"] - o = User.CACHE.get(new_login, self.endpoint.url) + o = User.CACHE.get(new_login, self.base_url()) if not o: api = User.api_for("UPDATE_LOGIN", self.endpoint) if self.endpoint.version() >= (10, 4, 0): diff --git a/sonar/webhooks.py b/sonar/webhooks.py index dedf2869..8d806ad7 100644 --- a/sonar/webhooks.py +++ b/sonar/webhooks.py @@ -67,11 +67,11 @@ def __hash__(self) -> int: Returns an object unique Id :meta private: """ - return hash((self.name, self.project if self.project else "", self.endpoint.url)) + return hash((self.name, self.project if self.project else "", self.base_url())) def url(self) -> str: """Returns the object permalink""" - return f"{self.endpoint.url}/admin/webhooks" + return f"{self.base_url(local=False)}/admin/webhooks" def update(self, **kwargs) -> None: """Updates a webhook with new properties (name, url, secret) @@ -140,7 +140,7 @@ def update(endpoint: pf.Platform, name: str, **kwargs) -> None: """Updates a webhook with data 
in kwargs""" project_key = kwargs.pop("project", None) get_list(endpoint, project_key) - o = WebHook.CACHE.get(name, project_key, endpoint.url) + o = WebHook.CACHE.get(name, project_key, endpoint.local_url) if not o: create(endpoint, name, kwargs["url"], kwargs["secret"], project=project_key) else: @@ -150,7 +150,7 @@ def update(endpoint: pf.Platform, name: str, **kwargs) -> None: def get_object(endpoint: pf.Platform, name: str, project_key: str = None, data: types.ApiPayload = None) -> WebHook: """Gets a WebHook object from name a project key""" log.debug("Getting webhook name %s project key %s data = %s", name, str(project_key), str(data)) - o = WebHook.CACHE.get(name, project_key, endpoint.url) + o = WebHook.CACHE.get(name, project_key, endpoint.local_url) if not o: o = WebHook(endpoint=endpoint, name=name, project=project_key, data=data) return o diff --git a/test/integration/it.sh b/test/integration/it.sh index 76c9e6eb..2e67fe32 100755 --- a/test/integration/it.sh +++ b/test/integration/it.sh @@ -53,7 +53,7 @@ function backup_for { function tag_for { case $1 in lts|lta|lts-audit|lta-audit) - tag="enterprise" + tag="2025-lta-enterprise" ;; latest|latest-audit) tag="enterprise" @@ -67,6 +67,9 @@ function tag_for { cb|cb-audit) tag="community" ;; + 9) + tag="9-enterprise" + ;; *) logmsg "ERROR: Instance $1 has no corresponding tag" tag="NO_TAG" @@ -103,9 +106,9 @@ do echo sonar create -i $id -t "$(tag_for "$env")" -s $sqport -p $pgport -f "$(backup_for "$env")" sonar create -i $id -t "$(tag_for "$env")" -s $sqport -p $pgport -f "$(backup_for "$env")" 1>$IT_LOG_FILE 2>&1 export SONAR_TOKEN=$SONAR_TOKEN_ADMIN_USER - if [[ "$env" =~ ^lts.*$ ]]; then - logmsg "Using LTS token" - export SONAR_TOKEN=$SONAR_TOKEN_LTS_ADMIN_USER + if [[ "$env" =~ ^9.*$ ]]; then + logmsg "Using 9 token" + export SONAR_TOKEN=$SONAR_TOKEN_9_ADMIN_USER fi export SONAR_HOST_URL="http://localhost:$sqport" fi @@ -210,7 +213,7 @@ do f="config-$env-rel.json"; run_test "$f" sonar-config -e logmsg 
"IT compare released and unreleased $env" - for f in measures findings loc + for f in measures loc do root="$TMP/$f-$env" announce_test "$f-$env diff" diff --git a/test/unit/conftest.py b/test/unit/conftest.py index 3bf8459f..9d418eb3 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -48,9 +48,9 @@ def create_test_object(a_class: type, key: str) -> any: @pytest.fixture(autouse=True) def run_around_tests(): util.start_logging() - url = util.TEST_SQ.url + url = util.TEST_SQ.local_url yield - util.TEST_SQ.url = url + util.TEST_SQ.local_url = url @pytest.fixture diff --git a/test/unit/test_groups.py b/test/unit/test_groups.py index 08807ae9..8488aa71 100644 --- a/test/unit/test_groups.py +++ b/test/unit/test_groups.py @@ -78,7 +78,7 @@ def test_size() -> None: def test_url() -> None: gr = groups.Group.get_object(endpoint=util.SQ, name="sonar-users") - assert gr.url() == f"{util.SQ.url}/admin/groups" + assert gr.url() == f"{util.SQ.external_url}/admin/groups" def test_add_non_existing_user(get_test_group: Generator[groups.Group], get_test_user: Generator[users.User]) -> None: diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 155d2503..7138c7d0 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -211,7 +211,7 @@ def test_request_error() -> None: """test_request_error""" issues_d = issues.search_by_project(endpoint=tutil.TEST_SQ, project_key="project1") issue = list(issues_d.values())[0] - tutil.TEST_SQ.url = "http://localhost:3337" + tutil.TEST_SQ.local_url = "http://localhost:3337" assert not issue.add_comment("Won't work") assert not issue.assign("admin") diff --git a/test/unit/test_platform.py b/test/unit/test_platform.py index 89cc3edd..933c7b56 100644 --- a/test/unit/test_platform.py +++ b/test/unit/test_platform.py @@ -78,7 +78,7 @@ def test_sys_info() -> None: def test_wrong_url() -> None: - util.TEST_SQ.url = "http://localhost:3337" + util.TEST_SQ.local_url = "http://localhost:3337" util.TEST_SQ._sys_info = 
None with pytest.raises(RequestException): diff --git a/test/unit/test_qg.py b/test/unit/test_qg.py index fdf23b92..b9325107 100644 --- a/test/unit/test_qg.py +++ b/test/unit/test_qg.py @@ -35,9 +35,9 @@ def test_get_object(get_loaded_qg: Generator[qualitygates.QualityGate]) -> None: assert qg.name == util.TEMP_KEY assert str(qg) == f"quality gate '{util.TEMP_KEY}'" if util.SQ.version() < (10, 0, 0): - assert qg.url() == f"{util.SQ.url}/quality_gates/show/{qg.key}" + assert qg.url() == f"{util.SQ.external_url}/quality_gates/show/{qg.key}" else: - assert qg.url() == f"{util.SQ.url}/quality_gates/show/{util.TEMP_KEY}" + assert qg.url() == f"{util.SQ.external_url}/quality_gates/show/{util.TEMP_KEY}" qg2 = qualitygates.QualityGate.get_object(endpoint=util.SQ, name=util.TEMP_KEY) assert qg.projects() == {} assert qg2 is qg diff --git a/test/unit/test_qp.py b/test/unit/test_qp.py index a542f654..e44ca56e 100644 --- a/test/unit/test_qp.py +++ b/test/unit/test_qp.py @@ -96,7 +96,7 @@ def test_inheritance(get_test_qp: Generator[qualityprofiles.QualityProfile]) -> def test_read(get_test_qp: Generator[qualityprofiles.QualityProfile]) -> None: """test_read""" qp = get_test_qp - assert qp.url() == f"{util.SQ.url}/profiles/show?language=py&name={util.TEMP_KEY}" + assert qp.url() == f"{util.SQ.external_url}/profiles/show?language=py&name={util.TEMP_KEY}" new_qp = qualityprofiles.QualityProfile.read(util.SQ, util.TEMP_KEY, "py") assert qp is new_qp diff --git a/test/unit/test_sif.py b/test/unit/test_sif.py index 6f87340d..af2dae5d 100644 --- a/test/unit/test_sif.py +++ b/test/unit/test_sif.py @@ -112,8 +112,8 @@ def test_audit_sif_ut() -> None: assert sysinfo.ce_jvm_cmdline() == "-Xmx1G -Xms128m -XX:+HeapDumpOnOutOfMemoryError" assert sysinfo.search_jvm_cmdline() == "-Xmx1G -Xms1G -XX:+HeapDumpOnOutOfMemoryError" sysinfo = sif.Sif(json_sif, concerned_object=util.SQ) - assert sysinfo.url() == util.SQ.url - assert str(sysinfo).split("@")[1] == util.SQ.url + assert sysinfo.url() == 
util.SQ.external_url + assert str(sysinfo).split("@")[1] == util.SQ.external_url def test_modified_sif() -> None: diff --git a/test/unit/test_tasks.py b/test/unit/test_tasks.py index d5bf0ffa..69dcc41d 100644 --- a/test/unit/test_tasks.py +++ b/test/unit/test_tasks.py @@ -29,7 +29,7 @@ def test_task() -> None: """test_task""" task = tasks.search_last(component_key=tutil.LIVE_PROJECT, endpoint=tutil.SQ, type="REPORT") assert task is not None - assert task.url() == f"{tutil.SQ.url}/project/background_tasks?id={tutil.LIVE_PROJECT}" + assert task.url() == f"{tutil.SQ.external_url}/project/background_tasks?id={tutil.LIVE_PROJECT}" task.sq_json = None task._load() assert task.sq_json is not None diff --git a/test/unit/test_users.py b/test/unit/test_users.py index abe60da8..aadbc476 100644 --- a/test/unit/test_users.py +++ b/test/unit/test_users.py @@ -57,7 +57,7 @@ def test_create_delete(get_test_user: Generator[users.User]) -> None: user.name = "TEMP_USER" user.refresh() assert user.name == f"User name {util.TEMP_KEY}" - assert user.url() == f"{util.SQ.url}/admin/users" + assert user.url() == f"{util.SQ.external_url}/admin/users" def test_add_to_group(get_test_user: Generator[users.User]) -> None: