diff --git a/src/anchore_security_cli/identifiers/index/sqlite.py b/src/anchore_security_cli/identifiers/index/sqlite.py
index 99a5980..1ac7a10 100644
--- a/src/anchore_security_cli/identifiers/index/sqlite.py
+++ b/src/anchore_security_cli/identifiers/index/sqlite.py
@@ -64,47 +64,30 @@ def _create_indices(self, conn: sqlite3.Connection) -> None:
     def _render(self, data_path: str, conn: sqlite3.Connection):
         for batch in batched(iglob(os.path.join(data_path, "**/ANCHORE-*.toml"), recursive=True), n=5000, strict=False):
             for file in batch:
-                self._logger.trace(f"Start rendering index data for file at {file}")
-                with open(file, "rb") as f:
-                    data = tomllib.load(f)
+                try:
+                    self._logger.trace(f"Start rendering index data for file at {file}")
+                    with open(file, "rb") as f:
+                        data = tomllib.load(f)
 
-                s = data["security"]
-                anchore_id = parse(s["id"])
-                conn.execute(
-                    """
-                    INSERT INTO `security_identifiers` (
-                        `anchore_id`, `year`, `index`, `allocated`
-                    ) VALUES (?, ?, ?, ?)
-                    """,
-                    (
-                        s["id"],
-                        anchore_id.year,
-                        anchore_id.index,
-                        s["allocated"],
-                    ),
-                )
-
-                # Always insert an alias record for the `anchore` provider to the record.
-                # This gives a single simple lookup mechanism for indexing into the `security_identifiers`
-                # table given any id.
-                conn.execute(
-                    """
-                    INSERT INTO `security_aliases` (
-                        `anchore_id`, `alias_provider`, `alias_id`
-                    ) VALUES (?, ?, ?)
-                    """,
-                    (
-                        s["id"],
-                        "anchore",
-                        s["id"],
-                    ),
-                )
-
-                for duplicate in s.get("duplicates", []):
-                    if duplicate == s["id"]:
-                        self._logger.warning(f"Unnecessary duplicate: {duplicate}")
-                        continue
+                    s = data["security"]
+                    anchore_id = parse(s["id"])
+                    conn.execute(
+                        """
+                        INSERT INTO `security_identifiers` (
+                            `anchore_id`, `year`, `index`, `allocated`
+                        ) VALUES (?, ?, ?, ?)
+                        """,
+                        (
+                            s["id"],
+                            anchore_id.year,
+                            anchore_id.index,
+                            s["allocated"],
+                        ),
+                    )
 
+                    # Always insert an alias record for the `anchore` provider to the record.
+                    # This gives a single simple lookup mechanism for indexing into the `security_identifiers`
+                    # table given any id.
                     conn.execute(
                         """
                         INSERT INTO `security_aliases` (
@@ -114,12 +97,15 @@ def _render(self, data_path: str, conn: sqlite3.Connection):
                         (
                             s["id"],
                             "anchore",
-                            duplicate,
+                            s["id"],
                         ),
                     )
 
-                for provider, aliases in s.get("aliases", {}).items():
-                    for alias in aliases:
+                    for duplicate in s.get("duplicates", []):
+                        if duplicate == s["id"]:
+                            self._logger.warning(f"Unnecessary duplicate: {duplicate}")
+                            continue
+
                         conn.execute(
                             """
                             INSERT INTO `security_aliases` (
@@ -128,9 +114,27 @@ def _render(self, data_path: str, conn: sqlite3.Connection):
                             """,
                             (
                                 s["id"],
-                                provider,
-                                alias,
+                                "anchore",
+                                duplicate,
                             ),
                         )
-                self._logger.trace(f"Finish rendering index data for file at {file}")
+
+                    for provider, aliases in s.get("aliases", {}).items():
+                        for alias in aliases:
+                            conn.execute(
+                                """
+                                INSERT INTO `security_aliases` (
+                                    `anchore_id`, `alias_provider`, `alias_id`
+                                ) VALUES (?, ?, ?)
+                                """,
+                                (
+                                    s["id"],
+                                    provider,
+                                    alias,
+                                ),
+                            )
+                    self._logger.trace(f"Finish rendering index data for file at {file}")
+                except Exception:
+                    self._logger.error(f"Unable to render file at {file}")
+                    raise
         conn.commit()
diff --git a/src/anchore_security_cli/vuln_index/spec/sqlite.py b/src/anchore_security_cli/vuln_index/spec/sqlite.py
index 9267916..3e0d872 100644
--- a/src/anchore_security_cli/vuln_index/spec/sqlite.py
+++ b/src/anchore_security_cli/vuln_index/spec/sqlite.py
@@ -121,27 +121,31 @@ def _toml_to_json(self, toml_data: dict) -> dict: # noqa: C901, PLR0912
     def _render(self, data_path: str, conn: sqlite3.Connection):
         for batch in batched(iglob(os.path.join(data_path, "**/ANCHORE-*.toml"), recursive=True), n=5000, strict=False):
             for file in batch:
-                self._logger.trace(f"Start rendering index data for file at {file}")
-                with open(file, "rb") as f:
-                    toml_data = tomllib.load(f)
-
-                anchore_id = parse(toml_data["vuln"]["id"])
-                record = self._toml_to_json(toml_data)
-                jsonified = json.dumps(record, sort_keys=True, cls=DateTimeEncoder)
-                self.validator.validate(jsonified)
-
-                conn.execute(
-                    """
-                    INSERT INTO `records` (
-                        `anchore_id`, `year`, `index`, `spec`
-                    ) VALUES (?, ?, ?, ?)
-                    """,
-                    (
-                        str(anchore_id),
-                        anchore_id.year,
-                        anchore_id.index,
-                        jsonified,
-                    ),
-                )
-                self._logger.trace(f"Finish rendering index data for file at {file}")
+                try:
+                    self._logger.trace(f"Start rendering index data for file at {file}")
+                    with open(file, "rb") as f:
+                        toml_data = tomllib.load(f)
+
+                    anchore_id = parse(toml_data["vuln"]["id"])
+                    record = self._toml_to_json(toml_data)
+                    jsonified = json.dumps(record, sort_keys=True, cls=DateTimeEncoder)
+                    self.validator.validate(jsonified)
+
+                    conn.execute(
+                        """
+                        INSERT INTO `records` (
+                            `anchore_id`, `year`, `index`, `spec`
+                        ) VALUES (?, ?, ?, ?)
+                        """,
+                        (
+                            str(anchore_id),
+                            anchore_id.year,
+                            anchore_id.index,
+                            jsonified,
+                        ),
+                    )
+                    self._logger.trace(f"Finish rendering index data for file at {file}")
+                except Exception:
+                    self._logger.error(f"Unable to render file at {file}")
+                    raise
         conn.commit()