Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
94 changes: 49 additions & 45 deletions src/anchore_security_cli/identifiers/index/sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,47 +64,30 @@ def _create_indices(self, conn: sqlite3.Connection) -> None:
def _render(self, data_path: str, conn: sqlite3.Connection):
    """Populate the identifier index tables from on-disk TOML records.

    Walks ``data_path`` recursively for ``ANCHORE-*.toml`` files and, for each
    one, inserts a row into ``security_identifiers`` plus one ``security_aliases``
    row per known alias (the canonical ``anchore`` self-alias, any declared
    duplicates, and any provider-specific aliases).

    :param data_path: root directory containing ANCHORE-*.toml data files.
    :param conn: open SQLite connection; committed once after all files render.
    :raises Exception: re-raises any per-file failure after logging which file
        could not be rendered.
    """
    # Files are processed in batches of 5000 purely to bound the iglob
    # generator consumption; each file is still handled individually.
    for batch in batched(iglob(os.path.join(data_path, "**/ANCHORE-*.toml"), recursive=True), n=5000, strict=False):
        for file in batch:
            try:
                self._logger.trace(f"Start rendering index data for file at {file}")
                with open(file, "rb") as f:
                    data = tomllib.load(f)

                s = data["security"]
                anchore_id = parse(s["id"])
                conn.execute(
                    """
                    INSERT INTO `security_identifiers` (
                        `anchore_id`, `year`, `index`, `allocated`
                    ) VALUES (?, ?, ?, ?)
                    """,
                    (
                        s["id"],
                        anchore_id.year,
                        anchore_id.index,
                        s["allocated"],
                    ),
                )

                # Always insert an alias record for the `anchore` provider to the record.
                # This gives a single simple lookup mechanism for indexing into the `security_identifiers`
                # table given any id.
                conn.execute(
                    """
                    INSERT INTO `security_aliases` (
                        `anchore_id`, `alias_provider`, `alias_id`
                    ) VALUES (?, ?, ?)
                    """,
                    (
                        s["id"],
                        "anchore",
                        s["id"],
                    ),
                )

                # Duplicates are other anchore ids that refer to the same record;
                # a duplicate equal to the record's own id is redundant (the
                # self-alias above already covers it), so warn and skip it.
                for duplicate in s.get("duplicates", []):
                    if duplicate == s["id"]:
                        self._logger.warning(f"Unnecessary duplicate: {duplicate}")
                        continue

                    conn.execute(
                        """
                        INSERT INTO `security_aliases` (
                            `anchore_id`, `alias_provider`, `alias_id`
                        ) VALUES (?, ?, ?)
                        """,
                        (
                            s["id"],
                            "anchore",
                            duplicate,
                        ),
                    )

                # Provider-keyed aliases (e.g. external vulnerability ids) map
                # each foreign id back to this record's anchore id.
                for provider, aliases in s.get("aliases", {}).items():
                    for alias in aliases:
                        conn.execute(
                            """
                            INSERT INTO `security_aliases` (
                                `anchore_id`, `alias_provider`, `alias_id`
                            ) VALUES (?, ?, ?)
                            """,
                            (
                                s["id"],
                                provider,
                                alias,
                            ),
                        )
                self._logger.trace(f"Finish rendering index data for file at {file}")
            except Exception:
                # Name the offending file before propagating: the caller's
                # traceback alone would not identify which TOML file failed.
                self._logger.error(f"Unable to render file at {file}")
                raise
    # Single commit for the whole render keeps the index build atomic-ish and
    # avoids per-row transaction overhead.
    conn.commit()
50 changes: 27 additions & 23 deletions src/anchore_security_cli/vuln_index/spec/sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,27 +121,31 @@ def _toml_to_json(self, toml_data: dict) -> dict: # noqa: C901, PLR0912
def _render(self, data_path: str, conn: sqlite3.Connection):
    """Populate the vulnerability-spec ``records`` table from TOML files.

    Walks ``data_path`` recursively for ``ANCHORE-*.toml`` files; each file is
    parsed, converted to the JSON spec shape, schema-validated, and inserted as
    one row keyed by its anchore id.

    :param data_path: root directory containing ANCHORE-*.toml data files.
    :param conn: open SQLite connection; committed once after all files render.
    :raises Exception: re-raises any per-file failure (parse, validation, or
        insert) after logging which file could not be rendered.
    """
    # Batching only bounds generator consumption from iglob; files are still
    # rendered one at a time.
    for batch in batched(iglob(os.path.join(data_path, "**/ANCHORE-*.toml"), recursive=True), n=5000, strict=False):
        for file in batch:
            try:
                self._logger.trace(f"Start rendering index data for file at {file}")
                with open(file, "rb") as f:
                    toml_data = tomllib.load(f)

                anchore_id = parse(toml_data["vuln"]["id"])
                record = self._toml_to_json(toml_data)
                # sort_keys gives a deterministic serialization; DateTimeEncoder
                # handles datetime values json.dumps cannot encode natively.
                jsonified = json.dumps(record, sort_keys=True, cls=DateTimeEncoder)
                # Validate BEFORE inserting so a bad record never lands in the DB.
                self.validator.validate(jsonified)

                conn.execute(
                    """
                    INSERT INTO `records` (
                        `anchore_id`, `year`, `index`, `spec`
                    ) VALUES (?, ?, ?, ?)
                    """,
                    (
                        str(anchore_id),
                        anchore_id.year,
                        anchore_id.index,
                        jsonified,
                    ),
                )
                self._logger.trace(f"Finish rendering index data for file at {file}")
            except Exception:
                # Name the offending file before propagating: the caller's
                # traceback alone would not identify which TOML file failed.
                self._logger.error(f"Unable to render file at {file}")
                raise
    # One commit for the whole render avoids per-row transaction overhead.
    conn.commit()