#!/usr/bin/python3

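"""Report which changed files pass the `acceptable` filters.

Walks the data directory for numbered JSON change records, collects the files
touched by the latest patch-set of each change in an accepted project, and
prints the `acceptable.file` verdict for every unique (project, file) pair.
"""
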
import argparse
import json
import os
import re
from typing import Set, Tuple

import libvoters.acceptable as acceptable


class subcmd:
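    """Register and implement the `verify-files` subcommand."""
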
    def __init__(self, parser: argparse._SubParsersAction) -> None:
        p = parser.add_parser(
            "verify-files",
            help="Check all files against `acceptable` settings.",
        )
        p.set_defaults(cmd=self)

    def run(self, args: argparse.Namespace) -> int:
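        """Check every changed file found in `args.dir` against the
        acceptable rules.

        Returns 0 on success, or 1 if the data directory is missing.
        """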
        data_path: str = args.dir

        if not os.path.exists(data_path) or not os.path.isdir(data_path):
            print(
                f"Data directory {data_path} does not exist or is not a directory."
            )
            return 1

        # Set to store unique (project, file) pairs
        unique_files: Set[Tuple[str, str]] = set()
        rejected_projects: Set[str] = set()

        # Read all JSON files from data directory
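        # Each record is assumed to look roughly like the sketch below; the
        # shape is inferred from the fields accessed in this loop and the
        # values are illustrative only:
        #
        #   {
        #       "project": "openbmc/some-repo",
        #       "patchSets": [
        #           {"number": 1, "files": [{"file": "README.md"}]},
        #           {"number": 2, "files": [{"file": "meson.build"}]}
        #       ]
        #   }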
        for filename in sorted(os.listdir(data_path)):
            if not re.match(r"[0-9]+\.json", filename):
                continue

            file_path = os.path.join(data_path, filename)
            if not os.path.isfile(file_path):
                continue

            try:
                with open(file_path, "r") as f:
                    data = json.load(f)

                project = data.get("project", "")
                if not project:
                    continue

                # Check if project is acceptable
                if not acceptable.project(project):
                    if project not in rejected_projects:
                        print(f"Rejected project: {project}")
                        rejected_projects.add(project)
                    continue

                # Extract files from the last patchSet
                patch_sets = data.get("patchSets", [])
                if patch_sets:
                    last_patch_set = sorted(
                        patch_sets, key=lambda x: x["number"]
                    )[-1]
                    files = last_patch_set.get("files", [])
                    for file_data in files:
                        file_name = file_data.get("file", "")
                        if file_name:
                            unique_files.add((project, file_name))

            except (json.JSONDecodeError, KeyError) as e:
                print(f"Error processing {filename}: {e}")
                continue

        # Verify each unique file
        for project, file_name in sorted(unique_files):
            result = acceptable.file(project, file_name)
            print(f"{project}:{file_name} -> {result}")

        return 0
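
# Minimal wiring sketch (illustrative only): how the `subcmd` class above could
# plug into an argparse-based CLI. The real entry point lives elsewhere in
# libvoters; the top-level "--dir" argument and its default are assumptions.
#
#   parser = argparse.ArgumentParser()
#   parser.add_argument("--dir", default="data")
#   subparsers = parser.add_subparsers()
#   subcmd(subparsers)
#   args = parser.parse_args(["verify-files"])
#   raise SystemExit(args.cmd.run(args))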