Initial commit
This commit is contained in:
46
.github/report_processing/process_bom_files.py
vendored
Normal file
46
.github/report_processing/process_bom_files.py
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
# We could emit the BOM as a plain CSV, but post-processing lets us enrich each part with extra fields (e.g. cost).
|
||||
import csv
|
||||
import sys
|
||||
import chevron
|
||||
import datetime
|
||||
from pprint import pprint
|
||||
|
||||
# Path separator for the current platform.
# NOTE(review): "delimter" is a typo, kept because other code in this file
# references the name; os.sep would be the idiomatic replacement.
file_path_delimter = "\\" if sys.platform == "win32" else "/"
|
||||
|
||||
def load_bom(filename : str) -> dict:
    """Parse a KiCad BOM CSV into a dict ready for mustache rendering.

    Expects the CSV to have Refs, Value, Qty and Footprint columns.

    Returns a dict with "parts" (list of per-part dicts), "time", "date",
    "total_cost", "total_parts" and "project_name" keys.
    """
    now = datetime.datetime.now()

    # Derive the project name from the file's basename, e.g.
    # "out/widget_bom.csv" -> "widget_".
    # The original used filename.strip(".csv").strip("bom"), but str.strip
    # removes a *set of characters* from both ends, not a suffix, which
    # mangles names; removesuffix does what was intended.
    basename = filename.replace("\\", "/").split("/")[-1]
    project_name = basename.removesuffix(".csv").removesuffix("bom")

    out_dict = {
        "parts" : [],
        "time" : str(now.time()),
        "date" : now.date().strftime("%d-%m-%Y"),
        "total_cost" : 0,
        "total_parts" : 0,
        "project_name" : project_name
    }

    # newline="" is the documented requirement when handing a file to csv.
    with open(filename, "r", newline="") as csv_file:
        for row in csv.DictReader(csv_file):
            part_cost = 0  # placeholder until a pricing API call is added
            out_dict["total_parts"] += 1
            # Accumulate cost so the total is correct once part_cost is real.
            out_dict["total_cost"] += part_cost
            out_dict["parts"].append(
                {
                    "Reference" : row["Refs"],
                    "Value" : row["Value"],
                    "Quantity" : row["Qty"],
                    "part_number" : row["Footprint"],
                    "cost" : part_cost,
                }
            )
    return out_dict
|
||||
|
||||
def main():
    """CLI entry point.

    argv: [1] BOM csv path, [2] mustache template path, [3] output markdown path.
    """
    bom_path, template_path, output_path = sys.argv[1], sys.argv[2], sys.argv[3]
    context = load_bom(bom_path)
    # pprint(context)
    with open(template_path, "r") as template_file:
        rendered = chevron.render(template_file.read(), context)
    with open(output_path, "w") as output_file:
        output_file.write(rendered)
|
||||
|
||||
|
||||
|
||||
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||
57
.github/report_processing/process_drc_json.py
vendored
Normal file
57
.github/report_processing/process_drc_json.py
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
from violation import Violation
|
||||
import datetime
|
||||
import json
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
|
||||
def process_violation_list(drc_json : dict, list_name : str) -> None:
    """Replace drc_json[list_name] (a raw list of KiCad DRC violations)
    with a severity-grouped dict, in place.

    The resulting entry always has the same shape — "errors", "warns",
    "number_of_errors" and "number_of_warns" — even when the report does
    not contain the section, so template rendering never hits a missing key.
    """
    if list_name in drc_json:
        errors = []
        warns = []
        for raw_violation in drc_json[list_name]:
            v = Violation(raw_violation, "drc")
            if v.violation_type == "error":
                errors.append(v)
            if v.violation_type == "warn":
                warns.append(v)

        # Counts are derived from the lists instead of hand-maintained
        # counters, so they can never drift out of sync.
        drc_json[list_name] = {
            "errors" : errors,
            "warns" : warns,
            "number_of_errors" : len(errors),
            "number_of_warns" : len(warns),
        }
    else:
        # Section absent from the report: provide the same shape, empty.
        # (The original only set the two counters here, leaving "errors"
        # and "warns" missing — inconsistent with the branch above.)
        drc_json[list_name] = {
            "errors" : [],
            "warns" : [],
            "number_of_errors" : 0,
            "number_of_warns" : 0,
        }
|
||||
|
||||
def process_report(report : str) -> dict:
    """Parse a KiCad DRC JSON report string into a template-ready dict.

    Groups each violation section by severity (via process_violation_list)
    and adds "total_errors" / "total_warns" summary counts.
    """
    out_dict : dict = json.loads(report)
    # (Removed two dead, duplicated `number_of_errors = 0;` assignments.)

    sections = ("unconnected_items", "violations", "schematic_parity")
    for section in sections:
        process_violation_list(out_dict, section)

    out_dict.setdefault(
        "total_errors",
        sum(out_dict[section]["number_of_errors"] for section in sections)
    )
    out_dict.setdefault(
        "total_warns",
        sum(out_dict[section]["number_of_warns"] for section in sections)
    )

    return out_dict
|
||||
53
.github/report_processing/process_erc_json.py
vendored
Normal file
53
.github/report_processing/process_erc_json.py
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
from violation import Violation
|
||||
import datetime
|
||||
import json
|
||||
|
||||
class Sheet:
    """One schematic sheet from a KiCad ERC report, with its violations
    grouped by severity."""

    def __init__(self, json_obj : dict) -> None:
        """Build from one entry of the report's "sheets" list.

        json_obj must have "path" and "violations" keys.
        """
        # The sheet path doubles as the display name; the markdown
        # variant replaces spaces so it is usable in anchors/links.
        self.name : str = json_obj["path"]
        self.name_md : str = self.name.replace(" ", "-")
        self.number_of_errors : int = 0
        self.number_of_warns : int = 0
        self.errors : list[Violation] = list()
        self.warns : list[Violation] = list()

        for violation in json_obj["violations"]:
            v = Violation(violation, "erc")
            if v.violation_type == "error":
                self.errors.append(v)
                self.number_of_errors += 1
            if v.violation_type == "warn":
                self.warns.append(v)
                self.number_of_warns += 1

    def to_dict(self) -> dict:
        """Return a plain-dict snapshot of this sheet, with the Violation
        objects flattened to their attribute dicts.

        Operates on a copy of __dict__: the original returned
        self.__dict__ itself and overwrote self.errors / self.warns with
        lists of dicts, corrupting the instance after the first call.
        """
        out_dict = dict(self.__dict__)
        out_dict["errors"] = [error.__dict__ for error in self.errors]
        out_dict["warns"] = [warn.__dict__ for warn in self.warns]
        return out_dict
|
||||
|
||||
def process_report(report : str) -> dict:
    """Parse a KiCad ERC JSON report string into a template-ready dict.

    Each entry of "sheets" is converted through the Sheet class, and
    "total_errors" / "total_warns" summary counts are added.
    """
    parsed : dict = json.loads(report)

    sheets = []
    total_errors = 0
    total_warns = 0
    for raw_sheet in parsed["sheets"]:
        sheet = Sheet(raw_sheet)
        total_errors += sheet.number_of_errors
        total_warns += sheet.number_of_warns
        sheets.append(sheet)

    parsed["sheets"] = [sheet.to_dict() for sheet in sheets]
    parsed.setdefault("total_errors", total_errors)
    parsed.setdefault("total_warns", total_warns)

    return parsed
|
||||
50
.github/report_processing/process_json_reports.py
vendored
Normal file
50
.github/report_processing/process_json_reports.py
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
# usage: python process_json_reports.py report.json template.mustache outfile.md project_name
|
||||
import chevron
|
||||
import sys
|
||||
import datetime
|
||||
import json
|
||||
import process_erc_json
|
||||
import process_drc_json
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
def load_report(filename : str, project_name : str) -> dict:
    """Load an ERC or DRC JSON report and build its render context.

    The report type is inferred from the filename ("erc" or "drc",
    case-insensitive).  Adds "time", "date", "project_name" and a
    "has_violations" flag (True or None — chevron treats None as falsy,
    so sections render only when there are violations).

    Raises ValueError if the filename identifies neither report type
    (the original fell through and died later with a bare KeyError).
    """
    lowered = filename.lower()
    with open(filename, "r") as js:
        # elif, not two ifs: a filename matching both patterns must not
        # trigger a second read() on the already-exhausted file handle.
        if "erc" in lowered:
            out_dict = process_erc_json.process_report(js.read())
        elif "drc" in lowered:
            out_dict = process_drc_json.process_report(js.read())
        else:
            raise ValueError(
                f"cannot tell report type (erc/drc) from filename: {filename!r}"
            )

    now = datetime.datetime.now()
    out_dict.setdefault("time", str(now.time()))
    out_dict.setdefault("date", now.date().strftime("%d-%m-%Y"))
    out_dict.setdefault("project_name", project_name)
    out_dict.setdefault(
        "has_violations",
        True if out_dict["total_warns"] + out_dict["total_errors"] else None
    )

    return out_dict
|
||||
|
||||
def main():
    """CLI entry point.

    argv: [1] report json, [2] mustache template, [3] output markdown,
    [4] project name.
    """
    report_path, template_path, output_path = sys.argv[1], sys.argv[2], sys.argv[3]
    context = load_report(report_path, sys.argv[4])
    # pprint(context)
    with open(template_path, "r") as template_file:
        rendered = chevron.render(template_file.read(), context)
    with open(output_path, "w") as output_file:
        output_file.write(rendered)
|
||||
|
||||
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||
94
.github/report_processing/process_output_files.py
vendored
Normal file
94
.github/report_processing/process_output_files.py
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
import chevron
|
||||
import sys
|
||||
import datetime
|
||||
import json
|
||||
import glob
|
||||
from pathlib import Path
|
||||
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
# erc
|
||||
# {
|
||||
# "project_name" : "string",
|
||||
# "passing_erc" : "bool",
|
||||
# "erc_summary_link" : "link",
|
||||
# }
|
||||
|
||||
# drc
|
||||
# {
|
||||
# "project_name" : "string",
|
||||
# "passing_drc" : "bool",
|
||||
# "drc_summary_link" : "link",
|
||||
# }
|
||||
|
||||
# project
|
||||
# {
|
||||
# "project_name" : "string",
|
||||
# "project_link" : "link",
|
||||
# "schematic_link" : "link",
|
||||
# "gerber_link" : "link",
|
||||
# "bom_report_link" : "link",
|
||||
# "bom_csv_link" : "link"
|
||||
# }
|
||||
|
||||
# Sidecar file holding static readme fields (links, titles, ...) that are
# merged into the render context alongside the per-project report outputs.
EXTRAS_FILENAME = "readme_extras.json"
|
||||
|
||||
def load_json_file(filename : str) -> dict:
    """Read *filename* and return its parsed JSON contents.

    The original opened a garbled hard-coded path instead of using the
    filename argument at all.
    """
    with open(filename, "r") as js:
        return json.load(js)
|
||||
|
||||
def create_hash(filenames : list[str]) -> dict:
    """Merge the per-project report json files plus the extras file into a
    single context dict for rendering the readme template.

    filenames: all json output names, including EXTRAS_FILENAME (which is
    loaded separately and must exist on disk).  The input list is not
    modified (the original removed an entry from the caller's list).
    """
    # Filter instead of aliasing + .remove(): no caller-visible mutation.
    report_outs = [name for name in filenames if name != EXTRAS_FILENAME]

    # Use the module constant rather than repeating the literal filename.
    with open(EXTRAS_FILENAME, "r") as js:
        extras = json.loads(js.read())

    reports_dicts : list[dict] = [load_json_file(name) for name in report_outs]

    readme_hash = {
        **extras,
        "projects" : [],
        "did_error" : False,
        "multiple_projects" : None
    }
    # Fold every report describing the same project into one entry.
    for report in reports_dicts:
        for project in readme_hash["projects"]:
            if project["project_name"] == report["project_name"]:
                for key in report.keys():
                    project.setdefault(key, report[key])
                break
        else:
            readme_hash["projects"].append(report)

    pprint(readme_hash)

    for project in readme_hash["projects"]:
        # The reports encode these flags as the strings "true"/"false"
        # (see the emoji comparison below).  `not "false"` is False, so
        # the original `did_error |= not project[...]` could never fire;
        # normalise to a real bool and use it everywhere.
        erc_ok = project["passing_erc"] in (True, "true")
        drc_ok = project["passing_drc"] in (True, "true")
        if not (erc_ok and drc_ok):
            readme_hash["did_error"] = True
        project.setdefault("passing_erc_emoji", "✅" if erc_ok else "❌")
        project.setdefault("passing_drc_emoji", "✅" if drc_ok else "❌")

    readme_hash["multiple_projects"] = True if len(readme_hash["projects"]) > 1 else None

    pprint(readme_hash)
    return readme_hash
|
||||
|
||||
def main():
    """CLI entry point.

    argv[1] is the readme mustache template; every remaining argument is
    a json file produced by the report-processing steps.
    """
    print(sys.argv)

    template_path, *json_files = sys.argv[1:]

    context = create_hash(json_files)

    with open(template_path, "r") as template_file:
        rendered = chevron.render(template_file.read(), context)
    with open("README.md", "w") as readme_file:
        readme_file.write(rendered)
|
||||
|
||||
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||
3
.github/report_processing/requirements.txt
vendored
Normal file
3
.github/report_processing/requirements.txt
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
chevron
|
||||
# pathlib — not installed: it has been part of the standard library since
# Python 3.4, and the PyPI "pathlib" package is an outdated backport.
|
||||
ruamel.yaml
|
||||
18
.github/report_processing/violation.py
vendored
Normal file
18
.github/report_processing/violation.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
class Violation:
    """A single ERC/DRC violation: severity, description, and a
    human-readable listing of the offending items with positions in mm."""

    def __init__(self, violation : dict, violation_report_type : str = "drc") -> None:
        """Build from one "violations" entry of a KiCad json report.

        violation_report_type is "erc" or "drc".  The original default was
        the *list* ["erc", "drc"] — a mutable default of the wrong type
        that never compared equal to "erc", i.e. it silently behaved like
        "drc"; the "drc" default preserves that behaviour with a sane type.
        """
        self.violation_type : str = \
            "warn" if violation["severity"] == "warning" else "error"
        self.name : str = violation["description"]
        self.content : str = ""

        # violation_report_type exists purely because of a bug in KiCad's
        # json output where erc reports give positions in decimeters, so
        # erc coordinates are scaled x100 into mm.
        scale = 100.0 if violation_report_type == "erc" else 1.0
        for item in violation["items"]:
            x : float = float(item["pos"]["x"]) * scale
            y : float = float(item["pos"]["y"]) * scale
            # Note: "{:.4}" is 4 *significant digits*, not 4 decimal
            # places — kept to match the original output format.
            self.content += item["description"] + f" at [x = {x:.4}mm, y = {y:.4}mm]\n"
|
||||
Reference in New Issue
Block a user