diff --git a/Sources/Fault/Entries/atpg.swift b/Sources/Fault/Entries/atpg.swift
index 1a8f244..72740b8 100644
--- a/Sources/Fault/Entries/atpg.swift
+++ b/Sources/Fault/Entries/atpg.swift
@@ -12,97 +12,137 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import ArgumentParser
 import BigInt
 import Collections
-import ArgumentParser
-import CoreFoundation // Not automatically imported on Linux
+import CoreFoundation  // Not automatically imported on Linux
 import Defile
 import Foundation
 import PythonKit
 import Yams
 
-
 extension Fault {
     struct ATPG: ParsableCommand {
         static let configuration = CommandConfiguration(
-            abstract: "Generate/import test vectors for a particular circuit and calculate coverage."
+            abstract:
+                "Generate/import test vectors for a particular circuit and calculate coverage."
+        )
+
+        @Option(
+            name: [.short, .long], help: "Path to the output JSON file. (Default: input + .tv.json)"
         )
-
-        @Option(name: [.short, .long], help: "Path to the output JSON file. (Default: input + .tv.json)")
         var output: String?
-
+
         @Option(help: "Path to the output SVF file. (Default: input + .tv.svf)")
         var outputSvf: String?
-
-        @Option(name: [.long, .customLong("output-faultPoints")], help: "Path to the output yml file listing all generated fault points. (Default: nil)")
+
+        @Option(
+            name: [.long, .customLong("output-faultPoints")],
+            help: "Path to the output yml file listing all generated fault points. (Default: nil)")
         var outputFaultPoints: String?
-
-        @Option(name: [.long, .customLong("output-covered")], help: "Path to the output yml file listing coverage metadata, i.e., ratio and fault points covered. (Default: nil)")
+
+        @Option(
+            name: [.long, .customLong("output-covered")],
+            help:
+                "Path to the output yml file listing coverage metadata, i.e., ratio and fault points covered. (Default: nil)"
+        )
         var outputCoverageMetadata: String?
-
-        @Option(name: [.short, .long, .customLong("cellModel")], help: "A Verilog model with which standard cells can be simulated.")
+
+        @Option(
+            name: [.short, .long, .customLong("cellModel")],
+            help: "A Verilog model with which standard cells can be simulated.")
         var cellModel: String
-
-        @Option(name: [.customShort("v"), .long], help: "Number of test vectors to generate in the first batch.")
+
+        @Option(
+            name: [.customShort("v"), .long],
+            help: "Number of test vectors to generate in the first batch.")
         var tvCount: Int = 100
-
-        @Option(name: [.customShort("r"), .long], help: "Increment in test vector count in subsequent batches should sufficient coverage not be reached.")
+
+        @Option(
+            name: [.customShort("r"), .long],
+            help:
+                "Increment in test vector count in subsequent batches should sufficient coverage not be reached."
+        )
         var increment: Int = 50
-
-        @Option(name: [.short, .long], help: "The minimum coverage to reach before ceasing increments. If set to 0, only the initial batch is run.")
+
+        @Option(
+            name: [.short, .long],
+            help:
+                "The minimum coverage to reach before ceasing increments. If set to 0, only the initial batch is run."
+        )
         var minCoverage: Float = 80
-
-        @Option(help: "Ceiling for Test Vector increments: if this number is reached, no more increments will occur regardless the coverage.")
+
+        @Option(
+            help:
+                "Ceiling for Test Vector increments: if this number is reached, no more increments will occur regardless of the coverage."
+        )
         var ceiling: Int?
- + @Option(help: "Type of the pseudo-random internal test-vector-generator.") var tvGen: String = "swift" - - @Option(help: "A \(MemoryLayout.size)-byte value to use as an RNG seed for test vector generators, provided as a hexadecimal string (without 0x).") + + @Option( + help: + "A \(MemoryLayout.size)-byte value to use as an RNG seed for test vector generators, provided as a hexadecimal string (without 0x)." + ) var rngSeed: String = "DEADCAFEDEADF00D" - - @Option(name: [.customShort("g"), .long], help: "Use an external TV Generator: Atalanta or PODEM.") + + @Option( + name: [.customShort("g"), .long], + help: "Use an external TV Generator: Atalanta or PODEM.") var etvGen: String? - - @Option(name: [.short, .long], help: "Netlist in bench format. (Required iff generator is set to Atalanta or PODEM.)") + + @Option( + name: [.short, .long], + help: "Netlist in bench format. (Required iff generator is set to Atalanta or PODEM.)") var bench: String? - + @Flag(help: "Generate only one testbench for inspection, and do not delete it.") var sampleRun: Bool = false - + @OptionGroup var bypass: BypassOptions - - @Option(help: "If provided, this JSON file's test vectors are simulated and no generation is attempted.") + + @Option( + help: + "If provided, this JSON file's test vectors are simulated and no generation is attempted." + ) var externalTVSet: String? - - @Option(help: "If provided, this JSON file's test vector are used as the initial set of test vectors, with iterations taking place with them in mind.") + + @Option( + help: + "If provided, this JSON file's test vector are used as the initial set of test vectors, with iterations taking place with them in mind." + ) var iteratingUpon: String? - - @Option(name: [.customShort("D"), .customLong("define")], help: "Define statements to include during simulations.") + + @Option( + name: [.customShort("D"), .customLong("define")], + help: "Define statements to include during simulations.") var defines: [String] = [] - - @Option(name: [.customShort("I"), .customLong("include")], help: "Extra verilog models to include during simulations.") + + @Option( + name: [.customShort("I"), .customLong("include")], + help: "Extra verilog models to include during simulations.") var includes: [String] = [] - + @Argument(help: "The cutaway netlist to generate patterns for.") var file: String - + mutating func run() throws { - + if !TVGeneratorFactory.validNames.contains(tvGen) { throw ValidationError("Invalid test-vector generator \(tvGen).") } - + let fileManager = FileManager() guard fileManager.fileExists(atPath: file) else { throw ValidationError("File '\(file)' not found.") } - + guard fileManager.fileExists(atPath: cellModel) else { throw ValidationError("Cell model file '\(cellModel)' not found.") } - + if !cellModel.hasSuffix(".v"), !cellModel.hasSuffix(".sv") { Stderr.print( "Warning: Cell model file provided does not end with .v or .sv." @@ -111,7 +151,7 @@ extension Fault { let jsonOutput = output ?? file.replacingExtension(".cut.v", with: ".tv.json") let svfOutput = outputSvf ?? file.replacingExtension(".cut.v", with: ".tv.svf") - + // MARK: Importing Python and Pyverilog let parse = Python.import("pyverilog.vparser.parser").parse @@ -160,29 +200,31 @@ extension Fault { Stderr.print("Unknown external test vector generator '\(tvGenerator)'.") Foundation.exit(EX_USAGE) } - - let benchUnwrapped = bench! // Program exits if etvGen.value isn't nil and bench.value is or vice versa + + let benchUnwrapped = bench! 
// Program exits if etvGen.value isn't nil and bench.value is or vice versa if !fileManager.fileExists(atPath: benchUnwrapped) { throw ValidationError("Bench file '\(benchUnwrapped)' not found.") } - (etvSetVectors, etvSetInputs) = etvgen.generate(file: benchUnwrapped, module: "\(definition.name)") + (etvSetVectors, etvSetInputs) = etvgen.generate( + file: benchUnwrapped, module: "\(definition.name)") if etvSetVectors.count == 0 { - Stderr.print("Bench netlist appears invalid (no vectors generated). Are you sure there are no floating nets/outputs?") + Stderr.print( + "Bench netlist appears invalid (no vectors generated). Are you sure there are no floating nets/outputs?" + ) Foundation.exit(EX_DATAERR) } else { - print("Generated \(etvSetVectors.count) test vectors using external utilties to verify.") + print( + "Generated \(etvSetVectors.count) test vectors using external utilties to verify." + ) } } let tvMinimumCoverage = minCoverage / 100 - let finalTvCeiling: Int = ceiling ?? ( - etvSetVectors.count == 0 ? - 1000 : - etvSetVectors.count - ) - + let finalTvCeiling: Int = + ceiling ?? (etvSetVectors.count == 0 ? 1000 : etvSetVectors.count) + let finalRNGSeed = UInt(rngSeed, radix: 16)! do { @@ -217,9 +259,7 @@ extension Fault { evtInputsMinusIgnored.append(input) } } - print(inputsMinusIgnored); - print(evtInputsMinusIgnored); - assert(inputsMinusIgnored.count == evtInputsMinusIgnored.count); + assert(inputsMinusIgnored.count == evtInputsMinusIgnored.count) inputsMinusIgnored = evtInputsMinusIgnored } @@ -232,7 +272,7 @@ extension Fault { } else { let minimum = min(port.from, port.to) let maximum = max(port.from, port.to) - for i in minimum ... maximum { + for i in minimum...maximum { faultPoints.insert("\(port.name) [\(i)]") } } @@ -257,10 +297,14 @@ extension Fault { } if warnAboutDFF { - print("Warning: D-flipflops were found in this netlist. Are you sure you ran it through 'fault cut'?") + print( + "Warning: D-flipflops were found in this netlist. Are you sure you ran it through 'fault cut'?" + ) } - print("Found \(faultPoints.count) fault sites in \(gateCount) gates and \(ports.count) ports.") + print( + "Found \(faultPoints.count) fault sites in \(gateCount) gates and \(ports.count) ports." 
+            )
 
             // MARK: Load Initial Set
 
@@ -316,16 +360,18 @@ extension Fault {
                 coverageList: result.coverageList
             )
             let jsonRawOutput = jsonOutput.replacingExtension(".tv.json", with: ".raw_tv.json")
-
+
             print("Writing raw generated test vectors in Fault JSON format to \(jsonOutput)…")
             try encoder.encode(rawTVInfo).write(to: URL(fileURLWithPath: jsonRawOutput))
-
+
             let tvInfo = TVInfo(
                 inputs: inputsMinusIgnored,
                 outputs: outputs,
                 coverageList: Compactor.compact(coverageList: result.coverageList)
             )
-            print("Writing compacted generated test vectors in Fault JSON format to \(jsonOutput)…")
+            print(
+                "Writing compacted generated test vectors in Fault JSON format to \(jsonOutput)…"
+            )
             try encoder.encode(tvInfo).write(to: URL(fileURLWithPath: jsonOutput))
 
             // try File.open(svfOutput, mode: .write) {
@@ -334,7 +380,8 @@ extension Fault {
             // }
 
             if let coverageMetaFilePath = outputCoverageMetadata {
-                print("Writing YAML file of final coverage metadata to \(coverageMetaFilePath)…")
+                print(
+                    "Writing YAML file of final coverage metadata to \(coverageMetaFilePath)…")
                 try File.open(coverageMetaFilePath, mode: .write) {
                     try $0.write(string: YAMLEncoder().encode(result.coverageMeta))
                 }
diff --git a/Sources/Fault/Synthesis.swift b/Sources/Fault/Synthesis.swift
index 96d2957..10a6da2 100644
--- a/Sources/Fault/Synthesis.swift
+++ b/Sources/Fault/Synthesis.swift
@@ -25,56 +25,56 @@ enum Synthesis {
     ) -> String {
         let opt = optimize ? "opt" : ""
         return """
-        # read liberty
-        read_liberty -lib -ignore_miss_dir -setattr blackbox \(libertyFile)
+            # read liberty
+            read_liberty -lib -ignore_miss_dir -setattr blackbox \(libertyFile)
 
-        # read black boxes
-        read_verilog -sv -lib \(blackboxedModules.map { "'\($0)'" }.joined(separator: " "))
+            # read black boxes
+            read_verilog -sv -lib \(blackboxedModules.map { "'\($0)'" }.joined(separator: " "))
 
-        # read design
-        read_verilog -sv \(files.map { "'\($0)'" }.joined(separator: " "))
+            # read design
+            read_verilog -sv \(files.map { "'\($0)'" }.joined(separator: " "))
 
-        # check design hierarchy
-        hierarchy \(checkHierarchy ? "-check" : "") -top \(module)
-        flatten;
+            # check design hierarchy
+            hierarchy \(checkHierarchy ? "-check" : "") -top \(module)
+            flatten;
 
-        # translate processes (always blocks)
-        proc; \(opt)
+            # translate processes (always blocks)
+            proc; \(opt)
 
-        # detect and optimize FSM encodings
-        fsm; \(opt)
+            # detect and optimize FSM encodings
+            fsm; \(opt)
 
-        # implement memories (arrays)
-        memory; \(opt)
+            # implement memories (arrays)
+            memory; \(opt)
 
-        # convert to gate logic
-        techmap; \(opt)
+            # convert to gate logic
+            techmap; \(opt)
 
-        # flatten
-        flatten; \(opt)
+            # flatten
+            flatten; \(opt)
 
-        # mapping flip-flops to mycells.lib
-        dfflibmap -liberty \(libertyFile)
+            # mapping flip-flops to mycells.lib
+            dfflibmap -liberty \(libertyFile)
 
-        # expose dff
-        \(cutting ? "expose -cut -evert-dff; \(opt)" : "")
+            # expose dff
+            \(cutting ? "expose -cut -evert-dff; \(opt)" : "")
 
-        # mapping logic to mycells.lib
-        abc -liberty \(libertyFile)
-        splitnets -ports
+            # mapping logic to mycells.lib
+            abc -liberty \(libertyFile)
+            splitnets
 
-        # print gate count
-        stat
+            # print gate count
+            stat
 
-        # cleanup
-        opt_clean -purge
+            # cleanup
+            opt_clean -purge
 
-        # names
-        # autoname
+            # names
+            # autoname
 
-        write_verilog -noexpr -nohex -nodec -defparam \(output)+attrs
-        write_verilog -noexpr -noattr -noexpr -nohex -nodec -defparam \(output)
-        # write_blif -gates -unbuf DFFSR D Q \(output).blif
-        """
+            write_verilog -noexpr -nohex -nodec -defparam \(output)+attrs
+            write_verilog -noexpr -noattr -noexpr -nohex -nodec -defparam \(output)
+            # write_blif -gates -unbuf DFFSR D Q \(output).blif
+            """
     }
 }
diff --git a/flake.lock b/flake.lock
index 2217483..a77599a 100644
--- a/flake.lock
+++ b/flake.lock
@@ -5,6 +5,7 @@
         "flake-compat": "flake-compat",
         "flake-utils": "flake-utils",
         "nixpkgs": [
+          "nl2bench",
           "quaigh",
           "nixpkgs"
         ],
@@ -25,6 +26,31 @@
         "type": "github"
       }
     },
+    "cargo2nix_2": {
+      "inputs": {
+        "flake-compat": "flake-compat_2",
+        "flake-utils": "flake-utils_2",
+        "nixpkgs": [
+          "quaigh",
+          "nixpkgs"
+        ],
+        "rust-overlay": "rust-overlay_2"
+      },
+      "locked": {
+        "lastModified": 1705129117,
+        "narHash": "sha256-LgdDHibvimzYhxBK3kxCk2gAL7k4Hyigl5KI0X9cijA=",
+        "owner": "cargo2nix",
+        "repo": "cargo2nix",
+        "rev": "ae19a9e1f8f0880c088ea155ab66cee1fa001f59",
+        "type": "github"
+      },
+      "original": {
+        "owner": "cargo2nix",
+        "ref": "release-0.11.0",
+        "repo": "cargo2nix",
+        "type": "github"
+      }
+    },
     "flake-compat": {
       "flake": false,
       "locked": {
@@ -41,6 +67,22 @@
         "type": "github"
       }
     },
+    "flake-compat_2": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1696426674,
+        "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "type": "github"
+      }
+    },
     "flake-utils": {
       "inputs": {
         "systems": "systems"
@@ -59,6 +101,24 @@
         "type": "github"
       }
     },
+    "flake-utils_2": {
+      "inputs": {
+        "systems": "systems_2"
+      },
+      "locked": {
+        "lastModified": 1694529238,
+        "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
     "libparse": {
       "inputs": {
         "nixpkgs": [
@@ -86,11 +146,11 @@
         "nixpkgs": "nixpkgs"
       },
       "locked": {
-        "lastModified": 1718284527,
-        "narHash": "sha256-J3Enh0VOQO9kW9xjSJmCly/6K4Jma8UL/LrCJd1/tP8=",
+        "lastModified": 1727685714,
+        "narHash": "sha256-WI7eCVT8fp4k6Iizky0UeFr/3vPnOat1P1gr+tLufRk=",
         "owner": "efabless",
         "repo": "nix-eda",
-        "rev": "90f2333a2dd634fa3462b6b23a8c99d4132d340e",
+        "rev": "3fc4884a62cd3b62230ee90b9687f5aaebe98543",
         "type": "github"
       },
       "original": {
@@ -115,24 +175,41 @@
         "type": "github"
       }
     },
+    "nixpkgs_2": {
+      "locked": {
+        "lastModified": 1717144377,
+        "narHash": "sha256-F/TKWETwB5RaR8owkPPi+SPJh83AQsm6KrQAlJ8v/uA=",
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "rev": "805a384895c696f802a9bf5bf4720f37385df547",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nixos",
+        "ref": "nixos-24.05",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
     "nl2bench": {
       "inputs": {
         "libparse": "libparse",
         "nix-eda": [
           "nix-eda"
-        ]
+        ],
+        "quaigh": "quaigh"
       },
       "locked": {
-        "lastModified": 1727023453,
-        "narHash": "sha256-dh7gV/vRmTFrG1sbVLewq3tieSonn8uHMgFHdasep+g=",
+        "lastModified": 1727695229,
+        "narHash": "sha256-WZUPoPKAilBWRfnRifMzwEXJePFuc2imN2XUNkGklRI=",
         "owner": "donn",
         "repo": "nl2bench",
-        "rev": "42f08f65b35c286af42478b5ce3efd68e559b60e",
+        "rev": "179d8dfdf7c0843a54fbadaeed007586525e1d77",
         "type": "github"
       },
       "original": {
         "owner": "donn",
-        "ref": "dirtiest_workaround_imaginable",
+        "ref": "pyosys",
         "repo": "nl2bench",
         "type": "github"
       }
@@ -140,22 +217,40 @@
     "quaigh": {
       "inputs": {
         "cargo2nix": "cargo2nix",
+        "nixpkgs": "nixpkgs_2"
+      },
+      "locked": {
+        "lastModified": 1722162293,
+        "narHash": "sha256-WILtL6WKXs5pB5Jujx9HIT2w1jiVTZymXC7DTuqLPEM=",
+        "owner": "coloquinte",
+        "repo": "quaigh",
+        "rev": "2fec998178d4e48c5379dd0a2025f8688797f99a",
+        "type": "github"
+      },
+      "original": {
+        "owner": "coloquinte",
+        "repo": "quaigh",
+        "type": "github"
+      }
+    },
+    "quaigh_2": {
+      "inputs": {
+        "cargo2nix": "cargo2nix_2",
         "nixpkgs": [
           "nix-eda",
           "nixpkgs"
         ]
       },
       "locked": {
-        "lastModified": 1722015420,
-        "narHash": "sha256-WILtL6WKXs5pB5Jujx9HIT2w1jiVTZymXC7DTuqLPEM=",
+        "lastModified": 1719607439,
+        "narHash": "sha256-ZRWbrI+HCpTZAbgTjqf4IWMuUspk++DI4DGeVTBNumQ=",
         "owner": "donn",
         "repo": "quaigh",
-        "rev": "39b14e2d3e45b5e04338cf22adeede35dddc3dd6",
+        "rev": "2253ddcfb8ba62683f1da54a54d06734126fb612",
         "type": "github"
       },
       "original": {
         "owner": "donn",
-        "ref": "fix_nix_linux",
         "repo": "quaigh",
         "type": "github"
       }
@@ -164,10 +259,39 @@
       "inputs": {
         "nix-eda": "nix-eda",
         "nl2bench": "nl2bench",
-        "quaigh": "quaigh"
+        "quaigh": "quaigh_2"
       }
     },
     "rust-overlay": {
+      "inputs": {
+        "flake-utils": [
+          "nl2bench",
+          "quaigh",
+          "cargo2nix",
+          "flake-utils"
+        ],
+        "nixpkgs": [
+          "nl2bench",
+          "quaigh",
+          "cargo2nix",
+          "nixpkgs"
+        ]
+      },
+      "locked": {
+        "lastModified": 1705112162,
+        "narHash": "sha256-IAM0+Uijh/fwlfoeDrOwau9MxcZW3zeDoUHc6Z3xfqM=",
+        "owner": "oxalica",
+        "repo": "rust-overlay",
+        "rev": "9e0af26ffe52bf955ad5575888f093e41fba0104",
+        "type": "github"
+      },
+      "original": {
+        "owner": "oxalica",
+        "repo": "rust-overlay",
+        "type": "github"
+      }
+    },
+    "rust-overlay_2": {
       "inputs": {
         "flake-utils": [
           "quaigh",
@@ -208,6 +332,21 @@
         "repo": "default",
         "type": "github"
       }
+    },
+    "systems_2": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
     }
   },
   "root": "root",
diff --git a/flake.nix b/flake.nix
index cf2d974..ca76007 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,11 +2,11 @@
   inputs = {
     nix-eda.url = github:efabless/nix-eda;
     nl2bench = {
-      url = github:donn/nl2bench/dirtiest_workaround_imaginable;
+      url = github:donn/nl2bench/pyosys;
       inputs.nix-eda.follows = "nix-eda";
     };
     quaigh = {
-      url = github:donn/quaigh/fix_nix_linux;
+      url = github:donn/quaigh;
       inputs.nixpkgs.follows = "nix-eda/nixpkgs";
     };
   };