Skip to content

Commit

Permalink
Merge pull request #18 from CoinFabrik/add-ci
Browse files Browse the repository at this point in the history
Add general-rust ci
  • Loading branch information
faculerena authored Mar 25, 2024
2 parents a73f392 + e14d86f commit bd008bb
Show file tree
Hide file tree
Showing 9 changed files with 103 additions and 96 deletions.
85 changes: 85 additions & 0 deletions .github/workflows/general-rust.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# CI for the general-purpose Rust workspace under apps/cargo-scout-audit.
# Three independent jobs: formatting, clippy lints, and unused-dependency
# detection. Triggered on pushes to main / PRs touching the crate, or manually.
name: General Rust CI

on:
  push:
    branches:
      - main
    paths:
      - "apps/cargo-scout-audit/**"
  pull_request:
    paths:
      - "apps/cargo-scout-audit/**"
  workflow_dispatch:

env:
  CARGO_TERM_COLOR: always

jobs:
  format:
    name: Check Rust Format
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v4

      - name: Update Rust Toolchain
        run: rustup update

      - name: Install rustfmt
        run: rustup component add rustfmt

      - name: Check Formatting
        run: cd apps/cargo-scout-audit && cargo fmt -- --check

  clippy:
    name: Lint with Clippy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v4

      - name: Cache Rust Dependencies
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            ./apps/cargo-scout-audit/target
          key: ${{ runner.os }}-cargo-${{ hashFiles('apps/cargo-scout-audit/Cargo.lock') }}
          # Fall back to the most recent cache when the lockfile changes,
          # instead of starting from a cold cache.
          restore-keys: |
            ${{ runner.os }}-cargo-

      - name: Update Rust Toolchain
        run: rustup update

      - name: Install clippy
        run: rustup component add clippy

      - name: Lint Code
        run: cd apps/cargo-scout-audit && cargo clippy --all-targets --all-features -- -D warnings

  udeps:
    name: Check Unused Dependencies with cargo-udeps
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v4

      - name: Cache Rust Dependencies
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            ./apps/cargo-scout-audit/target
          key: ${{ runner.os }}-cargo-${{ hashFiles('apps/cargo-scout-audit/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-

      - name: Update Rust Toolchain
        run: rustup update

      # cargo-udeps can only run on the nightly toolchain; installing and
      # invoking it on stable makes this job fail unconditionally.
      - name: Install Nightly Toolchain
        run: rustup toolchain install nightly

      - name: Install cargo-udeps
        # --locked reproduces the tool's own lockfile for deterministic installs.
        run: cargo install cargo-udeps --locked

      - name: Check Unused Dependencies
        run: cd apps/cargo-scout-audit && cargo +nightly udeps --all-targets
2 changes: 0 additions & 2 deletions apps/cargo-scout-audit/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,7 @@ tera = {version = "=1.19.1", features=["builtins"]}
webbrowser = "=0.8.12"

scout-audit-internal = { version = "0.2.3", path = "../../scout-audit-internal", features = ["detector"] }
pulldown-cmark = "0.10.0"

[dev-dependencies]
colored = "2.0.0"
config = "0.14.0"

2 changes: 1 addition & 1 deletion apps/cargo-scout-audit/src/output/html/tera.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ impl Default for HtmlEngine {
tera: Tera::default(),
};
let _ = engine.load_templates();
return engine;
engine
}
}
impl HtmlEngine {
Expand Down
18 changes: 8 additions & 10 deletions apps/cargo-scout-audit/src/output/report.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ use chrono::offset::Local;
use core::panic;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{collections::HashMap, os::unix::process::CommandExt, path::PathBuf};
use std::path::Path;
use std::{collections::HashMap, os::unix::process::CommandExt};

use super::{html, markdown, pdf, vulnerabilities::*};

Expand Down Expand Up @@ -82,7 +83,7 @@ impl Report {
markdown::generate_markdown(self)
}

pub fn generate_pdf(&self, path: &PathBuf) -> Result<()> {
pub fn generate_pdf(&self, path: &Path) -> Result<()> {
let temp_html = pdf::generate_pdf(self)?;

std::process::Command::new("wkhtmltopdf")
Expand Down Expand Up @@ -136,7 +137,6 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc
})
.collect::<Vec<Value>>();

let mut id: u32 = 0;
let mut det_map: HashMap<_, _> = blockchain
.get_array_of_vulnerability_names()
.iter()
Expand All @@ -145,7 +145,7 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc

let mut findings: Vec<Finding> = Vec::new();

for finding in &scout_findings {
for (id, finding) in scout_findings.iter().enumerate() {
let category: String = finding
.get("message")
.and_then(|message| message.get("code"))
Expand Down Expand Up @@ -177,8 +177,7 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc
.get("file_name")
.unwrap_or(&Value::default())
.to_string()
.trim_matches('"')
.to_string(),
.trim_matches('"'),
sp[0].get("line_start").unwrap_or(&Value::default()),
sp[0].get("column_start").unwrap_or(&Value::default()),
sp[0].get("line_end").unwrap_or(&Value::default()),
Expand All @@ -190,7 +189,7 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc
let byte_start = sp[0].get("byte_start").unwrap().as_u64().unwrap() as usize;
let byte_end = sp[0].get("byte_end").unwrap().as_u64().unwrap() as usize;

let code_snippet: String = std::fs::read_to_string(&(file.trim_matches('"'))).unwrap()
let code_snippet: String = std::fs::read_to_string(file.trim_matches('"')).unwrap()
[byte_start..byte_end]
.to_string();

Expand All @@ -206,7 +205,7 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc
*v += 1;

let fndg = Finding {
id,
id: id as u32,
occurrence_index: *v,
category_id: blockchain
.get_raw_vuln_from_name(&category)
Expand All @@ -221,7 +220,6 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc
.trim_start_matches('/')
.to_string(),
};
id += 1;
findings.push(fndg);
}

Expand All @@ -232,7 +230,7 @@ pub fn generate_report(scout_output: String, info: ProjectInfo, blockchain: Bloc

let mut categories: Vec<Category> = Vec::new();

for (vuln, _) in &summary_map {
for vuln in summary_map.keys() {
let raw_vuln = blockchain.get_raw_vuln_from_name(vuln);
let id = raw_vuln.vulnerability_class.to_string();
let vuln = Vulnerability::from(raw_vuln);
Expand Down
4 changes: 2 additions & 2 deletions apps/cargo-scout-audit/src/startup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -313,7 +313,7 @@ fn run_dylint(

std::io::Read::read_to_string(&mut stdout_file, &mut cts)?;

std::io::Write::write(&mut json_file, &cts.as_bytes())?;
std::io::Write::write(&mut json_file, cts.as_bytes())?;
}
OutputFormat::Markdown => {
let mut content = String::new();
Expand All @@ -338,7 +338,7 @@ fn run_dylint(
PathBuf::from("report.sarif")
};

let mut sarif_file = fs::File::create(&path)?;
let mut sarif_file = fs::File::create(path)?;

let mut content = String::new();
std::io::Read::read_to_string(&mut stdout_file, &mut content)?;
Expand Down
2 changes: 1 addition & 1 deletion apps/cargo-scout-audit/src/utils/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,5 +81,5 @@ pub fn profile_enabled_detectors(config: toml::Table, profile: String) -> Result
}
}
}
return Ok(ret_vec);
Ok(ret_vec)
}
70 changes: 1 addition & 69 deletions apps/cargo-scout-audit/src/utils/output.rs
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ fn get_errors_from_output(
})
.collect::<Vec<&str>>();

assert!(&true_finds.len() == &scout_internals_spans.len());
assert!(true_finds.len() == scout_internals_spans.len());

for (i, elem) in true_finds.iter().enumerate() {
let parts = elem.split('\n').collect::<Vec<&str>>();
Expand All @@ -128,71 +128,3 @@ fn get_errors_from_output(
}
Ok(errors)
}

/// Assembles a SARIF v2.1.0 document from scout's raw output.
///
/// Reads findings via `get_errors_from_output`, then emits a single-run SARIF
/// JSON value whose `rules` list only the detectors that actually produced at
/// least one non-empty span set, and whose `results` come from
/// `build_sarif_results`.
///
/// # Errors
/// Propagates any error from `get_errors_from_output` or
/// `build_sarif_results`.
fn serify(
    scout_output: File,
    scout_internals: File,
    bc: BlockChain,
) -> anyhow::Result<serde_json::Value> {
    let errors: HashMap<String, (Vec<Value>, String)> =
        get_errors_from_output(scout_output, scout_internals, bc)?;

    let sarif_output = json!({
        "$schema": "https://json.schemastore.org/sarif-2.1.0",
        "version": "2.1.0",
        "runs": [
            {
                "tool": {
                    "driver": {
                        "name": env!("CARGO_PKG_NAME"),
                        "version": env!("CARGO_PKG_VERSION"),
                        // Single lookup per detector instead of the previous
                        // contains_key + get().unwrap() double lookup.
                        "rules": get_chain_enum(bc).filter(|e| {
                            errors
                                .get(&e.to_string())
                                .map_or(false, |(spans, _)| !spans.is_empty())
                        }).map(|e| {
                            json!({
                                "id": e.to_string(),
                                "shortDescription": {
                                    "text": e.get_lint_message()
                                }})

                        }).collect::<Vec<serde_json::Value>>(),
                        "informationUri": "https://coinfabrik.github.io/scout/",
                    }
                },
                "results": build_sarif_results(&errors)?,
            }
        ]
    });
    // `json!` already produces a `serde_json::Value`; the former
    // `serde_json::to_value(sarif_output)?` round-trip was redundant.
    Ok(sarif_output)
}

/// Serializes scout's output as a SARIF v2.1.0 JSON string.
///
/// Thin public wrapper over [`serify`]: builds the SARIF document and renders
/// it with `Value::to_string` (compact JSON).
///
/// # Errors
/// Propagates any error raised while building the SARIF document.
pub fn format_into_sarif(
    scout_output: File,
    scout_internals: File,
    bc: BlockChain,
) -> anyhow::Result<String> {
    let sarif_document = serify(scout_output, scout_internals, bc)?;
    Ok(sarif_document.to_string())
}

/// Flattens the per-detector error map into SARIF `results` entries.
///
/// Each span of each detector becomes one result object with the detector
/// name as `ruleId`, the shared message text, and the span as its location.
///
/// # Errors
/// Currently infallible; the `Result` return type is kept for interface
/// stability with its callers.
fn build_sarif_results(
    errors: &HashMap<String, (Vec<Value>, String)>,
) -> anyhow::Result<Vec<serde_json::Value>> {
    let runs: Vec<Value> = errors
        .iter()
        .flat_map(|(name, (spans, msg))| {
            // `map` instead of the previous `filter_map` whose closure always
            // returned `Some(...)` (clippy: unnecessary_filter_map).
            spans.iter().map(move |span| {
                json!({
                    "ruleId": name,
                    "level": "error",
                    "message": {
                        "text": msg
                    },
                    "locations": [span],
                })
            })
        })
        .collect();

    Ok(runs)
}
12 changes: 4 additions & 8 deletions apps/cargo-scout-audit/tests/integration_tests/detectors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ fn test() {
vec![false; integration_tests_to_run.as_ref().map_or(0, |v| v.len())];

// Get the configuration
let detectors_config = Configuration::build().expect(&"Failed to get the configuration".red());
let detectors_config = Configuration::build()
.unwrap_or_else(|_| panic!("{}", "Failed to get the configuration".red().to_string()));

// Run all integration tests
for detector_config in detectors_config.detectors.iter() {
Expand All @@ -65,15 +66,10 @@ fn test() {
println!("\n{} {}", "Testing detector:".bright_cyan(), detector_name);
for example in detector_config.testcases.iter() {
if let Some(vulnerable_path) = &example.vulnerable_path {
execute_and_validate_testcase(&detector_name, lint_message, &vulnerable_path, true);
execute_and_validate_testcase(&detector_name, lint_message, vulnerable_path, true);
}
if let Some(remediated_path) = &example.remediated_path {
execute_and_validate_testcase(
&detector_name,
lint_message,
&remediated_path,
false,
);
execute_and_validate_testcase(&detector_name, lint_message, remediated_path, false);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ impl Configuration {
.ok_or(anyhow!("Failed to find testcases path"))?
.join("test-cases");
let testcases_paths: Vec<PathBuf> = std::fs::read_dir(&testcases_root_path)?
.into_iter()
.filter_map(|r| r.ok().map(|f| f.path()))
.filter(|r| r.is_dir())
.collect();
Expand All @@ -47,7 +46,6 @@ impl Configuration {
let detector_name = detector.to_string();
let testcases_root_path = testcases_root_path.join(detector_name);
let testcases_paths: Vec<PathBuf> = std::fs::read_dir(testcases_root_path)?
.into_iter()
.filter_map(|r| r.ok().map(|f| f.path()))
.filter(|r| r.is_dir())
.collect();
Expand Down Expand Up @@ -101,7 +99,7 @@ impl Configuration {
.into_iter()
.sorted()
.zip(Detector::iter().map(|d| d.to_string()).sorted())
.filter(|(p, d)| p.file_name().unwrap().to_string_lossy() != d.to_string())
.filter(|(p, d)| p.file_name().unwrap().to_string_lossy() != *d)
.count();

if count > 0 {
Expand Down

0 comments on commit bd008bb

Please sign in to comment.