feat: allow running plugins via path
This commit improves the logic for resolving dependencies, and now
properly retrieves the dependencies of top-level policy plugins.
patrickjcasey committed Oct 28, 2024
1 parent 2e4302e commit 86b65ae
Showing 28 changed files with 471 additions and 377 deletions.
2 changes: 1 addition & 1 deletion config/Hipcheck.kdl
@@ -1,7 +1,7 @@
plugins {
plugin "mitre/activity" version="0.1.0"
plugin "mitre/binary" version="0.1.0"
plugin "mitre/fuzz" version="0.1.0"
plugin "mitre/fuzz" version="0.1.0" manifest="./plugins/fuzz/plugin.kdl"
plugin "mitre/review" version="0.1.0"
plugin "mitre/typo" version="0.1.0"
plugin "mitre/affiliation" version="0.1.0"
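The one-line config change above is the user-facing surface of this commit: a `plugin` entry can now carry a `manifest` attribute pointing at a local `plugin.kdl`, so that plugin is resolved from a path on disk rather than downloaded. A minimal Rust sketch of how such an entry might be modeled; the struct and function names here are hypothetical, not Hipcheck's actual types:

```rust
use std::path::PathBuf;

/// Hypothetical model of a policy-file `plugin` entry; the real Hipcheck
/// types differ. This only illustrates the optional `manifest` field.
struct PolicyPluginEntry {
    publisher: String,
    name: String,
    version: String,
    manifest: Option<PathBuf>,
}

/// Branch on whether a local manifest path was given.
fn resolve(entry: &PolicyPluginEntry) {
    match &entry.manifest {
        // Local path supplied: read `plugin.kdl` straight from disk.
        Some(path) => println!("loading manifest from {}", path.display()),
        // No path: fall back to the remote download-manifest flow.
        None => println!(
            "downloading {}/{} {}",
            entry.publisher, entry.name, entry.version
        ),
    }
}

fn main() {
    let entry = PolicyPluginEntry {
        publisher: "mitre".into(),
        name: "fuzz".into(),
        version: "0.1.0".into(),
        manifest: Some(PathBuf::from("./plugins/fuzz/plugin.kdl")),
    };
    resolve(&entry);
}
```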
18 changes: 3 additions & 15 deletions hipcheck/src/analysis/score.rs
@@ -8,7 +8,7 @@ use crate::{
engine::HcEngine,
error::Result,
hc_error,
plugin::QueryResult,
plugin::{QueryResult, MITRE_LEGACY_PLUGINS},
policy_exprs::Executor,
shell::spinner_phase::SpinnerPhase,
};
@@ -61,19 +61,7 @@ impl PluginAnalysisResults {
/// Get all results from non-legacy analyses.
pub fn plugin_results(&self) -> impl Iterator<Item = (&Analysis, &PluginAnalysisResult)> {
self.table.iter().filter_map(|(analysis, result)| {
if [
REVIEW_PHASE,
IDENTITY_PHASE,
BINARY_PHASE,
ACTIVITY_PHASE,
FUZZ_PHASE,
TYPO_PHASE,
AFFILIATION_PHASE,
CHURN_PHASE,
ENTROPY_PHASE,
]
// Horrifying conversion, but necessary.
.contains(&(analysis.plugin).as_ref())
if MITRE_LEGACY_PLUGINS.contains(&analysis.plugin.as_str())
&& analysis.publisher == MITRE_PUBLISHER
{
None
@@ -229,7 +217,7 @@ fn wrapped_query(
query: String,
key: Value,
) -> Result<QueryResult> {
if publisher == *MITRE_PUBLISHER {
if publisher == *MITRE_PUBLISHER && MITRE_LEGACY_PLUGINS.contains(&plugin.as_str()) {
if query != *DEFAULT_QUERY {
return Err(hc_error!("legacy analyses only have a default query"));
}
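`MITRE_LEGACY_PLUGINS` replaces the inline phase array, so the legacy filter in `plugin_results` and the check in `wrapped_query` now share one source of truth. The real definition lives in `retrieval.rs` and is not part of this diff, so the sketch below infers the list from the nine phases the deleted array contained — treat it as a plausible shape, not the actual constant:

```rust
/// Plausible contents of the re-exported constant, inferred from the nine
/// phase names the deleted array matched on.
pub const MITRE_LEGACY_PLUGINS: [&str; 9] = [
    "review", "identity", "binary", "activity", "fuzz",
    "typo", "affiliation", "churn", "entropy",
];

fn main() {
    // Mirrors the filter in `plugin_results`: legacy MITRE analyses are
    // screened out, everything else counts as a plugin result.
    let plugin = "fuzz";
    println!("legacy: {}", MITRE_LEGACY_PLUGINS.contains(&plugin));
}
```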
24 changes: 16 additions & 8 deletions hipcheck/src/cache/plugin.rs
@@ -4,10 +4,11 @@ use std::path::{Path, PathBuf};

use pathbuf::pathbuf;

use crate::plugin::{PluginName, PluginPublisher, PluginVersion};
use crate::plugin::PluginId;

/// Plugins are stored with the following format `<path_to_plugin_cache>/<publisher>/<plugin_name>/<version>`
pub struct HcPluginCache {
/// path to the root of the plugin cache
path: PathBuf,
}

@@ -17,13 +18,20 @@ impl HcPluginCache {
Self { path: plugins_path }
}

/// The folder in which a specific `PluginId` will be stored
///
/// `<path_to_plugin_cache>/<publisher>/<plugin_name>/<version>`
pub fn plugin_download_dir(
&self,
publisher: &PluginPublisher,
name: &PluginName,
version: &PluginVersion,
) -> PathBuf {
self.path.join(&publisher.0).join(&name.0).join(&version.0)
pub fn plugin_download_dir(&self, plugin_id: &PluginId) -> PathBuf {
self.path
.join(plugin_id.publisher().as_ref())
.join(plugin_id.name().as_ref())
.join(plugin_id.version().as_ref())
}

/// The path to where the `plugin.kdl` file for a specific PluginId will be stored
///
/// `<path_to_plugin_cache>/<publisher>/<plugin_name>/<version>/plugin.kdl`
pub fn plugin_kdl(&self, plugin_id: &PluginId) -> PathBuf {
self.plugin_download_dir(plugin_id).join("plugin.kdl")
}
}
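The cache API now takes a single `&PluginId` instead of three separate parts, and the new `plugin_kdl` helper centralizes where the manifest lands on disk. A self-contained sketch of the resulting layout, using plain strings in place of the real `PluginId` type and an example cache root:

```rust
use std::path::PathBuf;

// Stand-in for the real cache type, using plain strings where the actual
// methods take a `&PluginId`, just to make the directory layout concrete.
struct HcPluginCache {
    path: PathBuf,
}

impl HcPluginCache {
    fn plugin_download_dir(&self, publisher: &str, name: &str, version: &str) -> PathBuf {
        self.path.join(publisher).join(name).join(version)
    }

    fn plugin_kdl(&self, publisher: &str, name: &str, version: &str) -> PathBuf {
        self.plugin_download_dir(publisher, name, version)
            .join("plugin.kdl")
    }
}

fn main() {
    let cache = HcPluginCache {
        path: PathBuf::from("/home/user/.cache/hipcheck/plugins"),
    };
    // Prints "/home/user/.cache/hipcheck/plugins/mitre/fuzz/0.1.0/plugin.kdl"
    println!("{}", cache.plugin_kdl("mitre", "fuzz", "0.1.0").display());
}
```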
39 changes: 11 additions & 28 deletions hipcheck/src/engine.rs
@@ -9,15 +9,11 @@ use crate::{
QueryResult,
},
policy::PolicyFile,
util::fs::{find_file_by_name, read_string},
Result,
};
use futures::future::{BoxFuture, FutureExt};
use serde_json::Value;
use std::{
str::FromStr,
sync::{Arc, LazyLock},
};
use std::sync::{Arc, LazyLock};
use tokio::runtime::{Handle, Runtime};

// Salsa doesn't natively support async functions, so our recursive `query()` function that
@@ -68,8 +64,11 @@ fn default_query_explanation(
) -> Result<Option<String>> {
let core = db.core();
let key = get_plugin_key(publisher.as_str(), plugin.as_str());
let Some(p_handle) = core.plugins.get(&plugin) else {
return Err(hc_error!("No such plugin {}", key));
let Some(p_handle) = core.plugins.get(&key) else {
return Err(hc_error!(
"Plugin '{}' not found",
key,
));
};
Ok(p_handle.get_default_query_explanation().cloned())
}
@@ -84,6 +83,7 @@ fn query(
let runtime = RUNTIME.handle();
let core = db.core();
let hash_key = get_plugin_key(publisher.as_str(), plugin.as_str());

// Find the plugin
let Some(p_handle) = core.plugins.get(&hash_key) else {
return Err(hc_error!("No such plugin {}", hash_key));
@@ -237,25 +237,14 @@ pub fn start_plugins(

let mut plugins = vec![];
for plugin_id in required_plugin_names.iter() {
let plugin_dir = plugin_cache.plugin_download_dir(
&plugin_id.publisher,
&plugin_id.name,
&plugin_id.version,
);

// determine entrypoint for this plugin
let plugin_kdl = find_file_by_name(plugin_dir, "plugin.kdl")?;
let contents = read_string(&plugin_kdl)?;
let plugin_manifest = PluginManifest::from_str(contents.as_str())?;
let plugin_manifest = PluginManifest::from_file(plugin_cache.plugin_kdl(plugin_id))?;
let entrypoint = plugin_manifest
.get_entrypoint(&current_arch)
.ok_or_else(|| {
hc_error!(
"Could not find {} entrypoint for {}/{} {}",
"Could not find {} entrypoint for {}",
current_arch,
plugin_id.publisher.0,
plugin_id.name.0,
plugin_id.version.0
plugin_id
)
})?;

@@ -267,13 +256,7 @@
// find and serialize config for plugin
let config = policy_file
.get_config(plugin_id.to_policy_file_plugin_identifier().as_str())
.ok_or_else(|| {
hc_error!(
"Could not find config for {} {}",
plugin_id.to_policy_file_plugin_identifier(),
plugin_id.version.0
)
})?;
.ok_or_else(|| hc_error!("Could not find config for {}", plugin_id))?;
let config = serde_json::to_value(&config).map_err(|_e| {
hc_error!(
"Error serializing config for {}",
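Several error sites in this file now format a `PluginId` directly (e.g. `hc_error!("Could not find config for {}", plugin_id)`), which implies the type implements `Display`. A sketch of what that impl could look like; the struct layout and the exact output format are assumptions, chosen to match the fields the old error strings printed individually:

```rust
use std::fmt;

// Hypothetical mirror of `PluginId`, to show the `Display` impl the new
// error messages rely on; the real type lives in `plugin/plugin_id.rs`.
struct PluginId {
    publisher: String,
    name: String,
    version: String,
}

impl fmt::Display for PluginId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Assumed format: "publisher/name version".
        write!(f, "{}/{} {}", self.publisher, self.name, self.version)
    }
}

fn main() {
    let id = PluginId {
        publisher: "mitre".into(),
        name: "fuzz".into(),
        version: "0.1.0".into(),
    };
    println!("Could not find config for {}", id);
}
```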
162 changes: 4 additions & 158 deletions hipcheck/src/plugin/download_manifest.rs
@@ -1,25 +1,12 @@
// SPDX-License-Identifier: Apache-2.0

use super::{PluginId, PluginManifest, PluginName, PluginPublisher, PluginVersion};
use crate::{
cache::plugin::HcPluginCache,
error::Error,
hc_error,
plugin::{
arch::Arch,
get_current_arch,
retrieval::{download_plugin, extract_plugin},
},
plugin::{arch::Arch, PluginVersion},
util::kdl::{extract_data, ParseKdlNode},
util::{
fs::{find_file_by_name, read_string},
http::agent::agent,
},
};
use fs_extra::dir::remove;
use kdl::{KdlDocument, KdlNode, KdlValue};
use std::{collections::HashSet, fmt::Display, io::Read, str::FromStr};
use url::Url;
use std::{fmt::Display, str::FromStr};

#[cfg(test)]
use crate::plugin::arch::KnownArch;
@@ -277,95 +264,6 @@ impl ParseKdlNode for DownloadManifestEntry {
}
}

impl DownloadManifestEntry {
/// This function does the following:
/// 1. Download specified plugin
/// 1. Verify its size and hash
/// 1. Extract plugin into plugin-specific folder
/// 1. Finds `plugin.kdl` inside plugin-specific folder and calls this recursively
pub fn download_and_unpack_plugin<'a>(
&self,
plugin_cache: &HcPluginCache,
publisher: &PluginPublisher,
name: &PluginName,
version: &PluginVersion,
downloaded_plugins: &'a mut HashSet<PluginId>,
) -> Result<&'a HashSet<PluginId>, Error> {
let current_arch = get_current_arch();

let plugin_id = PluginId::new(publisher.clone(), name.clone(), version.clone());

if downloaded_plugins.contains(&plugin_id) {
return Ok(downloaded_plugins);
}

// currently plugins are put in HC_CACHE/plugins/<publisher>/<name>/<version>
let download_dir = plugin_cache.plugin_download_dir(publisher, name, version);

let output_path = download_plugin(
&self.url,
download_dir.as_path(),
self.size.bytes,
&self.hash,
)
.map_err(|e| {
// delete any leftover remnants
let _ = remove(download_dir.as_path());
hc_error!("Error [{}] downloading '{}'", e, &self.url)
})?;

extract_plugin(
output_path.as_path(),
download_dir.as_path(),
self.compress.format,
)
.map_err(|e| {
// delete any leftover remnants
let _ = remove(download_dir.as_path());
hc_error!(
"Error [{}] extracting plugin '{}/{}' version {} for {}",
e,
publisher.0,
name.0,
version.0,
current_arch,
)
})?;

// locate the plugin manifest for this plugin, read its contents, and deserialize it into a PluginManifest
let plugin_manifest_path = find_file_by_name(download_dir.as_path(), "plugin.kdl")?;
let contents = read_string(plugin_manifest_path)?;
let plugin_manifest = PluginManifest::from_str(contents.as_str())?;

downloaded_plugins.insert(plugin_id);

for dependency in plugin_manifest.dependencies.0.iter() {
let url = match &dependency.manifest {
Some(url) => url,
None => {
return Err(hc_error!(
"No manifest URL provided for {}/{} {}",
dependency.publisher.0,
dependency.name.0,
dependency.version.0
))
}
};

let download_manifest = DownloadManifest::from_network(url)?;

download_manifest.download_and_unpack_all_plugins(
plugin_cache,
&dependency.publisher,
&dependency.name,
&dependency.version,
downloaded_plugins,
)?;
}
Ok(downloaded_plugins)
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DownloadManifest {
pub entries: Vec<DownloadManifestEntry>,
@@ -381,59 +279,6 @@ impl DownloadManifest {
pub fn len(&self) -> usize {
self.entries.len()
}

/// fetch download manifest file from the network and parse it into a DownloadManifest
pub fn from_network(url: &Url) -> Result<Self, Error> {
let agent = agent();
let response = agent
.get(url.as_str())
.call()
.map_err(|e| hc_error!("Error [{}] retrieving download manifest {}", e, url))?;
let error_code = response.status();
if error_code != 200 {
return Err(hc_error!(
"HTTP error code {} when retrieving {}",
error_code,
url
));
}

// extract bytes from response
// preallocate 10 MB to cut down on number of allocations needed
let mut contents = Vec::with_capacity(10 * 1024 * 1024);
let amount_read = response
.into_reader()
.read_to_end(&mut contents)
.map_err(|e| hc_error!("Error [{}] reading download manifest into buffer", e))?;
contents.truncate(amount_read);
let contents = String::from_utf8_lossy(&contents);

// attempt to deserialize
let download_manifest = Self::from_str(&contents)?;
Ok(download_manifest)
}

/// Downloads all plugins specified in the download manifest file
pub fn download_and_unpack_all_plugins<'a>(
&self,
plugin_cache: &HcPluginCache,
publisher: &PluginPublisher,
name: &PluginName,
version: &PluginVersion,
downloaded_plugins: &'a mut HashSet<PluginId>,
) -> Result<&'a HashSet<PluginId>, Error> {
for entry in self.entries.iter() {
entry.download_and_unpack_plugin(
plugin_cache,
publisher,
name,
version,
downloaded_plugins,
)?;
}

Ok(downloaded_plugins)
}
}

impl FromStr for DownloadManifest {
@@ -458,6 +303,7 @@ impl FromStr for DownloadManifest {
mod test {
use super::*;
use std::str::FromStr;
use url::Url;

#[test]
fn test_parsing_hash_algorithm() {
@@ -536,7 +382,7 @@ mod test {
let raw_url = "https://github.com/mitre/hipcheck/releases/download/hipcheck-v3.4.0/hipcheck-x86_64-apple-darwin.tar.xz";
let node = KdlNode::from_str(format!(r#"url "{}""#, raw_url).as_str()).unwrap();
assert_eq!(
url::Url::parse_node(&node).unwrap(),
Url::parse_node(&node).unwrap(),
Url::parse(raw_url).unwrap()
);
}
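The download, extract, and recursion machinery removed from this file previously threaded a publisher/name/version triple plus a visited set through each call; per the commit message, the reworked resolution (in code not shown in this diff, presumably relocated to `retrieval.rs`) now properly picks up the dependencies of top-level policy plugins as well. A schematic sketch of the underlying visited-set recursion pattern, with made-up dependency data and no real downloading:

```rust
use std::collections::HashSet;

// Schematic of the dependency walk this commit reworks: a visited set keyed
// by plugin id stops repeat downloads while the recursion still descends
// into every plugin's manifest. All names and data here are illustrative.
fn retrieve(plugin: &str, deps: &[(&str, Vec<&str>)], done: &mut HashSet<String>) {
    if !done.insert(plugin.to_string()) {
        return; // already downloaded and unpacked
    }
    // the real code would download, verify, and extract the plugin here
    let children = deps
        .iter()
        .find(|(p, _)| *p == plugin)
        .map(|(_, d)| d.as_slice())
        .unwrap_or(&[]);
    for &dep in children {
        retrieve(dep, deps, done);
    }
}

fn main() {
    let deps = vec![
        ("mitre/activity", vec!["mitre/git"]),
        ("mitre/git", vec![]),
    ];
    let mut done = HashSet::new();
    retrieve("mitre/activity", &deps, &mut done);
    println!("retrieved: {:?}", done);
}
```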
2 changes: 1 addition & 1 deletion hipcheck/src/plugin/mod.rs
@@ -13,7 +13,7 @@ pub use crate::plugin::{get_plugin_key, manager::*, plugin_id::PluginId, types::
pub use arch::{get_current_arch, try_set_arch, Arch};
pub use download_manifest::{ArchiveFormat, DownloadManifest, HashAlgorithm, HashWithDigest};
pub use plugin_manifest::{PluginManifest, PluginName, PluginPublisher, PluginVersion};
pub use retrieval::retrieve_plugins;
pub use retrieval::{retrieve_plugins, MITRE_LEGACY_PLUGINS};
use serde_json::Value;
use std::collections::HashMap;
use tokio::sync::Mutex;