M examples/update_specs.rs => examples/update_specs.rs +1 -1
@@ 11,7 11,7 @@ use hyper::Method;
use sha2::{Digest, Sha256};
use tracing::{self, info};

-use kapiti::{http::Fetcher, hyper_smol, logging};
+use kapiti::{fetcher::Fetcher, hyper_smol, logging};

fn main() -> Result<()> {
    logging::init_logging();
M src/client/https.rs => src/client/https.rs +1 -1
@@ 15,7 15,7 @@ use tracing::debug;

use crate::client::DnsClient;
use crate::codec::{decoder::DNSMessageDecoder, encoder::DNSMessageEncoder, message};
-use crate::http::Fetcher;
+use crate::fetcher::Fetcher;
use crate::hyper_smol;
use crate::resolver;
use crate::specs::message::Message;
R src/http.rs => src/fetcher.rs +0 -0
M src/filter/filter.rs => src/filter/filter.rs +4 -4
@@ 5,8 5,8 @@ use anyhow::{Context, Result};
use hyper::Client;
use sha2::{Digest, Sha256};

-use crate::filter::{downloader, path, reader};
-use crate::{http, hyper_smol};
+use crate::filter::{path, reader, updater};
+use crate::{fetcher, hyper_smol};

/// An iterator that goes over the parent domains of a provided child domain.
/// For example, www.domain.com => [www.domain.com, domain.com, com]
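The doc comment above describes the parent-domain traversal used by the filter. As a rough sketch (the actual type in src/filter/filter.rs is not shown in this diff, so the name and layout below are hypothetical):

```rust
/// Hypothetical sketch of an iterator matching the doc comment above:
/// it yields the full name first, then each successive parent zone.
struct ParentDomains<'a> {
    remainder: Option<&'a str>,
}

impl<'a> Iterator for ParentDomains<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<&'a str> {
        let current = self.remainder?;
        // Drop the leading label; stop after the final label (the TLD).
        self.remainder = current.find('.').map(|dot| &current[dot + 1..]);
        Some(current)
    }
}

// ParentDomains { remainder: Some("www.domain.com") } yields
// "www.domain.com", then "domain.com", then "com".
```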
@@ 116,7 116,7 @@ pub async fn update_url(
    uri_string: &String,
    timeout_ms: u64,
) -> Result<(String, bool)> {
-    let fetcher = http::Fetcher::new(10 * 1024 * 1024, None);
+    let fetcher = fetcher::Fetcher::new(10 * 1024 * 1024, None);
    // We download files to the exact SHA of the URL string we were provided.
    // This is an easy way to avoid filename collisions in URLs: example1.com/hosts vs example2.com/hosts
    // If the user changes the URL string then that changes the SHA, perfect for "cache invalidation" purposes.
@@ 126,7 126,7 @@ pub async fn update_url(
        hosts_path_sha,
        path::ZSTD_EXTENSION
    ));
-    let downloaded = downloader::update_file(
+    let downloaded = updater::update_file(
        fetch_client,
        &fetcher,
        uri_string,
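The comments in update_url above explain the download-cache naming scheme. A minimal sketch of the idea, assuming the digest is hex-encoded (the helper name below is hypothetical; path::for_fetched_file and path::ZSTD_EXTENSION are the real helpers referenced by the diff):

```rust
use sha2::{Digest, Sha256};

// Hypothetical helper showing the naming scheme described above: the cached
// filename is the SHA-256 of the URL string itself, plus the zstd extension
// (".zst" here stands in for path::ZSTD_EXTENSION).
fn cache_filename(uri_string: &str) -> String {
    let hosts_path_sha = format!("{:x}", Sha256::digest(uri_string.as_bytes()));
    // A changed URL string changes the SHA, so a stale file cached under the
    // old URL is simply never looked up again: "cache invalidation" for free.
    format!("{}.zst", hosts_path_sha)
}
```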
M src/filter/mod.rs => src/filter/mod.rs +2 -2
@@ 1,8 1,8 @@
-/// Downloads blocklists/host files to disk
-pub mod downloader;
/// Main filter API. Manages fetching/reading files and resolving results across them
pub mod filter;
/// Utilities relating to paths of fetched files
pub mod path;
/// Reads blocklists and/or host files from disk
pub mod reader;
+/// Downloads HTTP blocklists/host files to disk
+pub mod updater;
R src/filter/downloader.rs => src/filter/updater.rs +1 -1
@@ 12,8 12,8 @@ use hyper::header;
use hyper::{Client, HeaderMap, Method};
use tracing::{debug, info, level_enabled, trace, warn, Level};
+use crate::fetcher::Fetcher;
use crate::filter::path;
-use crate::http::Fetcher;
use crate::hyper_smol;

/// Downloads the specified URL to the specified path using the provided client.
M src/lib.rs => src/lib.rs +3 -3
@@ 10,12 10,12 @@ pub mod codec;
/// Utilities relating to Kapiti toml configuration.
pub mod config;

+/// Client for downloading data over HTTP to local disk.
+pub mod fetcher;
+
/// Filters/overrides to be applied before querying upstream DNS servers
pub mod filter;

-/// Utilities relating to downloading data over HTTP to local disk.
-pub mod http;
-
/// Implements support for running the hyper HTTP library against smol, not tokio.
/// This allows me to stop blowing hours of work every 4-6 months to keep Kapiti on the Tokio upgrade treadmill.
/// The hyper integration is also set up to resolve hosts against ourselves, instead of the system resolver.
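The hyper_smol doc comment above describes the integration's purpose: drive hyper's background tasks with smol instead of tokio. The module's contents are not part of this diff; a minimal sketch of the general pattern, assuming hyper 0.14's rt::Executor trait, looks like this:

```rust
use std::future::Future;

use hyper::rt::Executor;

/// Sketch of a hyper-compatible executor backed by smol. Kapiti's real
/// hyper_smol module also resolves hostnames against Kapiti itself via a
/// custom connector; that part is omitted here.
#[derive(Clone, Copy, Debug)]
struct SmolExecutor;

impl<F> Executor<F> for SmolExecutor
where
    F: Future + Send + 'static,
    F::Output: Send + 'static,
{
    fn execute(&self, fut: F) {
        // Hand the task to smol's global executor and let it run detached.
        smol::spawn(fut).detach();
    }
}
```

A client would then be built with something like Client::builder().executor(SmolExecutor).build(connector), again assuming hyper 0.14's builder API.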