This commit is contained in:
despiegk 2025-05-13 09:19:45 +03:00
parent 7fa4125dc0
commit dbd44043cb
3 changed files with 148 additions and 76 deletions

View File

@@ -15,10 +15,10 @@ use ipfs_api::{IpfsApi, IpfsClient};
pub struct Collection {
/// Base path of the collection
pub path: PathBuf,
/// Name of the collection (namefixed)
pub name: String,
/// Redis storage backend
pub storage: RedisStorage,
}
@@ -27,10 +27,10 @@ pub struct Collection {
pub struct CollectionBuilder {
/// Base path of the collection
path: PathBuf,
/// Name of the collection (namefixed)
name: String,
/// Redis storage backend
storage: Option<RedisStorage>,
}
@@ -53,7 +53,7 @@ impl Collection {
storage: None,
}
}
/// Scan walks over the path and finds all files and .md files
/// It stores the relative positions in Redis
///
@@ -62,7 +62,7 @@ impl Collection {
/// Ok(()) on success or an error
pub fn scan(&self) -> Result<()> {
println!("DEBUG: Scanning collection '{}' at path {:?}", self.name, self.path);
// Delete existing collection data if any
println!("DEBUG: Deleting existing collection data from Redis key 'collections:{}'", self.name);
self.storage.delete_collection(&self.name)?;
@@ -71,11 +71,11 @@ impl Collection {
.unwrap_or_else(|_| self.path.clone())
.to_string_lossy()
.to_string();
println!("DEBUG: Storing collection path in Redis key 'collections:{}:path'", self.name);
self.storage.store_collection_path(&self.name, &absolute_path)?;
self.storage.store_collection_path(&self.name, &self.path.to_string_lossy())?;
// Walk through the directory
let walker = WalkDir::new(&self.path);
for entry_result in walker {
@@ -88,18 +88,18 @@ impl Collection {
continue;
}
};
// Skip directories
if entry.file_type().is_dir() {
continue;
}
// Skip files that start with a dot (.)
let file_name = entry.file_name().to_string_lossy();
if file_name.starts_with(".") {
continue;
}
// Get the relative path from the base path
let rel_path = match entry.path().strip_prefix(&self.path) {
Ok(path) => path,
@@ -109,11 +109,11 @@ impl Collection {
continue;
}
};
// Get the filename and apply namefix
let filename = entry.file_name().to_string_lossy().to_string();
let namefixed_filename = name_fix(&filename);
// Determine if this is a document (markdown file) or an image
let is_markdown = filename.to_lowercase().ends_with(".md");
let is_image = filename.to_lowercase().ends_with(".png") ||
@@ -121,7 +121,7 @@ impl Collection {
filename.to_lowercase().ends_with(".jpeg") ||
filename.to_lowercase().ends_with(".gif") ||
filename.to_lowercase().ends_with(".svg");
let file_type = if is_markdown {
"document"
} else if is_image {
@@ -129,22 +129,22 @@ impl Collection {
} else {
"file"
};
// Store in Redis using the namefixed filename as the key
// Store the original relative path to preserve case and special characters
println!("DEBUG: Storing {} '{}' in Redis key 'collections:{}' with key '{}' and value '{}'",
file_type, filename, self.name, namefixed_filename, rel_path.to_string_lossy());
self.storage.store_collection_entry(
&self.name,
&namefixed_filename,
&rel_path.to_string_lossy()
)?;
}
Ok(())
}
/// Get a page by name and return its markdown content
///
/// # Arguments
@@ -157,14 +157,14 @@ impl Collection {
pub fn page_get(&self, page_name: &str) -> Result<String> {
// Apply namefix to the page name
let namefixed_page_name = name_fix(page_name);
// Ensure it has .md extension
let namefixed_page_name = ensure_md_extension(&namefixed_page_name);
// Get the relative path from Redis
let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
.map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;
// Check if the path is valid
if self.path.as_os_str().is_empty() {
// If the path is empty, we're working with a collection loaded from Redis
@@ -174,18 +174,18 @@ impl Collection {
format!("File path not available for {} in collection {}", page_name, self.name)
)));
}
// Read the file
let full_path = self.path.join(rel_path);
let content = fs::read_to_string(full_path)
.map_err(|e| DocTreeError::IoError(e))?;
// Skip include processing at this level to avoid infinite recursion
// Include processing will be done at the higher level
Ok(content)
}
/// Create or update a page in the collection
///
/// # Arguments
@@ -199,27 +199,27 @@ impl Collection {
pub fn page_set(&self, page_name: &str, content: &str) -> Result<()> {
// Apply namefix to the page name
let namefixed_page_name = name_fix(page_name);
// Ensure it has .md extension
let namefixed_page_name = ensure_md_extension(&namefixed_page_name);
// Create the full path
let full_path = self.path.join(&namefixed_page_name);
// Create directories if needed
if let Some(parent) = full_path.parent() {
fs::create_dir_all(parent).map_err(DocTreeError::IoError)?;
}
// Write content to file
fs::write(&full_path, content).map_err(DocTreeError::IoError)?;
// Update Redis
self.storage.store_collection_entry(&self.name, &namefixed_page_name, &namefixed_page_name)?;
Ok(())
}
/// Delete a page from the collection
///
/// # Arguments
@@ -232,24 +232,24 @@ impl Collection {
pub fn page_delete(&self, page_name: &str) -> Result<()> {
// Apply namefix to the page name
let namefixed_page_name = name_fix(page_name);
// Ensure it has .md extension
let namefixed_page_name = ensure_md_extension(&namefixed_page_name);
// Get the relative path from Redis
let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
.map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;
// Delete the file
let full_path = self.path.join(rel_path);
fs::remove_file(full_path).map_err(DocTreeError::IoError)?;
// Remove from Redis
self.storage.delete_collection_entry(&self.name, &namefixed_page_name)?;
Ok(())
}
/// List all pages in the collection
///
/// # Returns
@@ -258,15 +258,15 @@ impl Collection {
pub fn page_list(&self) -> Result<Vec<String>> {
// Get all keys from Redis
let keys = self.storage.list_collection_entries(&self.name)?;
// Filter to only include .md files
let pages = keys.into_iter()
.filter(|key| key.ends_with(".md"))
.collect();
Ok(pages)
}
/// Get the URL for a file
///
/// # Arguments
@@ -279,17 +279,17 @@ impl Collection {
pub fn file_get_url(&self, file_name: &str) -> Result<String> {
// Apply namefix to the file name
let namefixed_file_name = name_fix(file_name);
// Get the relative path from Redis
let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
.map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;
// Construct a URL for the file
let url = format!("/collections/{}/files/{}", self.name, rel_path);
Ok(url)
}
/// Add or update a file in the collection
///
/// # Arguments
@@ -303,24 +303,24 @@ impl Collection {
pub fn file_set(&self, file_name: &str, content: &[u8]) -> Result<()> {
// Apply namefix to the file name
let namefixed_file_name = name_fix(file_name);
// Create the full path
let full_path = self.path.join(&namefixed_file_name);
// Create directories if needed
if let Some(parent) = full_path.parent() {
fs::create_dir_all(parent).map_err(DocTreeError::IoError)?;
}
// Write content to file
fs::write(&full_path, content).map_err(DocTreeError::IoError)?;
// Update Redis
self.storage.store_collection_entry(&self.name, &namefixed_file_name, &namefixed_file_name)?;
Ok(())
}
/// Delete a file from the collection
///
/// # Arguments
@@ -333,21 +333,21 @@ impl Collection {
pub fn file_delete(&self, file_name: &str) -> Result<()> {
// Apply namefix to the file name
let namefixed_file_name = name_fix(file_name);
// Get the relative path from Redis
let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
.map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;
// Delete the file
let full_path = self.path.join(rel_path);
fs::remove_file(full_path).map_err(DocTreeError::IoError)?;
// Remove from Redis
self.storage.delete_collection_entry(&self.name, &namefixed_file_name)?;
Ok(())
}
/// List all files (non-markdown) in the collection
///
/// # Returns
@@ -356,15 +356,15 @@ impl Collection {
pub fn file_list(&self) -> Result<Vec<String>> {
// Get all keys from Redis
let keys = self.storage.list_collection_entries(&self.name)?;
// Filter to exclude .md files
let files = keys.into_iter()
.filter(|key| !key.ends_with(".md"))
.collect();
Ok(files)
}
/// Get the relative path of a page in the collection
///
/// # Arguments
@@ -377,15 +377,15 @@ impl Collection {
pub fn page_get_path(&self, page_name: &str) -> Result<String> {
// Apply namefix to the page name
let namefixed_page_name = name_fix(page_name);
// Ensure it has .md extension
let namefixed_page_name = ensure_md_extension(&namefixed_page_name);
// Get the relative path from Redis
self.storage.get_collection_entry(&self.name, &namefixed_page_name)
.map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))
}
/// Get a page by name and return its HTML content
///
/// # Arguments
@@ -399,20 +399,20 @@ impl Collection {
pub fn page_get_html(&self, page_name: &str, doctree: Option<&crate::doctree::DocTree>) -> Result<String> {
// Get the markdown content
let markdown = self.page_get(page_name)?;
// Process includes if doctree is provided
let processed_markdown = if let Some(dt) = doctree {
process_includes(&markdown, &self.name, dt)?
} else {
markdown
};
// Convert markdown to HTML
let html = markdown_to_html(&processed_markdown);
Ok(html)
}
/// Get information about the Collection
///
/// # Returns
@@ -424,9 +424,8 @@ impl Collection {
info.insert("path".to_string(), self.path.to_string_lossy().to_string());
info
}
}
/// Exports files and images from the collection to IPFS, encrypts them, and generates a CSV manifest.
/// Exports files and images from the collection to IPFS synchronously, encrypting them, and generating a CSV manifest.
///
/// # Arguments
///
@@ -435,8 +434,16 @@ impl Collection {
/// # Returns
///
/// Ok(()) on success or an error.
impl Collection {
/// Exports files and images from the collection to IPFS, encrypts them, and generates a CSV manifest.
pub fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
// Create a new tokio runtime and block on the async export function
tokio::runtime::Runtime::new()?.block_on(async {
self.export_to_ipfs_async(output_csv_path).await
})?;
Ok(())
}
/// Exports files and images from the collection to IPFS asynchronously, encrypts them, and generates a CSV manifest.
///
/// # Arguments
///
@@ -445,7 +452,7 @@ impl Collection {
/// # Returns
///
/// Ok(()) on success or an error.
pub async fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
pub async fn export_to_ipfs_async(&self, output_csv_path: &Path) -> Result<()> {
use blake3::Hasher;
// use chacha20poly1305::{ChaCha20Poly1305, Aead};
use ipfs_api::IpfsClient;
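The body of export_to_ipfs_async is largely outside the hunks shown here. Judging only from the imports above (blake3, ipfs_api, and the commented-out chacha20poly1305), the per-file hash-and-add step might look roughly like the sketch below; the function name, signature, and error handling are assumptions, and the encryption step is omitted.

use std::io::Cursor;
use ipfs_api::{IpfsApi, IpfsClient};

// Illustrative sketch only: hash the file bytes with Blake3, add them to IPFS,
// and return (blake3_hex, ipfs_cid) for one CSV manifest row.
async fn hash_and_add(
    client: &IpfsClient,
    bytes: Vec<u8>,
) -> std::result::Result<(String, String), Box<dyn std::error::Error>> {
    let blake3_hex = blake3::hash(&bytes).to_hex().to_string();
    let added = client.add(Cursor::new(bytes)).await?;
    Ok((blake3_hex, added.hash))
}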
@@ -570,7 +577,7 @@ impl CollectionBuilder {
self.storage = Some(storage);
self
}
/// Build the Collection
///
/// # Returns
@@ -580,13 +587,13 @@
let storage = self.storage.ok_or_else(|| {
DocTreeError::MissingParameter("storage".to_string())
})?;
let collection = Collection {
path: self.path,
name: self.name,
storage,
};
Ok(collection)
}
}
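To illustrate the sync/async split introduced in this file, a minimal caller sketch follows; the collection value and CSV path are placeholders, and only the two method names and signatures come from the diff above.

use std::path::Path;

// In blocking code (e.g. a CLI): the sync wrapper creates its own tokio runtime.
fn export_blocking(collection: &Collection) -> Result<()> {
    collection.export_to_ipfs(Path::new("/tmp/example_manifest.csv"))
}

// Already inside a tokio runtime: call the async variant directly, since
// nesting block_on inside a running runtime panics.
async fn export_nonblocking(collection: &Collection) -> Result<()> {
    collection.export_to_ipfs_async(Path::new("/tmp/example_manifest.csv")).await
}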

View File

@@ -1,3 +1,4 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
@@ -551,7 +552,7 @@ impl DocTree {
for (name, collection) in &self.collections {
let csv_file_path = output_dir.join(format!("{}.csv", name));
println!("DEBUG: Exporting collection '{}' to IPFS and generating CSV at {:?}", name, csv_file_path);
if let Err(e) = collection.export_to_ipfs(&csv_file_path).await {
if let Err(e) = collection.export_to_ipfs(&csv_file_path) {
eprintln!("Error exporting collection '{}': {}", name, e);
// Continue with the next collection
}
@@ -559,6 +560,26 @@ impl DocTree {
Ok(())
}
/// Exports a specific collection to IPFS synchronously, encrypting its files and generating a CSV manifest.
///
/// # Arguments
///
/// * `collection_name` - The name of the collection to export.
/// * `output_csv_path` - The path to save the output CSV file.
///
/// # Returns
///
/// Ok(()) on success or an error.
pub fn export_collection_to_ipfs(&self, collection_name: &str, output_csv_path: &Path) -> Result<()> {
// Get the collection
let collection = self.get_collection(collection_name)?;
// Export synchronously; Collection::export_to_ipfs creates its own tokio runtime internally
collection.export_to_ipfs(output_csv_path)?;
Ok(())
}
}
impl DocTreeBuilder {
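A usage sketch for the new synchronous DocTree entry point; the collection name and output path are placeholders, and the builder and loading calls mirror the CLI handler in the next file.

let storage = RedisStorage::new("redis://localhost:6379")?;
let mut doctree = DocTree::builder()
    .with_storage(storage)
    .with_doctree_name("default")
    .build()?;
doctree.load_collections_from_redis()?;
doctree.export_collection_to_ipfs("my_collection", Path::new("./exports/my_collection.csv"))?;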

View File

@@ -69,6 +69,13 @@ fn main() -> Result<()> {
.about("Delete all collections from Redis")
.arg(Arg::with_name("doctree").long("doctree").takes_value(true).help("Name of the doctree (default: 'default')")),
)
.subcommand(
SubCommand::with_name("export_to_ipfs")
.about("Export a collection to IPFS")
.arg(Arg::with_name("collection").required(true).help("Name of the collection"))
.arg(Arg::with_name("output").required(true).help("Output directory for IPFS export"))
.arg(Arg::with_name("doctree").long("doctree").takes_value(true).help("Name of the doctree (default: 'default')")),
)
.get_matches();
// Check if debug mode is enabled
@@ -342,6 +349,43 @@ fn main() -> Result<()> {
println!("Deleting collection '{}' from Redis in doctree '{}'...", collection, doctree_name);
doctree.delete_collection(collection)?;
println!("Collection '{}' deleted successfully", collection);
} else if let Some(matches) = matches.subcommand_matches("export_to_ipfs") {
let collection_name = matches.value_of("collection").unwrap();
let output_path = matches.value_of("output").unwrap();
let doctree_name = matches.value_of("doctree").unwrap_or("default");
if debug_mode {
println!("DEBUG: Exporting collection '{}' from doctree '{}' to IPFS output path '{}'",
collection_name, doctree_name, output_path);
}
// Create a storage with the specified doctree name
let storage = RedisStorage::new("redis://localhost:6379")?;
storage.set_doctree_name(doctree_name);
storage.set_debug(debug_mode);
if debug_mode {
println!("DEBUG: Connected to Redis storage");
}
// Create a DocTree with the specified doctree name
let mut doctree = DocTree::builder()
.with_storage(storage)
.with_doctree_name(doctree_name)
.build()?;
// Load collections from Redis
doctree.load_collections_from_redis()?;
// Verify the collection exists before exporting
doctree.get_collection(collection_name)?;
// Build the CSV manifest path inside the output directory and call the synchronous
// export_collection_to_ipfs function from the doctree crate
let output_dir = Path::new(output_path);
let csv_path = output_dir.join(format!("{}.csv", collection_name));
doctree.export_collection_to_ipfs(collection_name, &csv_path)?;
println!("Successfully exported collection '{}' to IPFS and generated metadata CSV at {:?}.", collection_name, csv_path);
} else if let Some(matches) = matches.subcommand_matches("reset") {
let doctree_name = matches.value_of("doctree").unwrap_or("default");
@@ -370,6 +414,6 @@ fn main() -> Result<()> {
} else {
println!("No command specified. Use --help for usage information.");
}
Ok(())
}
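With these changes the new subcommand can be invoked as, for example, export_to_ipfs my_collection ./exports --doctree default (collection name and output directory are placeholders): it loads the collections from Redis, uploads the selected collection's files to IPFS, and writes the my_collection.csv manifest into the given output directory.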