commit dbd44043cb (parent 7fa4125dc0)
@@ -15,10 +15,10 @@ use ipfs_api::{IpfsApi, IpfsClient};
pub struct Collection {
    /// Base path of the collection
    pub path: PathBuf,

    /// Name of the collection (namefixed)
    pub name: String,

    /// Redis storage backend
    pub storage: RedisStorage,
}
@@ -27,10 +27,10 @@ pub struct Collection {
pub struct CollectionBuilder {
    /// Base path of the collection
    path: PathBuf,

    /// Name of the collection (namefixed)
    name: String,

    /// Redis storage backend
    storage: Option<RedisStorage>,
}
@@ -53,7 +53,7 @@ impl Collection {
            storage: None,
        }
    }

    /// Scan walks over the path and finds all files and .md files
    /// It stores the relative positions in Redis
    ///
@@ -62,7 +62,7 @@ impl Collection {
    /// Ok(()) on success or an error
    pub fn scan(&self) -> Result<()> {
        println!("DEBUG: Scanning collection '{}' at path {:?}", self.name, self.path);

        // Delete existing collection data if any
        println!("DEBUG: Deleting existing collection data from Redis key 'collections:{}'", self.name);
        self.storage.delete_collection(&self.name)?;
@@ -71,11 +71,11 @@ impl Collection {
            .unwrap_or_else(|_| self.path.clone())
            .to_string_lossy()
            .to_string();

        println!("DEBUG: Storing collection path in Redis key 'collections:{}:path'", self.name);
        self.storage.store_collection_path(&self.name, &absolute_path)?;
        self.storage.store_collection_path(&self.name, &self.path.to_string_lossy())?;

        // Walk through the directory
        let walker = WalkDir::new(&self.path);
        for entry_result in walker {
@@ -88,18 +88,18 @@ impl Collection {
                    continue;
                }
            };

            // Skip directories
            if entry.file_type().is_dir() {
                continue;
            }

            // Skip files that start with a dot (.)
            let file_name = entry.file_name().to_string_lossy();
            if file_name.starts_with(".") {
                continue;
            }

            // Get the relative path from the base path
            let rel_path = match entry.path().strip_prefix(&self.path) {
                Ok(path) => path,
@@ -109,11 +109,11 @@ impl Collection {
                    continue;
                }
            };

            // Get the filename and apply namefix
            let filename = entry.file_name().to_string_lossy().to_string();
            let namefixed_filename = name_fix(&filename);

            // Determine if this is a document (markdown file) or an image
            let is_markdown = filename.to_lowercase().ends_with(".md");
            let is_image = filename.to_lowercase().ends_with(".png") ||
@@ -121,7 +121,7 @@ impl Collection {
                filename.to_lowercase().ends_with(".jpeg") ||
                filename.to_lowercase().ends_with(".gif") ||
                filename.to_lowercase().ends_with(".svg");

            let file_type = if is_markdown {
                "document"
            } else if is_image {
@@ -129,22 +129,22 @@ impl Collection {
            } else {
                "file"
            };

            // Store in Redis using the namefixed filename as the key
            // Store the original relative path to preserve case and special characters
            println!("DEBUG: Storing {} '{}' in Redis key 'collections:{}' with key '{}' and value '{}'",
                file_type, filename, self.name, namefixed_filename, rel_path.to_string_lossy());

            self.storage.store_collection_entry(
                &self.name,
                &namefixed_filename,
                &rel_path.to_string_lossy()
            )?;
        }

        Ok(())
    }

    /// Get a page by name and return its markdown content
    ///
    /// # Arguments
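Aside (not part of the commit): the scan loop above implies a simple Redis layout, one hash per collection plus a path key. A minimal sketch of that layout using the redis crate directly; the exact commands RedisStorage issues are an assumption based on the debug messages, and the names below are placeholders.

    use redis::Commands;

    fn sketch_layout() -> redis::RedisResult<()> {
        let client = redis::Client::open("redis://127.0.0.1:6379/")?;
        let mut con = client.get_connection()?;

        // Collection base path, as stored by store_collection_path
        let _: () = con.set("collections:docs:path", "/data/docs")?;

        // One hash field per scanned file: namefixed filename -> original relative path
        let _: () = con.hset("collections:docs", "my_page.md", "guides/My Page.md")?;
        let _: () = con.hset("collections:docs", "logo.png", "img/Logo.png")?;

        // page_get / file_get_url later resolve names against the same hash
        let rel: String = con.hget("collections:docs", "my_page.md")?;
        assert_eq!(rel, "guides/My Page.md");
        Ok(())
    }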
@@ -157,14 +157,14 @@ impl Collection {
    pub fn page_get(&self, page_name: &str) -> Result<String> {
        // Apply namefix to the page name
        let namefixed_page_name = name_fix(page_name);

        // Ensure it has .md extension
        let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

        // Get the relative path from Redis
        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
            .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;

        // Check if the path is valid
        if self.path.as_os_str().is_empty() {
            // If the path is empty, we're working with a collection loaded from Redis
@@ -174,18 +174,18 @@ impl Collection {
                format!("File path not available for {} in collection {}", page_name, self.name)
            )));
        }

        // Read the file
        let full_path = self.path.join(rel_path);
        let content = fs::read_to_string(full_path)
            .map_err(|e| DocTreeError::IoError(e))?;

        // Skip include processing at this level to avoid infinite recursion
        // Include processing will be done at the higher level

        Ok(content)
    }

    /// Create or update a page in the collection
    ///
    /// # Arguments
@@ -199,27 +199,27 @@ impl Collection {
    pub fn page_set(&self, page_name: &str, content: &str) -> Result<()> {
        // Apply namefix to the page name
        let namefixed_page_name = name_fix(page_name);

        // Ensure it has .md extension
        let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

        // Create the full path
        let full_path = self.path.join(&namefixed_page_name);

        // Create directories if needed
        if let Some(parent) = full_path.parent() {
            fs::create_dir_all(parent).map_err(DocTreeError::IoError)?;
        }

        // Write content to file
        fs::write(&full_path, content).map_err(DocTreeError::IoError)?;

        // Update Redis
        self.storage.store_collection_entry(&self.name, &namefixed_page_name, &namefixed_page_name)?;

        Ok(())
    }

    /// Delete a page from the collection
    ///
    /// # Arguments
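Aside (not part of the commit): a hedged usage sketch of the page API above; the crate path, re-exports, and the running Redis/collection setup are assumptions.

    use doctree::{Collection, Result}; // assumed re-exports

    fn page_roundtrip(collection: &Collection) -> Result<()> {
        // page_set namefixes the name, ensures a .md extension, writes the file,
        // and registers it under the collection's hash in Redis.
        collection.page_set("Getting Started", "# Getting Started\n\nHello!")?;

        // page_get resolves the same human-readable name back through Redis and
        // returns the raw markdown (includes are processed at a higher level).
        let markdown = collection.page_get("Getting Started")?;
        assert!(markdown.starts_with("# Getting Started"));
        Ok(())
    }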
@@ -232,24 +232,24 @@ impl Collection {
    pub fn page_delete(&self, page_name: &str) -> Result<()> {
        // Apply namefix to the page name
        let namefixed_page_name = name_fix(page_name);

        // Ensure it has .md extension
        let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

        // Get the relative path from Redis
        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
            .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;

        // Delete the file
        let full_path = self.path.join(rel_path);
        fs::remove_file(full_path).map_err(DocTreeError::IoError)?;

        // Remove from Redis
        self.storage.delete_collection_entry(&self.name, &namefixed_page_name)?;

        Ok(())
    }

    /// List all pages in the collection
    ///
    /// # Returns
@@ -258,15 +258,15 @@ impl Collection {
    pub fn page_list(&self) -> Result<Vec<String>> {
        // Get all keys from Redis
        let keys = self.storage.list_collection_entries(&self.name)?;

        // Filter to only include .md files
        let pages = keys.into_iter()
            .filter(|key| key.ends_with(".md"))
            .collect();

        Ok(pages)
    }

    /// Get the URL for a file
    ///
    /// # Arguments
@@ -279,17 +279,17 @@ impl Collection {
    pub fn file_get_url(&self, file_name: &str) -> Result<String> {
        // Apply namefix to the file name
        let namefixed_file_name = name_fix(file_name);

        // Get the relative path from Redis
        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
            .map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;

        // Construct a URL for the file
        let url = format!("/collections/{}/files/{}", self.name, rel_path);

        Ok(url)
    }

    /// Add or update a file in the collection
    ///
    /// # Arguments
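Aside (not part of the commit): file_get_url only builds a path-style URL from the stored relative path. A small sketch with placeholder names (the namefixed key and stored path here are an assumed scenario):

    fn link_logo(collection: &Collection) -> Result<()> {
        // For a collection named "docs" whose scan stored img/Logo.png under the
        // namefixed key "logo.png", this yields "/collections/docs/files/img/Logo.png".
        let url = collection.file_get_url("logo.png")?;
        println!("{}", url);
        Ok(())
    }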
@@ -303,24 +303,24 @@ impl Collection {
    pub fn file_set(&self, file_name: &str, content: &[u8]) -> Result<()> {
        // Apply namefix to the file name
        let namefixed_file_name = name_fix(file_name);

        // Create the full path
        let full_path = self.path.join(&namefixed_file_name);

        // Create directories if needed
        if let Some(parent) = full_path.parent() {
            fs::create_dir_all(parent).map_err(DocTreeError::IoError)?;
        }

        // Write content to file
        fs::write(&full_path, content).map_err(DocTreeError::IoError)?;

        // Update Redis
        self.storage.store_collection_entry(&self.name, &namefixed_file_name, &namefixed_file_name)?;

        Ok(())
    }

    /// Delete a file from the collection
    ///
    /// # Arguments
@@ -333,21 +333,21 @@ impl Collection {
    pub fn file_delete(&self, file_name: &str) -> Result<()> {
        // Apply namefix to the file name
        let namefixed_file_name = name_fix(file_name);

        // Get the relative path from Redis
        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
            .map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;

        // Delete the file
        let full_path = self.path.join(rel_path);
        fs::remove_file(full_path).map_err(DocTreeError::IoError)?;

        // Remove from Redis
        self.storage.delete_collection_entry(&self.name, &namefixed_file_name)?;

        Ok(())
    }

    /// List all files (non-markdown) in the collection
    ///
    /// # Returns
@@ -356,15 +356,15 @@ impl Collection {
    pub fn file_list(&self) -> Result<Vec<String>> {
        // Get all keys from Redis
        let keys = self.storage.list_collection_entries(&self.name)?;

        // Filter to exclude .md files
        let files = keys.into_iter()
            .filter(|key| !key.ends_with(".md"))
            .collect();

        Ok(files)
    }

    /// Get the relative path of a page in the collection
    ///
    /// # Arguments
@@ -377,15 +377,15 @@ impl Collection {
    pub fn page_get_path(&self, page_name: &str) -> Result<String> {
        // Apply namefix to the page name
        let namefixed_page_name = name_fix(page_name);

        // Ensure it has .md extension
        let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

        // Get the relative path from Redis
        self.storage.get_collection_entry(&self.name, &namefixed_page_name)
            .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))
    }

    /// Get a page by name and return its HTML content
    ///
    /// # Arguments
@@ -399,20 +399,20 @@ impl Collection {
    pub fn page_get_html(&self, page_name: &str, doctree: Option<&crate::doctree::DocTree>) -> Result<String> {
        // Get the markdown content
        let markdown = self.page_get(page_name)?;

        // Process includes if doctree is provided
        let processed_markdown = if let Some(dt) = doctree {
            process_includes(&markdown, &self.name, dt)?
        } else {
            markdown
        };

        // Convert markdown to HTML
        let html = markdown_to_html(&processed_markdown);

        Ok(html)
    }

    /// Get information about the Collection
    ///
    /// # Returns
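Aside (not part of the commit): a short sketch of the HTML path, assuming collection and doctree are already-built instances from this crate:

    fn render(collection: &Collection, doctree: &DocTree) -> Result<String> {
        // With a DocTree available, include directives are expanded before conversion.
        let html = collection.page_get_html("Getting Started", Some(doctree))?;

        // Without one, the raw markdown is converted as-is.
        let _plain = collection.page_get_html("Getting Started", None)?;
        Ok(html)
    }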
@@ -424,9 +424,8 @@ impl Collection {
        info.insert("path".to_string(), self.path.to_string_lossy().to_string());
        info
    }
-}

-/// Exports files and images from the collection to IPFS, encrypts them, and generates a CSV manifest.
+    /// Exports files and images from the collection to IPFS synchronously, encrypting them, and generating a CSV manifest.
    ///
    /// # Arguments
    ///
@@ -435,8 +434,16 @@ impl Collection {
    /// # Returns
    ///
    /// Ok(()) on success or an error.
-impl Collection {
-/// Exports files and images from the collection to IPFS, encrypts them, and generates a CSV manifest.
+    pub fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
+        // Create a new tokio runtime and block on the async export function
+        tokio::runtime::Runtime::new()?.block_on(async {
+            self.export_to_ipfs_async(output_csv_path).await
+        })?;
+
+        Ok(())
+    }
+
+    /// Exports files and images from the collection to IPFS asynchronously, encrypts them, and generates a CSV manifest.
    ///
    /// # Arguments
    ///
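Aside (not part of the commit): the new export_to_ipfs is the usual sync facade over an async implementation, build a Tokio runtime on the spot and block on the future. A stand-alone sketch of the same pattern with illustrative names (requires the tokio crate with the rt-multi-thread feature):

    use std::path::Path;

    async fn export_async(_csv: &Path) -> std::io::Result<()> {
        // ... upload to IPFS, write the CSV manifest ...
        Ok(())
    }

    // Synchronous entry point for callers that are not already inside a runtime.
    fn export_blocking(csv: &Path) -> std::io::Result<()> {
        // Runtime::new() builds a fresh runtime; block_on drives the future to
        // completion on the current thread.
        tokio::runtime::Runtime::new()?.block_on(export_async(csv))
    }

One caveat: constructing a runtime and calling block_on from code that is already running inside a Tokio runtime panics, so the synchronous wrapper should only be reached from plain synchronous callers such as the CLI handler added below.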
@@ -445,7 +452,7 @@ impl Collection {
    /// # Returns
    ///
    /// Ok(()) on success or an error.
-    pub async fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
+    pub async fn export_to_ipfs_async(&self, output_csv_path: &Path) -> Result<()> {
        use blake3::Hasher;
        // use chacha20poly1305::{ChaCha20Poly1305, Aead};
        use ipfs_api::IpfsClient;
@@ -570,7 +577,7 @@ impl CollectionBuilder {
        self.storage = Some(storage);
        self
    }

    /// Build the Collection
    ///
    /// # Returns
@@ -580,13 +587,13 @@ impl CollectionBuilder {
        let storage = self.storage.ok_or_else(|| {
            DocTreeError::MissingParameter("storage".to_string())
        })?;

        let collection = Collection {
            path: self.path,
            name: self.name,
            storage,
        };

        Ok(collection)
    }
}
@@ -1,3 +1,4 @@
+
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
@@ -551,7 +552,7 @@ impl DocTree {
        for (name, collection) in &self.collections {
            let csv_file_path = output_dir.join(format!("{}.csv", name));
            println!("DEBUG: Exporting collection '{}' to IPFS and generating CSV at {:?}", name, csv_file_path);
-            if let Err(e) = collection.export_to_ipfs(&csv_file_path).await {
+            if let Err(e) = collection.export_to_ipfs(&csv_file_path) {
                eprintln!("Error exporting collection '{}': {}", name, e);
                // Continue with the next collection
            }
@@ -559,6 +560,26 @@ impl DocTree {

        Ok(())
    }
+
+    /// Exports a specific collection to IPFS synchronously, encrypting its files and generating a CSV manifest.
+    ///
+    /// # Arguments
+    ///
+    /// * `collection_name` - The name of the collection to export.
+    /// * `output_csv_path` - The path to save the output CSV file.
+    ///
+    /// # Returns
+    ///
+    /// Ok(()) on success or an error.
+    pub fn export_collection_to_ipfs(&self, collection_name: &str, output_csv_path: &Path) -> Result<()> {
+        // Get the collection
+        let collection = self.get_collection(collection_name)?;
+
+        // Export the collection; the synchronous export_to_ipfs wrapper blocks on the async implementation internally
+        collection.export_to_ipfs(output_csv_path)?;
+
+        Ok(())
+    }
}

impl DocTreeBuilder {
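Aside (not part of the commit): library callers can drive the new method directly instead of going through the CLI; a minimal sketch with placeholder names and an assumed crate path:

    use std::path::Path;

    fn export_docs(doctree: &doctree::DocTree) -> doctree::Result<()> {
        // Blocks internally (see export_to_ipfs above); call from sync code only.
        doctree.export_collection_to_ipfs("docs", Path::new("/tmp/docs.csv"))
    }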
@@ -69,6 +69,13 @@ fn main() -> Result<()> {
                .about("Delete all collections from Redis")
                .arg(Arg::with_name("doctree").long("doctree").takes_value(true).help("Name of the doctree (default: 'default')")),
        )
+        .subcommand(
+            SubCommand::with_name("export_to_ipfs")
+                .about("Export a collection to IPFS")
+                .arg(Arg::with_name("collection").required(true).help("Name of the collection"))
+                .arg(Arg::with_name("output").required(true).help("Output directory for IPFS export"))
+                .arg(Arg::with_name("doctree").long("doctree").takes_value(true).help("Name of the doctree (default: 'default')")),
+        )
        .get_matches();

    // Check if debug mode is enabled
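Aside (not part of the commit): assuming the binary built from this main.rs is invoked as doctreecmd (the actual binary name is not shown in the diff), the new subcommand is used roughly as:

    doctreecmd export_to_ipfs mycollection /tmp/export --doctree default

with mycollection and /tmp/export standing in for the collection name and output directory.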
@@ -342,6 +349,43 @@ fn main() -> Result<()> {
        println!("Deleting collection '{}' from Redis in doctree '{}'...", collection, doctree_name);
        doctree.delete_collection(collection)?;
        println!("Collection '{}' deleted successfully", collection);
+    } else if let Some(matches) = matches.subcommand_matches("export_to_ipfs") {
+        let collection_name = matches.value_of("collection").unwrap();
+        let output_path = matches.value_of("output").unwrap();
+        let doctree_name = matches.value_of("doctree").unwrap_or("default");
+
+        if debug_mode {
+            println!("DEBUG: Exporting collection '{}' from doctree '{}' to IPFS output path '{}'",
+                collection_name, doctree_name, output_path);
+        }
+
+        // Create a storage with the specified doctree name
+        let storage = RedisStorage::new("redis://localhost:6379")?;
+        storage.set_doctree_name(doctree_name);
+        storage.set_debug(debug_mode);
+
+        if debug_mode {
+            println!("DEBUG: Connected to Redis storage");
+        }
+
+        // Create a DocTree with the specified doctree name
+        let mut doctree = DocTree::builder()
+            .with_storage(storage)
+            .with_doctree_name(doctree_name)
+            .build()?;
+
+        // Load collections from Redis
+        doctree.load_collections_from_redis()?;
+
+        // Verify the collection exists before exporting
+        let _collection = doctree.get_collection(collection_name)?;
+
+        // Call the synchronous export_collection_to_ipfs function from the doctree crate
+        let output_path = Path::new(output_path);
+        doctree.export_collection_to_ipfs(collection_name, output_path)?;
+
+        println!("Successfully exported collection '{}' to IPFS and generated metadata CSV at {:?}.", collection_name, output_path.join(format!("{}.csv", collection_name)));
+
    } else if let Some(matches) = matches.subcommand_matches("reset") {
        let doctree_name = matches.value_of("doctree").unwrap_or("default");

@@ -370,6 +414,6 @@ fn main() -> Result<()> {
    } else {
        println!("No command specified. Use --help for usage information.");
    }

    Ok(())
}