commit 29ccc54a4d
parent d609aa8094
@@ -463,13 +463,33 @@ impl Collection {
 use chacha20poly1305::aead::generic_array::GenericArray;
 
 
+// Create the output directory if it doesn't exist
 // Create the output directory if it doesn't exist
 if let Some(parent) = output_csv_path.parent() {
+    if parent.exists() && parent.is_file() {
+        println!("DEBUG: Removing conflicting file at output directory path: {:?}", parent);
+        tokio::fs::remove_file(parent).await.map_err(DocTreeError::IoError)?;
+        println!("DEBUG: Conflicting file removed.");
+    }
+    if !parent.is_dir() {
+        println!("DEBUG: Ensuring output directory exists: {:?}", parent);
         tokio::fs::create_dir_all(parent).await.map_err(DocTreeError::IoError)?;
+        println!("DEBUG: Output directory ensured.");
+    } else {
+        println!("DEBUG: Output directory already exists: {:?}", parent);
+    }
 }
 
 // Create the CSV writer
-let mut writer = Writer::from_path(output_csv_path).map_err(|e| DocTreeError::CsvError(e.to_string()))?;
+println!("DEBUG: Creating or overwriting CSV file at {:?}", output_csv_path);
+let file = std::fs::OpenOptions::new()
+    .write(true)
+    .create(true)
+    .truncate(true) // Add truncate option to overwrite if exists
+    .open(output_csv_path)
+    .map_err(DocTreeError::IoError)?;
+let mut writer = Writer::from_writer(file);
+println!("DEBUG: CSV writer created successfully");
 
 // Write the CSV header
 writer.write_record(&["collectionname", "filename", "blakehash", "ipfshash", "size"]).map_err(|e| DocTreeError::CsvError(e.to_string()))?;
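Note: the switch from Writer::from_path to an explicit OpenOptions handle is what guarantees an existing CSV is overwritten rather than appended to or left in a conflicting state. A minimal, self-contained sketch of the same pattern, assuming the csv crate and an illustrative output path (not the crate's actual module layout):

    use std::fs::OpenOptions;
    use std::path::Path;

    fn open_csv_writer(path: &Path) -> Result<csv::Writer<std::fs::File>, std::io::Error> {
        // Make sure the parent directory exists before opening the file.
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        // create(true) + truncate(true): create the file if missing, empty it if present.
        let file = OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true)
            .open(path)?;
        Ok(csv::Writer::from_writer(file))
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut writer = open_csv_writer(Path::new("export/collection.csv"))?;
        writer.write_record(&["collectionname", "filename", "blakehash", "ipfshash", "size"])?;
        writer.flush()?;
        Ok(())
    }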
@@ -486,10 +506,13 @@ impl Collection {
 let mut entries = pages;
 entries.extend(files);
 
+println!("DEBUG: Starting to process collection entries for IPFS export");
 for entry_name in entries {
+    println!("DEBUG: Processing entry: {}", entry_name);
     // Get the relative path from Redis
     let relative_path = self.storage.get_collection_entry(&self.name, &entry_name)
         .map_err(|_| DocTreeError::FileNotFound(entry_name.clone()))?;
+    println!("DEBUG: Retrieved relative path: {}", relative_path);
 
     let file_path = self.path.join(&relative_path);
 
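Note: get_collection_entry resolves each entry name to its relative path in Redis, and any lookup failure is surfaced as DocTreeError::FileNotFound for that entry, which the caller's ? then propagates. A rough stand-alone sketch of that mapping, with a HashMap standing in for the Redis-backed storage and a simplified error enum (both hypothetical, not the crate's real types):

    use std::collections::HashMap;

    // Simplified stand-in for the crate's error type.
    #[derive(Debug)]
    enum DocTreeError {
        FileNotFound(String),
    }

    // Stand-in for storage.get_collection_entry(collection, entry).
    fn get_relative_path(
        storage: &HashMap<String, String>,
        entry_name: &str,
    ) -> Result<String, DocTreeError> {
        // A missing key (or any storage failure) becomes FileNotFound for this entry.
        storage
            .get(entry_name)
            .cloned()
            .ok_or_else(|| DocTreeError::FileNotFound(entry_name.to_string()))
    }

    fn main() {
        let mut storage = HashMap::new();
        storage.insert("intro".to_string(), "pages/intro.md".to_string());
        println!("{:?}", get_relative_path(&storage, "intro"));   // Ok("pages/intro.md")
        println!("{:?}", get_relative_path(&storage, "missing")); // Err(FileNotFound("missing"))
    }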
@@ -534,16 +557,22 @@ impl Collection {
 // };
 
 // Add encrypted content to IPFS
+println!("DEBUG: Adding file to IPFS: {:?}", file_path);
 let ipfs_path = match ipfs.add(std::io::Cursor::new(content)).await {
-    Ok(path) => path,
+    Ok(path) => {
+        println!("DEBUG: Successfully added file to IPFS. Hash: {}", path.hash);
+        path
+    },
     Err(e) => {
         eprintln!("Error adding file to IPFS {:?}: {}", file_path, e);
         continue;
     }
 };
 let ipfs_hash = ipfs_path.hash.to_string();
+println!("DEBUG: IPFS hash: {}", ipfs_hash);
 
 // Write record to CSV
+println!("DEBUG: Writing CSV record for {:?}", file_path);
 if let Err(e) = writer.write_record(&[
     &self.name,
     &relative_path,
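Note: the Ok arm is expanded into a block so the hash can be logged before it is yielded from the match, while the Err arm still logs and skips the entry instead of aborting the whole export. A minimal sketch of that control flow with a hypothetical stand-in upload function (the real code calls the IPFS client, not this helper):

    // Hypothetical stand-in for the IPFS upload; returns a fake hash.
    fn add_to_ipfs(content: &[u8]) -> Result<String, String> {
        if content.is_empty() {
            return Err("empty content".to_string());
        }
        Ok(format!("Qm-fake-{}", content.len()))
    }

    fn main() {
        let entries = vec!["first file", "", "third file"];
        for (i, content) in entries.iter().enumerate() {
            let ipfs_hash = match add_to_ipfs(content.as_bytes()) {
                Ok(hash) => {
                    // Log inside the arm, then yield the value out of the match.
                    println!("DEBUG: Successfully added entry {}. Hash: {}", i, hash);
                    hash
                }
                Err(e) => {
                    // Skip this entry but keep exporting the rest.
                    eprintln!("Error adding entry {} to IPFS: {}", i, e);
                    continue;
                }
            };
            println!("DEBUG: IPFS hash: {}", ipfs_hash);
        }
    }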
@@ -554,10 +583,13 @@ impl Collection {
             eprintln!("Error writing CSV record for {:?}: {}", file_path, e);
             continue;
         }
+        println!("DEBUG: Successfully wrote CSV record for {:?}", file_path);
     }
 
     // Flush the CSV writer
+    println!("DEBUG: Flushing CSV writer");
     writer.flush().map_err(|e| DocTreeError::CsvError(e.to_string()))?;
+    println!("DEBUG: CSV writer flushed successfully");
 
     Ok(())
 }
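Note: csv::Writer buffers records, so the explicit flush bracketed by the new debug lines is the point at which the exported rows are guaranteed to reach the file, and a flush failure is reported instead of being silently lost on drop. A tiny sketch, assuming the csv crate and an illustrative output path:

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut writer = csv::Writer::from_path("/tmp/flush_demo.csv")?;
        writer.write_record(&["collectionname", "filename", "blakehash", "ipfshash", "size"])?;
        writer.write_record(&["docs", "intro.md", "blake-...", "Qm-...", "1234"])?;
        // Without this, buffered rows are only written when the writer is dropped,
        // and any I/O error at that point cannot be propagated to the caller.
        writer.flush()?;
        Ok(())
    }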
@@ -576,7 +576,8 @@ impl DocTree {
     let collection = self.get_collection(collection_name)?;
 
     // Create a new tokio runtime and block on the async export function
-    collection.export_to_ipfs(output_csv_path);
+    let csv_file_path = output_csv_path.join(format!("{}.csv", collection_name));
+    collection.export_to_ipfs(&csv_file_path)?;
 
     Ok(())
 }
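Note: the caller previously passed the output directory straight to export_to_ipfs and discarded the returned Result; it now builds a per-collection <name>.csv path inside that directory and propagates errors with ?. A small sketch of the path construction (names illustrative):

    use std::path::{Path, PathBuf};

    // Build the per-collection CSV path inside the chosen output directory.
    fn csv_path_for(output_dir: &Path, collection_name: &str) -> PathBuf {
        output_dir.join(format!("{}.csv", collection_name))
    }

    fn main() {
        let out = Path::new("/tmp/ipfs_export");
        // Prints /tmp/ipfs_export/my_collection.csv
        println!("{}", csv_path_for(out, "my_collection").display());
    }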
@@ -246,7 +246,7 @@ fn main() -> Result<()> {
 for page in pages {
     match collection.page_get_path(&page) {
         Ok(path) => {
-            println!(" - {} => Redis: {}:collections:{} / {}", path, doctree_name, collection.name, page);
+            println!(" - {}", path);
         },
         Err(_) => {
             println!(" - {}", page);
@@ -271,7 +271,7 @@ fn main() -> Result<()> {
 
 println!(" Images ({}):", images.len());
 for image in images {
-    println!(" - {} => Redis: {}:collections:{} / {}", image, doctree_name, collection.name, image);
+    println!(" - {}", image);
 }
 
 // Filter other files
@@ -285,7 +285,7 @@ fn main() -> Result<()> {
 
     println!(" Other Files ({}):", other_files.len());
     for file in other_files {
-        println!(" - {} => Redis: {}:collections:{} / {}", file, doctree_name, collection.name, file);
+        println!(" - {}", file);
     }
 },
 Err(e) => println!(" Error listing files: {}", e),