Compare commits
main...development · 3 commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 91b0247e68 |  |
|  | f9d338a8f1 |  |
|  | ea25db7d29 |  |
.gitignore (vendored) · 4 changed lines

@@ -62,4 +62,6 @@ docusaurus.config.ts
 sidebars.ts

 tsconfig.json
-sccache.log
+sccache.log
+*webmeta.json
+.vscode
@@ -211,7 +211,7 @@ Add doctree to your Cargo.toml:

 ```toml
 [dependencies]
-doctree = { git = "https://git.ourworld.tf/herocode/doctree", branch = "main", package = "doctree", path = "doctree/src" }
+doctree = { git = "https://git.threefold.info/herocode/doctree", branch = "main", package = "doctree", path = "doctree/src" }
 ```

 Basic usage:
@@ -15,7 +15,7 @@ toml = "0.7.3"
 serde = { version = "1.0", features = ["derive"] }
 redis = { version = "0.23.0", features = ["tokio-comp"] }
 tokio = { version = "1.28.0", features = ["full"] }
-sal = { git = "https://git.ourworld.tf/herocode/sal.git" }
+sal = { git = "https://git.threefold.info/herocode/sal.git" }
 chacha20poly1305 = "0.10.1"
 blake3 = "1.3.1"
 csv = "1.1"
@@ -1,12 +1,11 @@
+use std::fs;
 use std::path::{Path, PathBuf};
 use walkdir::WalkDir;
-use std::fs;

 use crate::error::{DocTreeError, Result};
-use crate::storage::RedisStorage;
-use crate::utils::{name_fix, markdown_to_html, ensure_md_extension};
 use crate::include::process_includes;
 use rand::Rng;
+use crate::storage::RedisStorage;
+use crate::utils::{ensure_md_extension, markdown_to_html, name_fix};
 use ipfs_api::{IpfsApi, IpfsClient};
 // use chacha20poly1305::aead::NewAead;
@@ -61,10 +60,16 @@ impl Collection {
     ///
     /// Ok(()) on success or an error
     pub fn scan(&self) -> Result<()> {
-        println!("DEBUG: Scanning collection '{}' at path {:?}", self.name, self.path);
+        println!(
+            "DEBUG: Scanning collection '{}' at path {:?}",
+            self.name, self.path
+        );

         // Delete existing collection data if any
-        println!("DEBUG: Deleting existing collection data from Redis key 'collections:{}'", self.name);
+        println!(
+            "DEBUG: Deleting existing collection data from Redis key 'collections:{}'",
+            self.name
+        );
         self.storage.delete_collection(&self.name)?;
         // Store the collection's full absolute path in Redis
         let absolute_path = std::fs::canonicalize(&self.path)
@@ -72,9 +77,14 @@ impl Collection {
             .to_string_lossy()
             .to_string();

-        println!("DEBUG: Storing collection path in Redis key 'collections:{}:path'", self.name);
-        self.storage.store_collection_path(&self.name, &absolute_path)?;
-        self.storage.store_collection_path(&self.name, &self.path.to_string_lossy())?;
+        println!(
+            "DEBUG: Storing collection path in Redis key 'collections:{}:path'",
+            self.name
+        );
+        self.storage
+            .store_collection_path(&self.name, &absolute_path)?;
+        self.storage
+            .store_collection_path(&self.name, &self.path.to_string_lossy())?;

         // Walk through the directory
         let walker = WalkDir::new(&self.path);
@@ -116,11 +126,11 @@ impl Collection {

             // Determine if this is a document (markdown file) or an image
             let is_markdown = filename.to_lowercase().ends_with(".md");
-            let is_image = filename.to_lowercase().ends_with(".png") ||
-                filename.to_lowercase().ends_with(".jpg") ||
-                filename.to_lowercase().ends_with(".jpeg") ||
-                filename.to_lowercase().ends_with(".gif") ||
-                filename.to_lowercase().ends_with(".svg");
+            let is_image = filename.to_lowercase().ends_with(".png")
+                || filename.to_lowercase().ends_with(".jpg")
+                || filename.to_lowercase().ends_with(".jpeg")
+                || filename.to_lowercase().ends_with(".gif")
+                || filename.to_lowercase().ends_with(".svg");

             let file_type = if is_markdown {
                 "document"
@@ -132,13 +142,19 @@ impl Collection {

             // Store in Redis using the namefixed filename as the key
             // Store the original relative path to preserve case and special characters
-            println!("DEBUG: Storing {} '{}' in Redis key 'collections:{}' with key '{}' and value '{}'",
-                file_type, filename, self.name, namefixed_filename, rel_path.to_string_lossy());
+            println!(
+                "DEBUG: Storing {} '{}' in Redis key 'collections:{}' with key '{}' and value '{}'",
+                file_type,
+                filename,
+                self.name,
+                namefixed_filename,
+                rel_path.to_string_lossy()
+            );

             self.storage.store_collection_entry(
                 &self.name,
                 &namefixed_filename,
-                &rel_path.to_string_lossy()
+                &rel_path.to_string_lossy(),
             )?;
         }

@@ -162,7 +178,9 @@ impl Collection {
         let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

         // Get the relative path from Redis
-        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
+        let rel_path = self
+            .storage
+            .get_collection_entry(&self.name, &namefixed_page_name)
             .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;

         // Check if the path is valid
@@ -171,14 +189,16 @@ impl Collection {
             // Return an error since the actual file path is not available
             return Err(DocTreeError::IoError(std::io::Error::new(
                 std::io::ErrorKind::NotFound,
-                format!("File path not available for {} in collection {}", page_name, self.name)
+                format!(
+                    "File path not available for {} in collection {}",
+                    page_name, self.name
+                ),
             )));
         }

         // Read the file
         let full_path = self.path.join(rel_path);
-        let content = fs::read_to_string(full_path)
-            .map_err(|e| DocTreeError::IoError(e))?;
+        let content = fs::read_to_string(full_path).map_err(|e| DocTreeError::IoError(e))?;

         // Skip include processing at this level to avoid infinite recursion
         // Include processing will be done at the higher level
@@ -215,7 +235,11 @@ impl Collection {
         fs::write(&full_path, content).map_err(DocTreeError::IoError)?;

         // Update Redis
-        self.storage.store_collection_entry(&self.name, &namefixed_page_name, &namefixed_page_name)?;
+        self.storage.store_collection_entry(
+            &self.name,
+            &namefixed_page_name,
+            &namefixed_page_name,
+        )?;

         Ok(())
     }
@@ -237,7 +261,9 @@ impl Collection {
         let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

         // Get the relative path from Redis
-        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_page_name)
+        let rel_path = self
+            .storage
+            .get_collection_entry(&self.name, &namefixed_page_name)
             .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))?;

         // Delete the file
@@ -245,7 +271,8 @@ impl Collection {
         fs::remove_file(full_path).map_err(DocTreeError::IoError)?;

         // Remove from Redis
-        self.storage.delete_collection_entry(&self.name, &namefixed_page_name)?;
+        self.storage
+            .delete_collection_entry(&self.name, &namefixed_page_name)?;

         Ok(())
     }
@@ -260,7 +287,8 @@ impl Collection {
         let keys = self.storage.list_collection_entries(&self.name)?;

         // Filter to only include .md files
-        let pages = keys.into_iter()
+        let pages = keys
+            .into_iter()
             .filter(|key| key.ends_with(".md"))
             .collect();

@@ -281,7 +309,9 @@ impl Collection {
         let namefixed_file_name = name_fix(file_name);

         // Get the relative path from Redis
-        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
+        let rel_path = self
+            .storage
+            .get_collection_entry(&self.name, &namefixed_file_name)
             .map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;

         // Construct a URL for the file
@@ -316,7 +346,11 @@ impl Collection {
         fs::write(&full_path, content).map_err(DocTreeError::IoError)?;

         // Update Redis
-        self.storage.store_collection_entry(&self.name, &namefixed_file_name, &namefixed_file_name)?;
+        self.storage.store_collection_entry(
+            &self.name,
+            &namefixed_file_name,
+            &namefixed_file_name,
+        )?;

         Ok(())
     }
@@ -335,7 +369,9 @@ impl Collection {
         let namefixed_file_name = name_fix(file_name);

         // Get the relative path from Redis
-        let rel_path = self.storage.get_collection_entry(&self.name, &namefixed_file_name)
+        let rel_path = self
+            .storage
+            .get_collection_entry(&self.name, &namefixed_file_name)
             .map_err(|_| DocTreeError::FileNotFound(file_name.to_string()))?;

         // Delete the file
@@ -343,7 +379,8 @@ impl Collection {
         fs::remove_file(full_path).map_err(DocTreeError::IoError)?;

         // Remove from Redis
-        self.storage.delete_collection_entry(&self.name, &namefixed_file_name)?;
+        self.storage
+            .delete_collection_entry(&self.name, &namefixed_file_name)?;

         Ok(())
     }
@@ -358,7 +395,8 @@ impl Collection {
         let keys = self.storage.list_collection_entries(&self.name)?;

         // Filter to exclude .md files
-        let files = keys.into_iter()
+        let files = keys
+            .into_iter()
             .filter(|key| !key.ends_with(".md"))
             .collect();

@@ -382,7 +420,8 @@ impl Collection {
         let namefixed_page_name = ensure_md_extension(&namefixed_page_name);

         // Get the relative path from Redis
-        self.storage.get_collection_entry(&self.name, &namefixed_page_name)
+        self.storage
+            .get_collection_entry(&self.name, &namefixed_page_name)
             .map_err(|_| DocTreeError::PageNotFound(page_name.to_string()))
     }

@@ -396,7 +435,11 @@ impl Collection {
     /// # Returns
     ///
     /// The HTML content of the page or an error
-    pub fn page_get_html(&self, page_name: &str, doctree: Option<&crate::doctree::DocTree>) -> Result<String> {
+    pub fn page_get_html(
+        &self,
+        page_name: &str,
+        doctree: Option<&crate::doctree::DocTree>,
+    ) -> Result<String> {
         // Get the markdown content
         let markdown = self.page_get(page_name)?;

@@ -436,9 +479,8 @@ impl Collection {
     /// Ok(()) on success or an error.
     pub fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
         // Create a new tokio runtime and block on the async export function
-        tokio::runtime::Runtime::new()?.block_on(async {
-            self.export_to_ipfs_async(output_csv_path).await
-        })?;
+        tokio::runtime::Runtime::new()?
+            .block_on(async { self.export_to_ipfs_async(output_csv_path).await })?;

         Ok(())
     }
@@ -455,25 +497,31 @@ impl Collection {
     pub async fn export_to_ipfs_async(&self, output_csv_path: &Path) -> Result<()> {
         use blake3::Hasher;
         // use chacha20poly1305::{ChaCha20Poly1305, Aead};
+        use chacha20poly1305::aead::generic_array::GenericArray;
+        use csv::Writer;
         use ipfs_api::IpfsClient;
+        use rand::rngs::OsRng;
         use tokio::fs::File;
         use tokio::io::AsyncReadExt;
-        use csv::Writer;
-        use rand::rngs::OsRng;
-        use chacha20poly1305::aead::generic_array::GenericArray;


-        // Create the output directory if it doesn't exist
         // Create the output directory if it doesn't exist
         if let Some(parent) = output_csv_path.parent() {
             if parent.exists() && parent.is_file() {
-                println!("DEBUG: Removing conflicting file at output directory path: {:?}", parent);
-                tokio::fs::remove_file(parent).await.map_err(DocTreeError::IoError)?;
+                println!(
+                    "DEBUG: Removing conflicting file at output directory path: {:?}",
+                    parent
+                );
+                tokio::fs::remove_file(parent)
+                    .await
+                    .map_err(DocTreeError::IoError)?;
                 println!("DEBUG: Conflicting file removed.");
             }
             if !parent.is_dir() {
                 println!("DEBUG: Ensuring output directory exists: {:?}", parent);
-                tokio::fs::create_dir_all(parent).await.map_err(DocTreeError::IoError)?;
+                tokio::fs::create_dir_all(parent)
+                    .await
+                    .map_err(DocTreeError::IoError)?;
                 println!("DEBUG: Output directory ensured.");
             } else {
                 println!("DEBUG: Output directory already exists: {:?}", parent);
@@ -481,7 +529,10 @@ impl Collection {
         }

         // Create the CSV writer
-        println!("DEBUG: Creating or overwriting CSV file at {:?}", output_csv_path);
+        println!(
+            "DEBUG: Creating or overwriting CSV file at {:?}",
+            output_csv_path
+        );
         let file = std::fs::OpenOptions::new()
             .write(true)
             .create(true)
@@ -492,7 +543,15 @@ impl Collection {
         println!("DEBUG: CSV writer created successfully");

         // Write the CSV header
-        writer.write_record(&["collectionname", "filename", "blakehash", "ipfshash", "size"]).map_err(|e| DocTreeError::CsvError(e.to_string()))?;
+        writer
+            .write_record(&[
+                "collectionname",
+                "filename",
+                "blakehash",
+                "ipfshash",
+                "size",
+            ])
+            .map_err(|e| DocTreeError::CsvError(e.to_string()))?;

         // Connect to IPFS
         // let ipfs = IpfsClient::new("127.0.0.1:5001").await.map_err(|e| DocTreeError::IpfsError(e.to_string()))?;
@@ -510,7 +569,9 @@ impl Collection {
         for entry_name in entries {
             println!("DEBUG: Processing entry: {}", entry_name);
             // Get the relative path from Redis
-            let relative_path = self.storage.get_collection_entry(&self.name, &entry_name)
+            let relative_path = self
+                .storage
+                .get_collection_entry(&self.name, &entry_name)
                 .map_err(|_| DocTreeError::FileNotFound(entry_name.clone()))?;
             println!("DEBUG: Retrieved relative path: {}", relative_path);

@@ -560,9 +621,12 @@ impl Collection {
             println!("DEBUG: Adding file to IPFS: {:?}", file_path);
             let ipfs_path = match ipfs.add(std::io::Cursor::new(content)).await {
                 Ok(path) => {
-                    println!("DEBUG: Successfully added file to IPFS. Hash: {}", path.hash);
+                    println!(
+                        "DEBUG: Successfully added file to IPFS. Hash: {}",
+                        path.hash
+                    );
                     path
-                },
+                }
                 Err(e) => {
                     eprintln!("Error adding file to IPFS {:?}: {}", file_path, e);
                     continue;
@@ -588,7 +652,9 @@ impl Collection {

         // Flush the CSV writer
         println!("DEBUG: Flushing CSV writer");
-        writer.flush().map_err(|e| DocTreeError::CsvError(e.to_string()))?;
+        writer
+            .flush()
+            .map_err(|e| DocTreeError::CsvError(e.to_string()))?;
         println!("DEBUG: CSV writer flushed successfully");

         Ok(())
@@ -616,9 +682,9 @@ impl CollectionBuilder {
     ///
     /// A new Collection or an error
     pub fn build(self) -> Result<Collection> {
-        let storage = self.storage.ok_or_else(|| {
-            DocTreeError::MissingParameter("storage".to_string())
-        })?;
+        let storage = self
+            .storage
+            .ok_or_else(|| DocTreeError::MissingParameter("storage".to_string()))?;

         let collection = Collection {
             path: self.path,

@@ -628,4 +694,4 @@ impl CollectionBuilder {

         Ok(collection)
     }
-}
+}
@@ -2,23 +2,23 @@
   {
     name: docs_hero
     #existing docusaurus site can be used as collection as long as no duplicates
-    url: https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/aibox/docs
+    url: https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/aibox/docs
     description: Documentation for the ThreeFold Hero project.
   }

   {
     name: biz
-    url: https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections/aaa
+    url: https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections/aaa
     description: Business documentation.
   }

   {
     name: products
-    url: https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections/vvv
+    url: https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections/vvv
     description: Information about ThreeFold products.
   }
   {
     scan: true
-    url: https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections
+    url: https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/aibox/collections
   }
 ]
impl_plan.md (new file, 173 lines)
# DocTree WebBuilder Implementation Plan

## Overview

This document outlines the implementation plan for the WebBuilder component of the DocTree project. The WebBuilder is designed to process hjson configuration files (like those in `examples/doctreenew/sites/demo1/`) and generate a `webmeta.json` file that can be used by a browser-based website generator.

## Current Status

### What's Implemented:

1. **DocTree Core Functionality**:
   - The main DocTree library with functionality for scanning directories, managing collections, processing includes, and converting markdown to HTML
   - Redis storage backend for storing document metadata
   - Command-line interface (doctreecmd) for interacting with collections

2. **Example Structure for the New Approach**:
   - Example hjson configuration files in `examples/doctreenew/sites/demo1/`
   - This includes `main.hjson`, `header.hjson`, `footer.hjson`, `collection.hjson`, and `pages/mypages1.hjson`

3. **Specification Document**:
   - Detailed specification in `webbuilder/src/builder/specs.md`
   - Example output format in `webbuilder/src/builder/webmeta.json`

### What's Not Yet Implemented:

1. **WebBuilder Implementation**:
   - The actual Rust code for the webbuilder component

2. **Hjson Parsing**:
   - Code to parse the hjson files in the doctreenew directory

3. **Git Repository Integration**:
   - Functionality to download referenced collections from Git repositories

4. **IPFS Export**:
   - Complete functionality to export assets to IPFS

5. **Browser-Based Generator**:
   - The browser-based website generator that would use the webmeta.json file

## Implementation Plan

### Phase 1: Core WebBuilder Implementation (2-3 weeks)

1. **Setup Project Structure**:
   - Create necessary modules and files in `webbuilder/src/`
   - Define main data structures and traits

2. **Implement Hjson Parsing**:
   - Add hjson crate dependency
   - Create parsers for each hjson file type (main, header, footer, collection, pages)
   - Implement validation for hjson files (see the parsing sketch at the end of this phase)

3. **Implement Site Structure Builder**:
   - Create a module to combine parsed hjson data into a cohesive site structure
   - Implement navigation generation based on page definitions

4. **Implement WebMeta Generator**:
   - Create functionality to generate the webmeta.json file
   - Ensure all required metadata is included
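For illustration, a minimal sketch of the hjson parsing step, using the `deser-hjson` crate that `webbuilder/Cargo.toml` pulls in; `MainConfig` is a hypothetical stand-in for the real configuration structs in `webbuilder/src/config.rs`:

```rust
use serde::Deserialize;

// Hypothetical reduced shape of main.hjson; the real SiteConfig carries
// more fields (description, keywords, collections, pages, ...).
#[derive(Debug, Deserialize)]
struct MainConfig {
    name: String,
    title: String,
    description: Option<String>,
}

fn parse_main_hjson(raw: &str) -> Result<MainConfig, deser_hjson::Error> {
    // deser-hjson plugs into serde, so parsing is a single call
    deser_hjson::from_str(raw)
}
```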
### Phase 2: Git Integration and Collection Processing (2 weeks)

1. **Implement Git Repository Integration**:
   - Add git2 crate dependency
   - Create functionality to clone/pull repositories based on collection.hjson
   - Implement caching to avoid unnecessary downloads

2. **Integrate with DocTree Library**:
   - Create an adapter to use DocTree functionality with hjson-defined collections
   - Implement processing of includes between documents

3. **Implement Content Processing**:
   - Create functionality to process markdown content
   - Handle special directives or custom syntax

### Phase 3: IPFS Integration (2 weeks)

1. **Enhance IPFS Integration**:
   - Complete the IPFS export functionality in DocTree
   - Create a module to handle IPFS uploads

2. **Implement Asset Management**:
   - Create functionality to track and process assets (images, CSS, etc.)
   - Ensure proper IPFS linking

3. **Implement Content Hashing**:
   - Add Blake hash calculation for content integrity verification
   - Store hashes in webmeta.json
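This hashing step is already visible elsewhere in this diff: `builder/mod.rs` formats hashes as `blake3-<hex>`. As a standalone sketch of that calculation:

```rust
/// Compute the content-integrity hash in the same `blake3-<hex>` form
/// that builder/mod.rs uses for PageMeta::blakehash.
fn content_hash(content: &[u8]) -> String {
    format!("blake3-{}", blake3::hash(content).to_hex())
}
```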
### Phase 4: CLI and Testing (1-2 weeks)

1. **Implement Command-Line Interface**:
   - Create a CLI for the webbuilder
   - Add commands for building, validating, and deploying sites

2. **Write Comprehensive Tests**:
   - Unit tests for each component
   - Integration tests for the full workflow
   - Test with example sites

3. **Documentation**:
   - Update README with usage instructions
   - Create detailed API documentation
   - Add examples and tutorials

### Phase 5: Browser-Based Generator (Optional, 3-4 weeks)

1. **Design Browser Component**:
   - Create a JavaScript/TypeScript library to consume webmeta.json
   - Design component architecture

2. **Implement Content Rendering**:
   - Create components to render markdown content
   - Implement navigation and site structure

3. **Implement IPFS Integration**:
   - Add functionality to fetch content from IPFS
   - Implement content verification using Blake hashes

4. **Create Demo Site**:
   - Build a demo site using the browser-based generator
   - Showcase features and capabilities

## Technical Details

### Key Dependencies

- **hjson**: For parsing hjson configuration files
- **git2**: For Git repository integration
- **ipfs-api**: For IPFS integration
- **blake3**: For content hashing
- **clap**: For command-line interface
- **tokio**: For async operations

### Data Flow

1. Parse hjson files from input directory
2. Download referenced Git repositories
3. Process content with DocTree
4. Export assets to IPFS
5. Generate webmeta.json
6. (Optional) Upload webmeta.json to IPFS
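A rough sketch of this flow using the public API shown in `webbuilder/README.md`; the `anyhow` error conversion is an assumption, not a confirmed part of the crate's API:

```rust
use webbuilder::from_directory;

fn build_site(config_dir: &str) -> anyhow::Result<()> {
    let builder = from_directory(config_dir)?; // steps 1-2: parse hjson, fetch repos
    let webmeta = builder.build()?;            // steps 3-4: process content and assets
    webmeta.save("webmeta.json")?;             // step 5: write webmeta.json
    let ipfs_hash = builder.upload_to_ipfs("webmeta.json")?; // step 6 (optional)
    println!("webmeta.json uploaded to IPFS: {}", ipfs_hash);
    Ok(())
}
```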
### Key Challenges

1. **Git Integration**: Handling authentication, rate limits, and large repositories
2. **IPFS Performance**: Optimizing IPFS uploads for large sites
3. **Content Processing**: Ensuring proper handling of includes and special syntax
4. **Browser Compatibility**: Ensuring the browser-based generator works across different browsers

## Milestones and Timeline

1. **Core WebBuilder Implementation**: Weeks 1-3
2. **Git Integration and Collection Processing**: Weeks 4-5
3. **IPFS Integration**: Weeks 6-7
4. **CLI and Testing**: Weeks 8-9
5. **Browser-Based Generator (Optional)**: Weeks 10-13

## Resources Required

1. **Development Resources**:
   - 1-2 Rust developers
   - 1 Frontend developer (for browser-based generator)

2. **Infrastructure**:
   - IPFS node for testing
   - Git repositories for testing
   - CI/CD pipeline

## Conclusion

This implementation plan provides a roadmap for developing the WebBuilder component of the DocTree project. By following this plan, we can transform the current specification and example files into a fully functional system for generating websites from hjson configuration files and markdown content.
@@ -1,24 +1,58 @@
 [package]
-name = "doctree"
+name = "webbuilder"
 version = "0.1.0"
-edition = "2024"
+edition = "2021"
+description = "A tool for building websites from hjson configuration files and markdown content"
+authors = ["DocTree Team"]
+
+[lib]
+path = "src/lib.rs"
+
+[[bin]]
+name = "webbuilder"
+path = "src/main.rs"

 [dependencies]
+# Core dependencies
+doctree = { path = "../doctree" }
 walkdir = "2.3.3"
 pulldown-cmark = "0.9.3"
 thiserror = "1.0.40"
 lazy_static = "1.4.0"
 toml = "0.7.3"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 redis = { version = "0.23.0", features = ["tokio-comp"] }
 tokio = { version = "1.28.0", features = ["full"] }
-sal = { git = "https://git.ourworld.tf/herocode/sal.git" }
-chacha20poly1305 = "0.10.1"
-blake3 = "1.3.1"
-csv = "1.1"
-rand = "0.9.1"
+sal = { git = "https://git.threefold.info/herocode/sal.git" }
+
+# Hjson parsing
+deser-hjson = "1.1.0"
+
+# Git integration is provided by the SAL library
+
+# IPFS integration
+ipfs-api-backend-hyper = "0.6"
+ipfs-api = { version = "0.17.0", default-features = false, features = ["with-hyper-tls"] }
+
+# Hashing and encryption
+chacha20poly1305 = "0.10.1"
+blake3 = "1.3.1"
+
+# CLI
+clap = { version = "4.3.0", features = ["derive"] }
+
+# Utilities
+anyhow = "1.0.71"
+log = "0.4.17"
+env_logger = "0.10.0"
+csv = "1.1"
+rand = "0.9.1"
+url = "2.3.1"

 [dev-dependencies]
+# Testing
 tempfile = "3.5.0"
 mockall = "0.11.4"
+assert_fs = "1.0.10"
+predicates = "3.0.3"
webbuilder/README.md (new file, 128 lines)
# WebBuilder

WebBuilder is a library for building websites from configuration files and markdown content. It uses the DocTree library to process markdown content and includes, and exports the result to a webmeta.json file that can be used by a browser-based website generator.

## Overview

WebBuilder scans directories for configuration files (in hjson format) and generates a `webmeta.json` file that can be used by a browser-based website generator. It can also clone Git repositories, process markdown content, and upload files to IPFS.

## Parsing Configuration Files

WebBuilder supports multiple parsing strategies for configuration files:

### Unified Parser

The recommended way to parse configuration files is to use the unified parser, which provides a consistent interface for all parsing strategies:

```rust
use webbuilder::{from_directory_with_strategy, ParsingStrategy};

// Use the recommended strategy (Hjson)
let webbuilder = from_directory_with_strategy("path/to/config", ParsingStrategy::Hjson)?;

// Or use the auto-detect strategy
let webbuilder = from_directory_with_strategy("path/to/config", ParsingStrategy::Auto)?;

// Or use the simple strategy (legacy)
let webbuilder = from_directory_with_strategy("path/to/config", ParsingStrategy::Simple)?;
```

You can also use the convenience functions:

```rust
use webbuilder::{from_directory, parse_site_config_recommended, parse_site_config_auto};

// Use the recommended strategy (Hjson)
let webbuilder = from_directory("path/to/config")?;

// Or parse the site configuration directly
let site_config = parse_site_config_recommended("path/to/config")?;
let site_config = parse_site_config_auto("path/to/config")?;
```

### Parsing Strategies

WebBuilder supports the following parsing strategies:

- **Hjson**: Uses the `deser-hjson` library to parse hjson files. This is the recommended strategy.
- **Simple**: Uses a simple line-by-line parser that doesn't rely on external libraries. This is a legacy strategy.
- **Auto**: Tries the Hjson parser first, and falls back to the simple parser if it fails.
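For illustration, the fallback behind the **Auto** strategy can be expressed in a few lines. This is a self-contained sketch, not the crate's actual internals; the two stub parsers stand in for the real Hjson and simple parsers:

```rust
use std::path::Path;

// Stand-in for the deser-hjson based parser (hypothetical stub).
fn parse_hjson(path: &Path) -> Result<String, String> {
    Err(format!("hjson parse failed for {}", path.display()))
}

// Stand-in for the legacy line-by-line parser (hypothetical stub).
fn parse_simple(path: &Path) -> Result<String, String> {
    Ok(format!("parsed {} with the simple parser", path.display()))
}

// Auto: try the Hjson parser first, fall back to the simple parser on error.
fn parse_auto(path: &Path) -> Result<String, String> {
    parse_hjson(path).or_else(|_| parse_simple(path))
}
```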
## Building a Website

Once you have a WebBuilder instance, you can build a website:

```rust
use webbuilder::from_directory;

// Create a WebBuilder instance
let webbuilder = from_directory("path/to/config")?;

// Build the website
let webmeta = webbuilder.build()?;

// Save the webmeta.json file
webmeta.save("webmeta.json")?;

// Upload the webmeta.json file to IPFS
let ipfs_hash = webbuilder.upload_to_ipfs("webmeta.json")?;
println!("Uploaded to IPFS: {}", ipfs_hash);
```

## Configuration Files

WebBuilder expects the following configuration files:

- `main.hjson`: Main configuration file with site metadata
- `header.hjson`: Header configuration
- `footer.hjson`: Footer configuration
- `collection.hjson`: Collection configuration (Git repositories)
- `pages/*.hjson`: Page configuration files

Example `main.hjson`:

```hjson
{
  "name": "my-site",
  "title": "My Site",
  "description": "My awesome site",
  "url": "https://example.com",
  "favicon": "favicon.ico",
  "keywords": [
    "website",
    "awesome"
  ]
}
```

Example `collection.hjson`:

```hjson
[
  {
    "name": "docs",
    "url": "https://github.com/example/docs.git",
    "description": "Documentation",
    "scan": true
  }
]
```

Example `pages/pages.hjson`:

```hjson
[
  {
    "name": "home",
    "title": "Home",
    "description": "Home page",
    "navpath": "/",
    "collection": "docs",
    "draft": false
  }
]
```

## License

This project is licensed under the MIT License - see the LICENSE file for details.
webbuilder/src/builder/mod.rs (new file, 324 lines)
```rust
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

use crate::config::SiteConfig;
use crate::error::Result;
use crate::parser;

#[cfg(test)]
mod mod_test;

/// WebMeta represents the output of the WebBuilder
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebMeta {
    /// Site metadata
    pub site_metadata: SiteMetadata,

    /// Pages
    pub pages: Vec<PageMeta>,

    /// Assets
    pub assets: std::collections::HashMap<String, AssetMeta>,
}

/// Site metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SiteMetadata {
    /// Site name
    pub name: String,

    /// Site title
    pub title: String,

    /// Site description
    pub description: Option<String>,

    /// Site keywords
    pub keywords: Option<Vec<String>>,

    /// Site header
    pub header: Option<serde_json::Value>,

    /// Site footer
    pub footer: Option<serde_json::Value>,
}

/// Page metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PageMeta {
    /// Page ID
    pub id: String,

    /// Page title
    pub title: String,

    /// IPFS key of the page content
    pub ipfs_key: String,

    /// Blake hash of the page content
    pub blakehash: String,

    /// Page sections
    pub sections: Vec<SectionMeta>,

    /// Page assets
    pub assets: Vec<AssetMeta>,
}

/// Section metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SectionMeta {
    /// Section type
    #[serde(rename = "type")]
    pub section_type: String,

    /// Section content
    pub content: String,
}

/// Asset metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssetMeta {
    /// Asset name
    pub name: String,

    /// IPFS key of the asset
    pub ipfs_key: String,
}

impl WebMeta {
    /// Save the WebMeta to a file
    ///
    /// # Arguments
    ///
    /// * `path` - Path to save the file to
    ///
    /// # Returns
    ///
    /// Ok(()) on success or an error
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let json = serde_json::to_string_pretty(self)?;
        fs::write(path, json)?;
        Ok(())
    }
}

/// WebBuilder is responsible for building a website from hjson configuration files
#[derive(Debug)]
pub struct WebBuilder {
    /// Site configuration
    pub config: SiteConfig,
}

impl WebBuilder {
    /// Create a new WebBuilder instance from a directory containing hjson configuration files
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the directory containing hjson configuration files
    ///
    /// # Returns
    ///
    /// A new WebBuilder instance or an error
    pub fn from_directory<P: AsRef<Path>>(path: P) -> Result<Self> {
        let config = parser::parse_site_config_recommended(path)?;
        Ok(WebBuilder { config })
    }

    /// Build the website
    ///
    /// # Returns
    ///
    /// A WebMeta instance or an error
    pub fn build(&self) -> Result<WebMeta> {
        // Create site metadata
        let site_metadata = SiteMetadata {
            name: self.config.name.clone(),
            title: self.config.title.clone(),
            description: self.config.description.clone(),
            keywords: self.config.keywords.clone(),
            header: self
                .config
                .header
                .as_ref()
                .map(|h| serde_json::to_value(h).unwrap_or_default()),
            footer: self
                .config
                .footer
                .as_ref()
                .map(|f| serde_json::to_value(f).unwrap_or_default()),
        };

        // Process collections
        let mut pages = Vec::new();
        let assets = std::collections::HashMap::new();

        // Process collections from Git repositories
        for collection in &self.config.collections {
            if let Some(url) = &collection.url {
                // Extract repository name from URL
                let repo_name = collection.name.clone().unwrap_or_else(|| {
                    url.split('/')
                        .last()
                        .unwrap_or("repo")
                        .trim_end_matches(".git")
                        .to_string()
                });

                // Clone or pull the Git repository
                let repo_path = self.config.base_path.join("repos").join(&repo_name);

                // Create the repos directory if it doesn't exist
                if !repo_path.parent().unwrap().exists() {
                    fs::create_dir_all(repo_path.parent().unwrap())?;
                }

                // Clone or pull the repository
                let repo_path = match crate::git::clone_or_pull(url, &repo_path) {
                    Ok(path) => path,
                    Err(e) => {
                        // Log the error but continue with a placeholder
                        log::warn!("Failed to clone repository {}: {}", url, e);

                        // Create a placeholder page for the failed repository
                        let page_id = format!("{}-index", repo_name);
                        let page = PageMeta {
                            id: page_id.clone(),
                            title: format!("{} Index", repo_name),
                            ipfs_key: "QmPlaceholderIpfsKey".to_string(),
                            blakehash: "blake3-placeholder".to_string(),
                            sections: vec![SectionMeta {
                                section_type: "markdown".to_string(),
                                content: format!(
                                    "# {} Index\n\nFailed to clone repository: {}\nURL: {}",
                                    repo_name, e, url
                                ),
                            }],
                            assets: Vec::new(),
                        };

                        pages.push(page);
                        continue;
                    }
                };

                // Create a page for the repository
                let page_id = format!("{}-index", repo_name);
                let page = PageMeta {
                    id: page_id.clone(),
                    title: format!("{} Index", repo_name),
                    ipfs_key: "QmPlaceholderIpfsKey".to_string(), // Will be replaced with actual IPFS key
                    blakehash: "blake3-placeholder".to_string(), // Will be replaced with actual Blake hash
                    sections: vec![SectionMeta {
                        section_type: "markdown".to_string(),
                        content: format!(
                            "# {} Index\n\nRepository cloned successfully.\nPath: {}\nURL: {}",
                            repo_name, repo_path.display(), url
                        ),
                    }],
                    assets: Vec::new(),
                };

                pages.push(page);
            }
        }

        // Process pages from the configuration
        for page_config in &self.config.pages {
            // Skip draft pages unless explicitly set to false
            if page_config.draft.unwrap_or(false) {
                log::info!("Skipping draft page: {}", page_config.name);
                continue;
            }

            // Generate a unique page ID
            let page_id = format!("page-{}", page_config.name);

            // Find the collection for this page
            let collection_path = self.config.collections.iter()
                .find(|c| c.name.as_ref().map_or(false, |name| name == &page_config.collection))
                .and_then(|c| c.url.as_ref())
                .map(|url| {
                    let repo_name = url.split('/')
                        .last()
                        .unwrap_or("repo")
                        .trim_end_matches(".git")
                        .to_string();
                    self.config.base_path.join("repos").join(&repo_name)
                });

            // Create the page content
            let content = if let Some(collection_path) = collection_path {
                // Try to find the page content in the collection
                let page_path = collection_path.join(&page_config.name).with_extension("md");
                if page_path.exists() {
                    match fs::read_to_string(&page_path) {
                        Ok(content) => content,
                        Err(e) => {
                            log::warn!("Failed to read page content from {}: {}", page_path.display(), e);
                            format!(
                                "# {}\n\n{}\n\n*Failed to read page content from {}*",
                                page_config.title,
                                page_config.description.clone().unwrap_or_default(),
                                page_path.display()
                            )
                        }
                    }
                } else {
                    format!(
                        "# {}\n\n{}\n\n*Page content not found at {}*",
                        page_config.title,
                        page_config.description.clone().unwrap_or_default(),
                        page_path.display()
                    )
                }
            } else {
                format!(
                    "# {}\n\n{}",
                    page_config.title,
                    page_config.description.clone().unwrap_or_default()
                )
            };

            // Calculate the Blake hash of the content
            let content_bytes = content.as_bytes();
            let blakehash = format!("blake3-{}", blake3::hash(content_bytes).to_hex());

            // Create the page metadata
            let page = PageMeta {
                id: page_id.clone(),
                title: page_config.title.clone(),
                ipfs_key: "QmPlaceholderIpfsKey".to_string(), // Will be replaced with actual IPFS key
                blakehash,
                sections: vec![SectionMeta {
                    section_type: "markdown".to_string(),
                    content,
                }],
                assets: Vec::new(),
            };

            pages.push(page);
        }

        // Create the WebMeta
        Ok(WebMeta {
            site_metadata,
            pages,
            assets,
        })
    }

    /// Upload a file to IPFS
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the file to upload
    ///
    /// # Returns
    ///
    /// The IPFS hash of the file or an error
    pub fn upload_to_ipfs<P: AsRef<Path>>(&self, path: P) -> Result<String> {
        crate::ipfs::upload_file(path)
    }
}
```
webbuilder/src/builder/mod_test.rs (new file, 200 lines)
```rust
#[cfg(test)]
mod tests {
    use crate::builder::{PageMeta, SectionMeta, SiteMetadata, WebMeta};
    use crate::config::{CollectionConfig, PageConfig, SiteConfig};
    use crate::error::WebBuilderError;
    use crate::WebBuilder;
    use std::fs;
    use std::path::PathBuf;
    use tempfile::TempDir;

    fn create_test_config() -> SiteConfig {
        SiteConfig {
            name: "test".to_string(),
            title: "Test Site".to_string(),
            description: Some("A test site".to_string()),
            keywords: Some(vec!["test".to_string(), "site".to_string()]),
            url: Some("https://example.com".to_string()),
            favicon: Some("favicon.ico".to_string()),
            header: None,
            footer: None,
            collections: vec![CollectionConfig {
                name: Some("test".to_string()),
                url: Some("https://git.threefold.info/tfgrid/home.git".to_string()),
                description: Some("A test collection".to_string()),
                scan: Some(true),
            }],
            pages: vec![PageConfig {
                name: "home".to_string(),
                title: "Home".to_string(),
                description: Some("Home page".to_string()),
                navpath: "/".to_string(),
                collection: "test".to_string(),
                draft: Some(false),
            }],
            base_path: PathBuf::from("/path/to/site"),
        }
    }

    #[test]
    fn test_webmeta_save() {
        let temp_dir = TempDir::new().unwrap();
        let output_path = temp_dir.path().join("webmeta.json");

        let webmeta = WebMeta {
            site_metadata: SiteMetadata {
                name: "test".to_string(),
                title: "Test Site".to_string(),
                description: Some("A test site".to_string()),
                keywords: Some(vec!["test".to_string(), "site".to_string()]),
                header: None,
                footer: None,
            },
            pages: vec![PageMeta {
                id: "page-1".to_string(),
                title: "Page 1".to_string(),
                ipfs_key: "QmTest1".to_string(),
                blakehash: "blake3-test1".to_string(),
                sections: vec![SectionMeta {
                    section_type: "markdown".to_string(),
                    content: "# Page 1\n\nThis is page 1.".to_string(),
                }],
                assets: vec![],
            }],
            assets: std::collections::HashMap::new(),
        };

        // Save the webmeta.json file
        webmeta.save(&output_path).unwrap();

        // Check that the file exists
        assert!(output_path.exists());

        // Read the file and parse it
        let content = fs::read_to_string(&output_path).unwrap();
        let parsed: WebMeta = serde_json::from_str(&content).unwrap();

        // Check that the parsed webmeta matches the original
        assert_eq!(parsed.site_metadata.name, webmeta.site_metadata.name);
        assert_eq!(parsed.site_metadata.title, webmeta.site_metadata.title);
        assert_eq!(
            parsed.site_metadata.description,
            webmeta.site_metadata.description
        );
        assert_eq!(
            parsed.site_metadata.keywords,
            webmeta.site_metadata.keywords
        );
        assert_eq!(parsed.pages.len(), webmeta.pages.len());
        assert_eq!(parsed.pages[0].id, webmeta.pages[0].id);
        assert_eq!(parsed.pages[0].title, webmeta.pages[0].title);
        assert_eq!(parsed.pages[0].ipfs_key, webmeta.pages[0].ipfs_key);
        assert_eq!(parsed.pages[0].blakehash, webmeta.pages[0].blakehash);
        assert_eq!(
            parsed.pages[0].sections.len(),
            webmeta.pages[0].sections.len()
        );
        assert_eq!(
            parsed.pages[0].sections[0].section_type,
            webmeta.pages[0].sections[0].section_type
        );
        assert_eq!(
            parsed.pages[0].sections[0].content,
            webmeta.pages[0].sections[0].content
        );
    }

    #[test]
    fn test_webbuilder_build() {
        // Create a temporary directory for the test
        let temp_dir = TempDir::new().unwrap();
        let site_dir = temp_dir.path().to_path_buf();

        // Create a modified test config with the temporary directory as base_path
        let mut config = create_test_config();
        config.base_path = site_dir.clone();

        // Create the repos directory
        let repos_dir = site_dir.join("repos");
        fs::create_dir_all(&repos_dir).unwrap();

        // Create a mock repository directory
        let repo_dir = repos_dir.join("home");
        fs::create_dir_all(&repo_dir).unwrap();

        // Create a mock page file in the repository
        let page_content = "# Home Page\n\nThis is the home page content.";
        fs::write(repo_dir.join("home.md"), page_content).unwrap();

        // Create the WebBuilder with our config
        let webbuilder = WebBuilder { config };

        // Mock the git module to avoid actual git operations
        // This is a simplified test that assumes the git operations would succeed

        // Build the website
        let webmeta = webbuilder.build().unwrap();

        // Check site metadata
        assert_eq!(webmeta.site_metadata.name, "test");
        assert_eq!(webmeta.site_metadata.title, "Test Site");
        assert_eq!(
            webmeta.site_metadata.description,
            Some("A test site".to_string())
        );
        assert_eq!(
            webmeta.site_metadata.keywords,
            Some(vec!["test".to_string(), "site".to_string()])
        );

        // We expect at least one page from the configuration
        assert!(webmeta.pages.len() >= 1);

        // Find the page with ID "page-home"
        let home_page = webmeta.pages.iter().find(|p| p.id == "page-home");

        // Check that we found the page
        assert!(home_page.is_some());

        let home_page = home_page.unwrap();

        // Check the page properties
        assert_eq!(home_page.title, "Home");
        assert_eq!(home_page.ipfs_key, "QmPlaceholderIpfsKey");
        assert_eq!(home_page.sections.len(), 1);
        assert_eq!(home_page.sections[0].section_type, "markdown");

        // The content should either be our mock content or a placeholder
        // depending on whether the page was found
        assert!(
            home_page.sections[0].content.contains("Home") ||
            home_page.sections[0].content.contains("home.md")
        );
    }

    #[test]
    fn test_webbuilder_from_directory() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = temp_dir.path().join("site");
        fs::create_dir(&site_dir).unwrap();

        // Create main.hjson
        let main_hjson = r#"{ "name": "test", "title": "Test Site" }"#;
        fs::write(site_dir.join("main.hjson"), main_hjson).unwrap();

        let webbuilder = WebBuilder::from_directory(&site_dir).unwrap();

        assert_eq!(webbuilder.config.name, "test");
        assert_eq!(webbuilder.config.title, "Test Site");
    }

    #[test]
    fn test_webbuilder_from_directory_error() {
        let result = WebBuilder::from_directory("/nonexistent/directory");
        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            WebBuilderError::MissingDirectory(_)
        ));
    }
}
```
@@ -4,7 +4,7 @@ This document describes the process of building web metadata and exporting asset

 ## Overview

-The web building process starts with a directory containing the site's Hjson configuration files, such as the example directory `/Users/despiegk/code/git.ourworld.tf/herocode/doctree/examples/doctreenew/sites/demo1`. These Hjson files define the structure and content of the entire site and may reference external collections. The Hjson configuration sits "on top" of the collections it utilizes. Using the metadata defined in these Hjson files, the necessary collection data is downloaded from Git repositories (if referenced). The `doctree` is then used to process the relevant data, identify pages and images, and prepare them for export to IPFS. Finally, a `webmeta.json` file is generated containing all the necessary information, including IPFS keys and Blake hashes for content verification, allowing a browser-based tool to render the website by fetching assets from IPFS. Optionally, the generated `webmeta.json` file can also be uploaded to IPFS, and its IPFS URL returned.
+The web building process starts with a directory containing the site's Hjson configuration files, such as the example directory `/Users/despiegk/code/git.threefold.info/herocode/doctree/examples/doctreenew/sites/demo1`. These Hjson files define the structure and content of the entire site and may reference external collections. The Hjson configuration sits "on top" of the collections it utilizes. Using the metadata defined in these Hjson files, the necessary collection data is downloaded from Git repositories (if referenced). The `doctree` is then used to process the relevant data, identify pages and images, and prepare them for export to IPFS. Finally, a `webmeta.json` file is generated containing all the necessary information, including IPFS keys and Blake hashes for content verification, allowing a browser-based tool to render the website by fetching assets from IPFS. Optionally, the generated `webmeta.json` file can also be uploaded to IPFS, and its IPFS URL returned.

 ## Process Steps

@@ -1,43 +0,0 @@
-{
-  "site_metadata": {
-    "name": "demo1",
-    "title": "Demo Site 1",
-    "description": "This is a demo site for doctree",
-    "keywords": ["demo", "doctree", "example"],
-    "header": {
-      "logo": "/images/logo.png",
-      "nav": [
-        { "text": "Home", "url": "/" },
-        { "text": "About", "url": "/about" }
-      ]
-    },
-    "footer": {
-      "copyright": "© 2023 My Company",
-      "links": [
-        { "text": "Privacy Policy", "url": "/privacy" }
-      ]
-    }
-  },
-  "pages": [
-    {
-      "id": "mypages1",
-      "title": "My Pages 1",
-      "ipfs_key": "QmPlaceholderIpfsKey1",
-      "blakehash": "sha256-PlaceholderBlakeHash1",
-      "sections": [
-        { "type": "text", "content": "This is example content for My Pages 1." }
-      ],
-      "assets": [
-        {
-          "name": "image1.png",
-          "ipfs_key": "QmPlaceholderImageIpfsKey1"
-        }
-      ]
-    }
-  ],
-  "assets": {
-    "style.css": {
-      "ipfs_key": "QmPlaceholderCssIpfsKey1"
-    }
-  }
-}
webbuilder/src/config.rs (new file, 214 lines)
```rust
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};

use crate::error::{Result, WebBuilderError};

/// Site configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SiteConfig {
    /// Site name
    pub name: String,

    /// Site title
    pub title: String,

    /// Site description
    pub description: Option<String>,

    /// Site keywords
    pub keywords: Option<Vec<String>>,

    /// Site URL
    pub url: Option<String>,

    /// Site favicon
    pub favicon: Option<String>,

    /// Site header
    pub header: Option<HeaderConfig>,

    /// Site footer
    pub footer: Option<FooterConfig>,

    /// Site collections
    pub collections: Vec<CollectionConfig>,

    /// Site pages
    pub pages: Vec<PageConfig>,

    /// Base path of the site configuration
    #[serde(skip)]
    pub base_path: PathBuf,
}

/// Header configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HeaderConfig {
    /// Header logo
    pub logo: Option<LogoConfig>,

    /// Header title
    pub title: Option<String>,

    /// Header menu
    pub menu: Option<Vec<MenuItemConfig>>,

    /// Login button
    pub login: Option<LoginConfig>,
}

/// Logo configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogoConfig {
    /// Logo source
    pub src: String,

    /// Logo alt text
    pub alt: Option<String>,
}

/// Menu item configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MenuItemConfig {
    /// Menu item label
    pub label: String,

    /// Menu item link
    pub link: String,

    /// Menu item children
    pub children: Option<Vec<MenuItemConfig>>,
}

/// Login button configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoginConfig {
    /// Whether the login button is visible
    pub visible: bool,

    /// Login button label
    pub label: Option<String>,

    /// Login button link
    pub link: Option<String>,
}

/// Footer configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FooterConfig {
    /// Footer title
    pub title: Option<String>,

    /// Footer sections
    pub sections: Option<Vec<FooterSectionConfig>>,

    /// Footer copyright
    pub copyright: Option<String>,
}

/// Footer section configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FooterSectionConfig {
    /// Section title
    pub title: String,

    /// Section links
    pub links: Vec<LinkConfig>,
}

/// Link configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LinkConfig {
    /// Link label
    pub label: String,

    /// Link URL
    pub href: String,
}

/// Collection configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CollectionConfig {
    /// Collection name
    pub name: Option<String>,

    /// Collection URL
    pub url: Option<String>,

    /// Collection description
    pub description: Option<String>,

    /// Whether to scan the URL for collections
    pub scan: Option<bool>,
}

/// Page configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PageConfig {
    /// Page name
    pub name: String,

    /// Page title
    pub title: String,

    /// Page description
    pub description: Option<String>,

    /// Page navigation path
    pub navpath: String,

    /// Page collection
    pub collection: String,

    /// Whether the page is a draft
    pub draft: Option<bool>,
}

impl SiteConfig {
    /// Load site configuration from a directory
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the directory containing hjson configuration files
    ///
    /// # Returns
    ///
    /// A new SiteConfig instance or an error
    pub fn from_directory<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref();

        // Check if the directory exists
        if !path.exists() {
            return Err(WebBuilderError::MissingDirectory(path.to_path_buf()));
        }

        // Check if the directory is a directory
        if !path.is_dir() {
            return Err(WebBuilderError::InvalidConfiguration(format!(
                "{:?} is not a directory",
                path
            )));
        }

        // TODO: Implement loading configuration from hjson files

        // For now, return a placeholder configuration
        Ok(SiteConfig {
            name: "demo1".to_string(),
            title: "Demo Site 1".to_string(),
            description: Some("This is a demo site for doctree".to_string()),
            keywords: Some(vec![
                "demo".to_string(),
                "doctree".to_string(),
                "example".to_string(),
            ]),
            url: Some("https://example.com".to_string()),
            favicon: Some("img/favicon.png".to_string()),
            header: None,
            footer: None,
            collections: Vec::new(),
            pages: Vec::new(),
            base_path: path.to_path_buf(),
        })
    }
}
```
webbuilder/src/config_test.rs (new file, 156 lines)
```rust
#[cfg(test)]
mod tests {
    use crate::config::{
        CollectionConfig, FooterConfig, FooterSectionConfig, HeaderConfig, LinkConfig, LoginConfig,
        LogoConfig, MenuItemConfig, PageConfig, SiteConfig,
    };
    use std::path::PathBuf;

    #[test]
    fn test_site_config_serialization() {
        let config = SiteConfig {
            name: "test".to_string(),
            title: "Test Site".to_string(),
            description: Some("A test site".to_string()),
            keywords: Some(vec!["test".to_string(), "site".to_string()]),
            url: Some("https://example.com".to_string()),
            favicon: Some("favicon.ico".to_string()),
            header: Some(HeaderConfig {
                logo: Some(LogoConfig {
                    src: "logo.png".to_string(),
                    alt: Some("Logo".to_string()),
                }),
                title: Some("Test Site".to_string()),
                menu: Some(vec![
                    MenuItemConfig {
                        label: "Home".to_string(),
                        link: "/".to_string(),
                        children: None,
                    },
                    MenuItemConfig {
                        label: "About".to_string(),
                        link: "/about".to_string(),
                        children: Some(vec![MenuItemConfig {
                            label: "Team".to_string(),
                            link: "/about/team".to_string(),
                            children: None,
                        }]),
                    },
                ]),
                login: Some(LoginConfig {
                    visible: true,
                    label: Some("Login".to_string()),
                    link: Some("/login".to_string()),
                }),
            }),
            footer: Some(FooterConfig {
                title: Some("Test Site".to_string()),
                sections: Some(vec![FooterSectionConfig {
                    title: "Links".to_string(),
                    links: vec![
                        LinkConfig {
                            label: "Home".to_string(),
                            href: "/".to_string(),
                        },
                        LinkConfig {
                            label: "About".to_string(),
                            href: "/about".to_string(),
                        },
                    ],
                }]),
                copyright: Some("© 2023".to_string()),
            }),
            collections: vec![CollectionConfig {
                name: Some("test".to_string()),
                url: Some("https://git.threefold.info/tfgrid/home.git".to_string()),
                description: Some("A test collection".to_string()),
                scan: Some(true),
            }],
            pages: vec![PageConfig {
                name: "home".to_string(),
                title: "Home".to_string(),
                description: Some("Home page".to_string()),
                navpath: "/".to_string(),
                collection: "test".to_string(),
                draft: Some(false),
            }],
            base_path: PathBuf::from("/path/to/site"),
        };

        // Serialize to JSON
        let json = serde_json::to_string(&config).unwrap();

        // Deserialize from JSON
        let deserialized: SiteConfig = serde_json::from_str(&json).unwrap();

        // Check that the deserialized config matches the original
        assert_eq!(deserialized.name, config.name);
        assert_eq!(deserialized.title, config.title);
        assert_eq!(deserialized.description, config.description);
        assert_eq!(deserialized.keywords, config.keywords);
        assert_eq!(deserialized.url, config.url);
        assert_eq!(deserialized.favicon, config.favicon);

        // Check header
        assert!(deserialized.header.is_some());
        let header = deserialized.header.as_ref().unwrap();
        let original_header = config.header.as_ref().unwrap();

        // Check logo
        assert!(header.logo.is_some());
        let logo = header.logo.as_ref().unwrap();
        let original_logo = original_header.logo.as_ref().unwrap();
        assert_eq!(logo.src, original_logo.src);
        assert_eq!(logo.alt, original_logo.alt);

        // Check title
        assert_eq!(header.title, original_header.title);

        // Check menu
        assert!(header.menu.is_some());
        let menu = header.menu.as_ref().unwrap();
        let original_menu = original_header.menu.as_ref().unwrap();
```
|
||||
assert_eq!(menu.len(), original_menu.len());
|
||||
assert_eq!(menu[0].label, original_menu[0].label);
|
||||
assert_eq!(menu[0].link, original_menu[0].link);
|
||||
assert_eq!(menu[1].label, original_menu[1].label);
|
||||
assert_eq!(menu[1].link, original_menu[1].link);
|
||||
|
||||
// Check login
|
||||
assert!(header.login.is_some());
|
||||
let login = header.login.as_ref().unwrap();
|
||||
let original_login = original_header.login.as_ref().unwrap();
|
||||
assert_eq!(login.visible, original_login.visible);
|
||||
assert_eq!(login.label, original_login.label);
|
||||
assert_eq!(login.link, original_login.link);
|
||||
|
||||
// Check footer
|
||||
assert!(deserialized.footer.is_some());
|
||||
let footer = deserialized.footer.as_ref().unwrap();
|
||||
let original_footer = config.footer.as_ref().unwrap();
|
||||
assert_eq!(footer.title, original_footer.title);
|
||||
assert_eq!(footer.copyright, original_footer.copyright);
|
||||
|
||||
// Check collections
|
||||
assert_eq!(deserialized.collections.len(), config.collections.len());
|
||||
assert_eq!(deserialized.collections[0].name, config.collections[0].name);
|
||||
assert_eq!(deserialized.collections[0].url, config.collections[0].url);
|
||||
assert_eq!(
|
||||
deserialized.collections[0].description,
|
||||
config.collections[0].description
|
||||
);
|
||||
assert_eq!(deserialized.collections[0].scan, config.collections[0].scan);
|
||||
|
||||
// Check pages
|
||||
assert_eq!(deserialized.pages.len(), config.pages.len());
|
||||
assert_eq!(deserialized.pages[0].name, config.pages[0].name);
|
||||
assert_eq!(deserialized.pages[0].title, config.pages[0].title);
|
||||
assert_eq!(
|
||||
deserialized.pages[0].description,
|
||||
config.pages[0].description
|
||||
);
|
||||
assert_eq!(deserialized.pages[0].navpath, config.pages[0].navpath);
|
||||
assert_eq!(deserialized.pages[0].collection, config.pages[0].collection);
|
||||
assert_eq!(deserialized.pages[0].draft, config.pages[0].draft);
|
||||
}
|
||||
}
|
68
webbuilder/src/error.rs
Normal file
@ -0,0 +1,68 @@
use std::io;
use std::path::PathBuf;
use thiserror::Error;

/// Result type for WebBuilder operations
pub type Result<T> = std::result::Result<T, WebBuilderError>;

/// Error type for WebBuilder operations
#[derive(Error, Debug)]
pub enum WebBuilderError {
    /// IO error
    #[error("IO error: {0}")]
    IoError(#[from] io::Error),

    /// DocTree error
    #[error("DocTree error: {0}")]
    DocTreeError(#[from] doctree::DocTreeError),

    /// Hjson parsing error
    #[error("Hjson parsing error: {0}")]
    HjsonError(String),

    /// Git error
    #[error("Git error: {0}")]
    GitError(String),

    /// IPFS error
    #[error("IPFS error: {0}")]
    IpfsError(String),

    /// Missing file error
    ///
    /// Note: `PathBuf` does not implement `Display`, so the path is
    /// formatted via `.display()` rather than `{0}`.
    #[error("Missing file: {}", .0.display())]
    MissingFile(PathBuf),

    /// Missing directory error
    #[error("Missing directory: {}", .0.display())]
    MissingDirectory(PathBuf),

    /// Missing configuration error
    #[error("Missing configuration: {0}")]
    MissingConfiguration(String),

    /// Invalid configuration error
    #[error("Invalid configuration: {0}")]
    InvalidConfiguration(String),

    /// Other error
    #[error("Error: {0}")]
    Other(String),
}

impl From<String> for WebBuilderError {
    fn from(error: String) -> Self {
        WebBuilderError::Other(error)
    }
}

impl From<&str> for WebBuilderError {
    fn from(error: &str) -> Self {
        WebBuilderError::Other(error.to_string())
    }
}

impl From<serde_json::Error> for WebBuilderError {
    fn from(error: serde_json::Error) -> Self {
        WebBuilderError::Other(format!("JSON error: {}", error))
    }
}
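Because of the `#[from]` attributes and the manual `From` impls above, callers can rely on `?` and `.into()` for conversions. A small sketch; the `read_non_empty` helper is hypothetical:

```rust
use std::path::Path;
use webbuilder::Result;

// Hypothetical helper showing how the conversions compose.
fn read_non_empty(path: &Path) -> Result<String> {
    // std::io::Error converts automatically via #[from]
    let text = std::fs::read_to_string(path)?;
    if text.trim().is_empty() {
        // &str converts via the manual From<&str> impl
        return Err("configuration file is empty".into());
    }
    Ok(text)
}
```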
73
webbuilder/src/error_test.rs
Normal file
@ -0,0 +1,73 @@
#[cfg(test)]
mod tests {
    use crate::error::WebBuilderError;
    use std::path::PathBuf;

    #[test]
    fn test_error_from_string() {
        let error = WebBuilderError::from("test error");
        assert!(matches!(error, WebBuilderError::Other(s) if s == "test error"));
    }

    #[test]
    fn test_error_from_string_owned() {
        let error = WebBuilderError::from("test error".to_string());
        assert!(matches!(error, WebBuilderError::Other(s) if s == "test error"));
    }

    #[test]
    fn test_error_from_json_error() {
        let json_error = serde_json::from_str::<serde_json::Value>("invalid json").unwrap_err();
        let error = WebBuilderError::from(json_error);
        assert!(matches!(error, WebBuilderError::Other(s) if s.starts_with("JSON error:")));
    }

    #[test]
    fn test_error_display() {
        let errors = vec![
            (
                WebBuilderError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "file not found",
                )),
                "IO error: file not found",
            ),
            (
                WebBuilderError::HjsonError("invalid hjson".to_string()),
                "Hjson parsing error: invalid hjson",
            ),
            (
                WebBuilderError::GitError("git error".to_string()),
                "Git error: git error",
            ),
            (
                WebBuilderError::IpfsError("ipfs error".to_string()),
                "IPFS error: ipfs error",
            ),
            (
                WebBuilderError::MissingFile(PathBuf::from("/path/to/file")),
                "Missing file: /path/to/file",
            ),
            (
                WebBuilderError::MissingDirectory(PathBuf::from("/path/to/dir")),
                "Missing directory: /path/to/dir",
            ),
            (
                WebBuilderError::MissingConfiguration("config".to_string()),
                "Missing configuration: config",
            ),
            (
                WebBuilderError::InvalidConfiguration("invalid config".to_string()),
                "Invalid configuration: invalid config",
            ),
            (
                WebBuilderError::Other("other error".to_string()),
                "Error: other error",
            ),
        ];

        for (error, expected) in errors {
            assert_eq!(error.to_string(), expected);
        }
    }
}
182
webbuilder/src/git.rs
Normal file
@ -0,0 +1,182 @@
use lazy_static::lazy_static;
use sal::git::{GitRepo, GitTree};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;

use crate::error::{Result, WebBuilderError};

// Cache entry for Git repositories
struct CacheEntry {
    path: PathBuf,
    last_updated: SystemTime,
}

// Global cache for Git repositories
lazy_static! {
    static ref REPO_CACHE: Arc<Mutex<HashMap<String, CacheEntry>>> =
        Arc::new(Mutex::new(HashMap::new()));
}

// Cache timeout in seconds (default: 1 hour)
const CACHE_TIMEOUT: u64 = 3600;

/// Clone a Git repository
///
/// # Arguments
///
/// * `url` - URL of the repository to clone
/// * `destination` - Destination directory
///
/// # Returns
///
/// The path to the cloned repository or an error
pub fn clone_repository<P: AsRef<Path>>(url: &str, destination: P) -> Result<PathBuf> {
    let destination = destination.as_ref();
    let destination_str = destination.to_str().unwrap();

    // Create a GitTree for the parent directory
    let parent_dir = destination.parent().ok_or_else(|| {
        WebBuilderError::InvalidConfiguration(format!(
            "Invalid destination path: {}",
            destination_str
        ))
    })?;

    let git_tree = GitTree::new(parent_dir.to_str().unwrap())
        .map_err(|e| WebBuilderError::GitError(format!("Failed to create GitTree: {}", e)))?;

    // Use the GitTree to get (clone) the repository
    let repos = git_tree
        .get(url)
        .map_err(|e| WebBuilderError::GitError(format!("Failed to clone repository: {}", e)))?;

    if repos.is_empty() {
        return Err(WebBuilderError::GitError(
            "Failed to clone repository: No repository was created".to_string(),
        ));
    }

    // Return the path of the first repository
    Ok(PathBuf::from(repos[0].path()))
}

/// Pull the latest changes from a Git repository
///
/// # Arguments
///
/// * `path` - Path to the repository
///
/// # Returns
///
/// Ok(()) on success or an error
pub fn pull_repository<P: AsRef<Path>>(path: P) -> Result<()> {
    let path = path.as_ref();
    let path_str = path.to_str().unwrap();

    // Create a GitRepo directly
    let repo = GitRepo::new(path_str.to_string());

    // Pull the repository
    repo.pull()
        .map_err(|e| WebBuilderError::GitError(format!("Failed to pull repository: {}", e)))?;

    Ok(())
}

/// Clone or pull a Git repository with caching
///
/// # Arguments
///
/// * `url` - URL of the repository to clone
/// * `destination` - Destination directory
///
/// # Returns
///
/// The path to the repository or an error
pub fn clone_or_pull<P: AsRef<Path>>(url: &str, destination: P) -> Result<PathBuf> {
    let destination = destination.as_ref();

    // Check the cache first
    let mut cache = REPO_CACHE.lock().unwrap();
    let now = SystemTime::now();

    if let Some(entry) = cache.get(url) {
        // Check if the cache entry is still valid
        if let Ok(elapsed) = now.duration_since(entry.last_updated) {
            if elapsed.as_secs() < CACHE_TIMEOUT {
                // Cache is still valid, return the cached path
                log::info!("Using cached repository for {}", url);
                return Ok(entry.path.clone());
            }
        }
    }

    // Cache miss or expired, clone or pull the repository
    let result = if destination.exists() {
        // Pull the repository
        pull_repository(destination)?;
        Ok(destination.to_path_buf())
    } else {
        // Clone the repository
        clone_repository(url, destination)
    };

    // Update the cache
    if let Ok(path) = &result {
        cache.insert(
            url.to_string(),
            CacheEntry {
                path: path.clone(),
                last_updated: now,
            },
        );
    }

    result
}

/// Force update a Git repository, bypassing the cache
///
/// # Arguments
///
/// * `url` - URL of the repository to clone
/// * `destination` - Destination directory
///
/// # Returns
///
/// The path to the repository or an error
pub fn force_update<P: AsRef<Path>>(url: &str, destination: P) -> Result<PathBuf> {
    let destination = destination.as_ref();

    // Clone or pull the repository
    let result = if destination.exists() {
        // Pull the repository
        pull_repository(destination)?;
        Ok(destination.to_path_buf())
    } else {
        // Clone the repository
        clone_repository(url, destination)
    };

    // Update the cache
    if let Ok(path) = &result {
        let mut cache = REPO_CACHE.lock().unwrap();
        cache.insert(
            url.to_string(),
            CacheEntry {
                path: path.clone(),
                last_updated: SystemTime::now(),
            },
        );
    }

    result
}

/// Clear the Git repository cache
pub fn clear_cache() {
    let mut cache = REPO_CACHE.lock().unwrap();
    cache.clear();
}
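The cache keys on the URL alone, so repeated builds within `CACHE_TIMEOUT` reuse the first destination. A usage sketch; the URL and paths are illustrative:

```rust
use webbuilder::git::{clear_cache, clone_or_pull, force_update};
use webbuilder::Result;

fn sync() -> Result<()> {
    let url = "https://git.threefold.info/tfgrid/home.git";
    // First call clones (or pulls) and records the path in the cache.
    let path = clone_or_pull(url, "/tmp/repos/home")?;
    // Within CACHE_TIMEOUT (one hour) this returns the cached path
    // without touching the network.
    assert_eq!(path, clone_or_pull(url, "/tmp/repos/home")?);
    // Bypass the cache when freshness matters, or drop it entirely.
    force_update(url, "/tmp/repos/home")?;
    clear_cache();
    Ok(())
}
```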
26
webbuilder/src/git_test.rs
Normal file
@ -0,0 +1,26 @@
#[cfg(test)]
mod tests {
    use crate::error::WebBuilderError;
    use crate::git::clone_repository;
    use std::path::PathBuf;

    #[test]
    fn test_clone_repository_error_invalid_destination() {
        // Test with a destination that has no parent directory ("/").
        // The URL never gets used: the destination check fails first.
        // It is intentionally bogus anyway (note the `2` appended to `home`).
        let result = clone_repository(
            "https://git.threefold.info/tfgrid/home2.git",
            PathBuf::from("/"),
        );

        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            WebBuilderError::InvalidConfiguration(_)
        ));
    }

    // Note: The following tests would require mocking the sal::git module,
    // which is complex due to the external dependency. In a real-world scenario,
    // we would use a more sophisticated mocking approach or integration tests.

    // For now, we'll just test the error cases and leave the success cases
    // for integration testing.
}
70
webbuilder/src/ipfs.rs
Normal file
@ -0,0 +1,70 @@
use ipfs_api_backend_hyper::{IpfsApi, IpfsClient};
use std::fs::File;
use std::path::Path;
use tokio::runtime::Runtime;

use crate::error::{Result, WebBuilderError};

/// Upload a file to IPFS
///
/// # Arguments
///
/// * `path` - Path to the file to upload
///
/// # Returns
///
/// The IPFS hash of the file or an error
pub fn upload_file<P: AsRef<Path>>(path: P) -> Result<String> {
    let path = path.as_ref();

    // Check if the file exists
    if !path.exists() {
        return Err(WebBuilderError::MissingFile(path.to_path_buf()));
    }

    // Create a tokio runtime
    let rt = Runtime::new()
        .map_err(|e| WebBuilderError::Other(format!("Failed to create tokio runtime: {}", e)))?;

    // Upload the file to IPFS
    let client = IpfsClient::default();
    let ipfs_hash = rt.block_on(async {
        // Open the file directly; File implements the Read trait
        let file = File::open(path).map_err(WebBuilderError::IoError)?;

        client
            .add(file)
            .await
            .map_err(|e| WebBuilderError::IpfsError(format!("Failed to upload to IPFS: {}", e)))
            .map(|res| res.hash)
    })?;

    Ok(ipfs_hash)
}

/// Calculate the Blake3 hash of a file
///
/// # Arguments
///
/// * `path` - Path to the file to hash
///
/// # Returns
///
/// The Blake3 hash of the file or an error
pub fn calculate_blake_hash<P: AsRef<Path>>(path: P) -> Result<String> {
    let path = path.as_ref();

    // Check if the file exists
    if !path.exists() {
        return Err(WebBuilderError::MissingFile(path.to_path_buf()));
    }

    // Read the file
    let content = std::fs::read(path).map_err(WebBuilderError::IoError)?;

    // Calculate the hash
    let hash = blake3::hash(&content);
    let hash_hex = hash.to_hex().to_string();

    Ok(format!("blake3-{}", hash_hex))
}
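`upload_file` talks to a local IPFS daemon through `IpfsClient::default()`, so a node must be listening on the default API port; `calculate_blake_hash` is purely local. A sketch combining the two; the `publish` helper is hypothetical:

```rust
use std::path::Path;
use webbuilder::ipfs::{calculate_blake_hash, upload_file};
use webbuilder::Result;

// Hypothetical helper: hash a file locally, then pin it to IPFS.
fn publish(path: &Path) -> Result<()> {
    let blake = calculate_blake_hash(path)?; // "blake3-" + 64 hex chars
    let cid = upload_file(path)?; // requires a running IPFS daemon
    println!("{} -> ipfs hash {}", blake, cid);
    Ok(())
}
```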
64
webbuilder/src/ipfs_test.rs
Normal file
@ -0,0 +1,64 @@
#[cfg(test)]
mod tests {
    use crate::error::WebBuilderError;
    use crate::ipfs::{calculate_blake_hash, upload_file};
    use std::fs;
    use tempfile::TempDir;

    #[test]
    fn test_upload_file_missing_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("nonexistent.txt");

        let result = upload_file(&file_path);

        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            WebBuilderError::MissingFile(_)
        ));
    }

    #[test]
    fn test_calculate_blake_hash() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("test.txt");
        fs::write(&file_path, "test content").unwrap();

        let result = calculate_blake_hash(&file_path).unwrap();

        // The hash should start with "blake3-"
        assert!(result.starts_with("blake3-"));

        // The hash should be 64 characters long after the prefix
        assert_eq!(result.len(), "blake3-".len() + 64);

        // The hash should be the same for the same content
        let file_path2 = temp_dir.path().join("test2.txt");
        fs::write(&file_path2, "test content").unwrap();

        let result2 = calculate_blake_hash(&file_path2).unwrap();
        assert_eq!(result, result2);

        // The hash should be different for different content
        let file_path3 = temp_dir.path().join("test3.txt");
        fs::write(&file_path3, "different content").unwrap();

        let result3 = calculate_blake_hash(&file_path3).unwrap();
        assert_ne!(result, result3);
    }

    #[test]
    fn test_calculate_blake_hash_missing_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("nonexistent.txt");

        let result = calculate_blake_hash(&file_path);

        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            WebBuilderError::MissingFile(_)
        ));
    }
}
59
webbuilder/src/lib.rs
Normal file
@ -0,0 +1,59 @@
//! WebBuilder is a library for building websites from hjson configuration files and markdown content.
//!
//! It uses the DocTree library to process markdown content and includes, and exports the result
//! to a webmeta.json file that can be used by a browser-based website generator.

pub mod builder;
pub mod config;
pub mod error;
pub mod git;
pub mod ipfs;
pub mod parser;

#[cfg(test)]
mod config_test;
#[cfg(test)]
mod error_test;
#[cfg(test)]
mod git_test;
#[cfg(test)]
mod ipfs_test;
#[cfg(test)]
mod parser_test;

pub use builder::WebBuilder;
pub use config::SiteConfig;
pub use error::{Result, WebBuilderError};
pub use parser::{
    parse_site_config_auto, parse_site_config_recommended,
    parse_site_config_with_strategy as parse_site_config, ParsingStrategy,
};

/// Create a new WebBuilder instance from a directory containing configuration files.
///
/// # Arguments
///
/// * `path` - Path to the directory containing configuration files
///
/// # Returns
///
/// A new WebBuilder instance or an error
pub fn from_directory<P: AsRef<std::path::Path>>(path: P) -> Result<WebBuilder> {
    WebBuilder::from_directory(path)
}

/// Create a new WebBuilder instance from a directory containing configuration files,
/// using the specified parsing strategy.
///
/// # Arguments
///
/// * `path` - Path to the directory containing configuration files
/// * `strategy` - Parsing strategy to use
///
/// # Returns
///
/// A new WebBuilder instance or an error
pub fn from_directory_with_strategy<P: AsRef<std::path::Path>>(
    path: P,
    strategy: ParsingStrategy,
) -> Result<WebBuilder> {
    let config = parser::parse_site_config_with_strategy(path, strategy)?;
    Ok(WebBuilder { config })
}
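Putting the crate-level API together, a library consumer mirrors what `main.rs` below does. A sketch under the same assumptions (a `site/` directory containing the hjson files, and the `build`/`save` calls used by `main.rs`):

```rust
use std::path::PathBuf;
use webbuilder::{from_directory_with_strategy, ParsingStrategy, Result};

fn build_site() -> Result<()> {
    // "site" is illustrative; it should contain main.hjson etc.
    let builder = from_directory_with_strategy("site", ParsingStrategy::Auto)?;
    let webmeta = builder.build()?; // same call main.rs makes
    webmeta.save(&PathBuf::from("webmeta.json"))?;
    Ok(())
}
```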
88
webbuilder/src/main.rs
Normal file
@ -0,0 +1,88 @@
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use webbuilder::{from_directory, Result};

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Build a website from hjson configuration files
    Build {
        /// Path to the directory containing hjson configuration files
        #[arg(short, long)]
        path: PathBuf,

        /// Output path for the webmeta.json file
        #[arg(short, long)]
        output: Option<PathBuf>,

        /// Whether to upload the webmeta.json file to IPFS
        #[arg(short, long)]
        upload: bool,
    },
}

fn main() -> Result<()> {
    // Initialize logger
    env_logger::init();

    // Parse command line arguments
    let cli = Cli::parse();

    // Handle commands
    match &cli.command {
        Commands::Build {
            path,
            output,
            upload,
        } => {
            // Create a WebBuilder instance
            let webbuilder = from_directory(path)?;

            // Print the parsed configuration
            println!("Parsed site configuration:");
            println!("  Name: {}", webbuilder.config.name);
            println!("  Title: {}", webbuilder.config.title);
            println!("  Description: {:?}", webbuilder.config.description);
            println!("  URL: {:?}", webbuilder.config.url);
            println!(
                "  Collections: {} items",
                webbuilder.config.collections.len()
            );

            for (i, collection) in webbuilder.config.collections.iter().enumerate() {
                println!(
                    "    Collection {}: {:?} - {:?}",
                    i, collection.name, collection.url
                );
            }

            println!("  Pages: {} items", webbuilder.config.pages.len());

            // Build the website
            let webmeta = webbuilder.build()?;

            // Save the webmeta.json file
            let output_path = output
                .clone()
                .unwrap_or_else(|| PathBuf::from("webmeta.json"));
            webmeta.save(&output_path)?;

            // Upload to IPFS if requested
            if *upload {
                let ipfs_hash = webbuilder.upload_to_ipfs(&output_path)?;
                println!("Uploaded to IPFS: {}", ipfs_hash);
            }

            println!("Website built successfully!");
            println!("Output: {:?}", output_path);
        }
    }

    Ok(())
}
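Given the clap derive above, a typical invocation of the resulting binary would be `webbuilder build --path ./site --output ./webmeta.json --upload`; since the fields use `#[arg(short, long)]`, the short forms `-p`, `-o`, and `-u` should work as well (assuming the binary is installed under the crate name).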
517
webbuilder/src/parser.rs
Normal file
@ -0,0 +1,517 @@
use std::fs;
use std::path::Path;

use deser_hjson::from_str;
use serde::de::DeserializeOwned;
use serde_json::{self, Value};

use crate::config::{CollectionConfig, FooterConfig, HeaderConfig, PageConfig, SiteConfig};
use crate::error::{Result, WebBuilderError};

/// Parsing strategy to use
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ParsingStrategy {
    /// Use the deser-hjson library (recommended)
    Hjson,
    /// Use a simple line-by-line parser (legacy)
    Simple,
    /// Auto-detect the best parser to use
    Auto,
}

/// Parse a file into a struct using the specified strategy
///
/// # Arguments
///
/// * `path` - Path to the file to parse
/// * `strategy` - Parsing strategy to use
///
/// # Returns
///
/// The parsed struct or an error
pub fn parse_file<T, P>(path: P, strategy: ParsingStrategy) -> Result<T>
where
    T: DeserializeOwned,
    P: AsRef<Path>,
{
    let path = path.as_ref();

    // Check if the file exists
    if !path.exists() {
        return Err(WebBuilderError::MissingFile(path.to_path_buf()));
    }

    // Read the file
    let content = fs::read_to_string(path).map_err(WebBuilderError::IoError)?;

    match strategy {
        ParsingStrategy::Hjson => {
            // Use the deser-hjson library
            from_str(&content).map_err(|e| {
                WebBuilderError::HjsonError(format!("Error parsing {:?}: {}", path, e))
            })
        }
        // The simple strategy is exactly what parse_hjson implements,
        // so delegate instead of duplicating its body here.
        ParsingStrategy::Simple => parse_hjson(path),
        ParsingStrategy::Auto => {
            // Try the hjson parser first, fall back to the simple parser if it fails
            match from_str(&content) {
                Ok(result) => Ok(result),
                Err(e) => {
                    log::warn!("Hjson parser failed: {}, falling back to simple parser", e);
                    parse_hjson(path)
                }
            }
        }
    }
}

/// Parse a hjson file into a struct using the simple parser
///
/// # Arguments
///
/// * `path` - Path to the hjson file
///
/// # Returns
///
/// The parsed struct or an error
pub fn parse_hjson<T, P>(path: P) -> Result<T>
where
    T: DeserializeOwned,
    P: AsRef<Path>,
{
    let path = path.as_ref();

    // Check if the file exists
    if !path.exists() {
        return Err(WebBuilderError::MissingFile(path.to_path_buf()));
    }

    // Read the file
    let content = fs::read_to_string(path).map_err(WebBuilderError::IoError)?;

    // First try to parse as JSON
    if let Ok(result) = serde_json::from_str::<T>(&content) {
        return Ok(result);
    }

    // If that fails, try to convert hjson to json using a simple approach
    let json_content = convert_hjson_to_json(&content)?;

    // Parse the JSON
    serde_json::from_str(&json_content)
        .map_err(|e| WebBuilderError::HjsonError(format!("Error parsing {:?}: {}", path, e)))
}

/// Convert hjson to json using a simple approach
///
/// Strips `#` comment lines, quotes bare keys and string values, and removes
/// trailing commas; for example, `name: demo` becomes `"name": "demo"`.
///
/// # Arguments
///
/// * `hjson` - The hjson content
///
/// # Returns
///
/// The json content or an error
fn convert_hjson_to_json(hjson: &str) -> Result<String> {
    let mut json = String::new();
    let mut lines = hjson.lines();

    while let Some(line) = lines.next() {
        let trimmed = line.trim();

        // Skip empty lines
        if trimmed.is_empty() {
            continue;
        }

        // Skip comment lines
        if trimmed.starts_with('#') {
            continue;
        }

        // Handle key-value pairs
        if let Some(pos) = trimmed.find(':') {
            let key = trimmed[..pos].trim();
            let value = trimmed[pos + 1..].trim();

            // Add quotes to keys
            json.push_str(&format!("\"{}\":", key));

            // Add the value
            if value.is_empty() {
                // If the value is empty, it might be an object or array start
                if lines
                    .clone()
                    .next()
                    .map_or(false, |l| l.trim().starts_with('{'))
                {
                    json.push_str(" {");
                } else if lines
                    .clone()
                    .next()
                    .map_or(false, |l| l.trim().starts_with('['))
                {
                    json.push_str(" [");
                } else {
                    json.push_str(" null");
                }
            } else {
                // Add quotes to string values
                if value.starts_with('"')
                    || value.starts_with('[')
                    || value.starts_with('{')
                    || value == "true"
                    || value == "false"
                    || value == "null"
                    || value.parse::<f64>().is_ok()
                {
                    json.push_str(&format!(" {}", value));
                } else {
                    json.push_str(&format!(" \"{}\"", value.replace('"', "\\\"")));
                }
            }

            json.push_str(",\n");
        } else if trimmed == "{" || trimmed == "[" {
            json.push_str(trimmed);
            json.push('\n');
        } else if trimmed == "}" || trimmed == "]" {
            // Remove the trailing comma if present
            if json.ends_with(",\n") {
                json.pop();
                json.pop();
                json.push('\n');
            }
            json.push_str(trimmed);
            json.push_str(",\n");
        } else {
            // Just copy the line
            json.push_str(trimmed);
            json.push('\n');
        }
    }

    // Remove the trailing comma if present
    if json.ends_with(",\n") {
        json.pop();
        json.pop();
        json.push('\n');
    }

    // Wrap in an object if not already wrapped
    if !json.trim().starts_with('{') {
        json = format!("{{\n{}\n}}", json);
    }

    Ok(json)
}

/// Parse site configuration from a directory
///
/// # Arguments
///
/// * `path` - Path to the directory containing hjson configuration files
///
/// # Returns
///
/// The parsed site configuration or an error
pub fn parse_site_config<P: AsRef<Path>>(path: P) -> Result<SiteConfig> {
    let path = path.as_ref();

    // Check if the directory exists
    if !path.exists() {
        return Err(WebBuilderError::MissingDirectory(path.to_path_buf()));
    }

    // Check that the path is a directory
    if !path.is_dir() {
        return Err(WebBuilderError::InvalidConfiguration(format!(
            "{:?} is not a directory",
            path
        )));
    }

    // Parse main.hjson
    let main_path = path.join("main.hjson");
    let main_config: serde_json::Value = parse_hjson(main_path)?;

    // Parse header.hjson
    let header_path = path.join("header.hjson");
    let header_config: Option<HeaderConfig> = if header_path.exists() {
        Some(parse_hjson(header_path)?)
    } else {
        None
    };

    // Parse footer.hjson
    let footer_path = path.join("footer.hjson");
    let footer_config: Option<FooterConfig> = if footer_path.exists() {
        Some(parse_hjson(footer_path)?)
    } else {
        None
    };

    // Parse collection.hjson
    let collection_path = path.join("collection.hjson");
    let collection_configs: Vec<CollectionConfig> = if collection_path.exists() {
        parse_hjson(collection_path)?
    } else {
        Vec::new()
    };

    // Parse the pages directory
    let pages_path = path.join("pages");
    let mut page_configs: Vec<PageConfig> = Vec::new();

    if pages_path.exists() && pages_path.is_dir() {
        for entry in fs::read_dir(pages_path)? {
            let entry = entry?;
            let entry_path = entry.path();

            if entry_path.is_file() && entry_path.extension().map_or(false, |ext| ext == "hjson") {
                let page_config: Vec<PageConfig> = parse_hjson(&entry_path)?;
                page_configs.extend(page_config);
            }
        }
    }

    // Parse keywords from main.hjson
    let keywords = if let Some(keywords_value) = main_config.get("keywords") {
        if keywords_value.is_array() {
            let mut keywords_vec = Vec::new();
            for keyword in keywords_value.as_array().unwrap() {
                if let Some(keyword_str) = keyword.as_str() {
                    keywords_vec.push(keyword_str.to_string());
                }
            }
            Some(keywords_vec)
        } else if let Some(keywords_str) = keywords_value.as_str() {
            // Handle comma-separated keywords
            Some(
                keywords_str
                    .split(',')
                    .map(|s| s.trim().to_string())
                    .collect(),
            )
        } else {
            None
        }
    } else {
        None
    };

    // Create the site configuration
    let site_config = SiteConfig {
        name: main_config["name"]
            .as_str()
            .unwrap_or("default")
            .to_string(),
        title: main_config["title"].as_str().unwrap_or("").to_string(),
        description: main_config["description"].as_str().map(|s| s.to_string()),
        keywords,
        url: main_config["url"].as_str().map(|s| s.to_string()),
        favicon: main_config["favicon"].as_str().map(|s| s.to_string()),
        header: header_config,
        footer: footer_config,
        collections: collection_configs,
        pages: page_configs,
        base_path: path.to_path_buf(),
    };

    Ok(site_config)
}

/// Parse site configuration from a directory using the specified strategy
///
/// # Arguments
///
/// * `path` - Path to the directory containing configuration files
/// * `strategy` - Parsing strategy to use
///
/// # Returns
///
/// The parsed site configuration or an error
pub fn parse_site_config_with_strategy<P: AsRef<Path>>(
    path: P,
    strategy: ParsingStrategy,
) -> Result<SiteConfig> {
    let path = path.as_ref();

    // Check if the directory exists
    if !path.exists() {
        return Err(WebBuilderError::MissingDirectory(path.to_path_buf()));
    }

    // Check that the path is a directory
    if !path.is_dir() {
        return Err(WebBuilderError::InvalidConfiguration(format!(
            "{:?} is not a directory",
            path
        )));
    }

    // Create a basic site configuration
    let mut site_config = SiteConfig {
        name: "default".to_string(),
        title: "".to_string(),
        description: None,
        keywords: None,
        url: None,
        favicon: None,
        header: None,
        footer: None,
        collections: Vec::new(),
        pages: Vec::new(),
        base_path: path.to_path_buf(),
    };

    // Parse main.hjson
    let main_path = path.join("main.hjson");
    if main_path.exists() {
        let main_config: Value = parse_file(main_path, strategy)?;

        // Extract values from main.hjson
        if let Some(name) = main_config.get("name").and_then(|v| v.as_str()) {
            site_config.name = name.to_string();
        }
        if let Some(title) = main_config.get("title").and_then(|v| v.as_str()) {
            site_config.title = title.to_string();
        }
        if let Some(description) = main_config.get("description").and_then(|v| v.as_str()) {
            site_config.description = Some(description.to_string());
        }
        if let Some(url) = main_config.get("url").and_then(|v| v.as_str()) {
            site_config.url = Some(url.to_string());
        }
        if let Some(favicon) = main_config.get("favicon").and_then(|v| v.as_str()) {
            site_config.favicon = Some(favicon.to_string());
        }
        if let Some(keywords) = main_config.get("keywords").and_then(|v| v.as_array()) {
            let keywords_vec: Vec<String> = keywords
                .iter()
                .filter_map(|k| k.as_str().map(|s| s.to_string()))
                .collect();
            if !keywords_vec.is_empty() {
                site_config.keywords = Some(keywords_vec);
            }
        }
    }

    // Parse header.hjson
    let header_path = path.join("header.hjson");
    if header_path.exists() {
        site_config.header = Some(parse_file(header_path, strategy)?);
    }

    // Parse footer.hjson
    let footer_path = path.join("footer.hjson");
    if footer_path.exists() {
        site_config.footer = Some(parse_file(footer_path, strategy)?);
    }

    // Parse collection.hjson
    let collection_path = path.join("collection.hjson");
    if collection_path.exists() {
        let collection_array: Vec<CollectionConfig> = parse_file(collection_path, strategy)?;

        // Process each collection
        for mut collection in collection_array {
            // Convert a web interface URL to a Git URL if needed
            if let Some(url) = &collection.url {
                if url.contains("/src/branch/") {
                    // This is a web interface URL, convert it to a Git URL
                    let parts: Vec<&str> = url.split("/src/branch/").collect();
                    if parts.len() == 2 {
                        collection.url = Some(format!("{}.git", parts[0]));
                    }
                }
            }
            site_config.collections.push(collection);
        }
    }

    // Parse the pages directory
    let pages_path = path.join("pages");
    if pages_path.exists() && pages_path.is_dir() {
        for entry in fs::read_dir(pages_path)? {
            let entry = entry?;
            let entry_path = entry.path();

            if entry_path.is_file() && entry_path.extension().map_or(false, |ext| ext == "hjson") {
                let pages_array: Vec<PageConfig> = parse_file(&entry_path, strategy)?;
                site_config.pages.extend(pages_array);
            }
        }
    }

    Ok(site_config)
}

/// Parse site configuration from a directory using the recommended strategy (Hjson)
///
/// # Arguments
///
/// * `path` - Path to the directory containing configuration files
///
/// # Returns
///
/// The parsed site configuration or an error
pub fn parse_site_config_recommended<P: AsRef<Path>>(path: P) -> Result<SiteConfig> {
    parse_site_config_with_strategy(path, ParsingStrategy::Hjson)
}

/// Parse site configuration from a directory using the auto-detect strategy
///
/// # Arguments
///
/// * `path` - Path to the directory containing configuration files
///
/// # Returns
///
/// The parsed site configuration or an error
pub fn parse_site_config_auto<P: AsRef<Path>>(path: P) -> Result<SiteConfig> {
    parse_site_config_with_strategy(path, ParsingStrategy::Auto)
}
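To see the strategies side by side: `parse_site_config_recommended` is strict deser-hjson, while `parse_site_config_auto` falls back to the line-based converter when deser-hjson rejects the input. A sketch; the `site` directory is illustrative:

```rust
use webbuilder::{parse_site_config_auto, parse_site_config_recommended, Result};

fn load() -> Result<()> {
    // Strict: deser-hjson only; fails on input the library cannot parse.
    let strict = parse_site_config_recommended("site")?;
    // Lenient: tries deser-hjson first, then the simple converter.
    let lenient = parse_site_config_auto("site")?;
    assert_eq!(strict.name, lenient.name);
    Ok(())
}
```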
267
webbuilder/src/parser_test.rs
Normal file
@ -0,0 +1,267 @@
#[cfg(test)]
mod tests {
    use crate::error::WebBuilderError;
    use crate::parser::{parse_site_config_with_strategy, ParsingStrategy};
    use std::fs;
    use std::path::PathBuf;
    use tempfile::TempDir;

    fn create_test_site(temp_dir: &TempDir) -> PathBuf {
        let site_dir = temp_dir.path().join("site");
        fs::create_dir(&site_dir).unwrap();

        // Create main.hjson
        let main_hjson = r#"{
            # Main configuration
            "name": "test",
            "title": "Test Site",
            "description": "A test site",
            "url": "https://example.com",
            "favicon": "favicon.ico",
            "keywords": [
                "demo",
                "test",
                "example"
            ]
        }"#;
        fs::write(site_dir.join("main.hjson"), main_hjson).unwrap();

        // Create header.hjson
        let header_hjson = r#"{
            # Header configuration
            "title": "Test Site",
            "logo": {
                "src": "logo.png",
                "alt": "Logo"
            },
            "menu": [
                {
                    "label": "Home",
                    "link": "/"
                },
                {
                    "label": "About",
                    "link": "/about"
                }
            ]
        }"#;
        fs::write(site_dir.join("header.hjson"), header_hjson).unwrap();

        // Create collection.hjson
        let collection_hjson = r#"[
            {
                # First collection
                "name": "test",
                "url": "https://git.threefold.info/tfgrid/home.git",
                "description": "A test collection",
                "scan": true
            },
            {
                # Second collection
                "name": "test2",
                "url": "https://git.example.com/src/branch/main/test2",
                "description": "Another test collection"
            }
        ]"#;
        fs::write(site_dir.join("collection.hjson"), collection_hjson).unwrap();

        // Create the pages directory
        let pages_dir = site_dir.join("pages");
        fs::create_dir(&pages_dir).unwrap();

        // Create pages/pages.hjson
        let pages_hjson = r#"[
            {
                # Home page
                "name": "home",
                "title": "Home",
                "description": "Home page",
                "navpath": "/",
                "collection": "test",
                "draft": false
            },
            {
                # About page
                "name": "about",
                "title": "About",
                "description": "About page",
                "navpath": "/about",
                "collection": "test"
            }
        ]"#;
        fs::write(pages_dir.join("pages.hjson"), pages_hjson).unwrap();

        site_dir
    }

    #[test]
    fn test_parse_site_config_hjson() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = create_test_site(&temp_dir);

        let config = parse_site_config_with_strategy(&site_dir, ParsingStrategy::Hjson).unwrap();

        // Check basic site info
        assert_eq!(config.name, "test");
        assert_eq!(config.title, "Test Site");
        assert_eq!(config.description, Some("A test site".to_string()));
        assert_eq!(config.url, Some("https://example.com".to_string()));
        assert_eq!(config.favicon, Some("favicon.ico".to_string()));
        assert_eq!(
            config.keywords,
            Some(vec![
                "demo".to_string(),
                "test".to_string(),
                "example".to_string()
            ])
        );

        // Check header
        assert!(config.header.is_some());
        let header = config.header.as_ref().unwrap();
        assert_eq!(header.title, Some("Test Site".to_string()));
        assert!(header.logo.is_some());
        let logo = header.logo.as_ref().unwrap();
        assert_eq!(logo.src, "logo.png");
        assert_eq!(logo.alt, Some("Logo".to_string()));

        // Check collections
        assert_eq!(config.collections.len(), 2);

        // First collection
        assert_eq!(config.collections[0].name, Some("test".to_string()));
        assert_eq!(
            config.collections[0].url,
            Some("https://git.threefold.info/tfgrid/home.git".to_string())
        );
        assert_eq!(
            config.collections[0].description,
            Some("A test collection".to_string())
        );
        assert_eq!(config.collections[0].scan, Some(true));

        // Second collection (with URL conversion)
        assert_eq!(config.collections[1].name, Some("test2".to_string()));
        assert_eq!(
            config.collections[1].url,
            Some("https://git.example.com.git".to_string())
        );
        assert_eq!(
            config.collections[1].description,
            Some("Another test collection".to_string())
        );
        assert_eq!(config.collections[1].scan, None);

        // Check pages
        assert_eq!(config.pages.len(), 2);

        // First page
        assert_eq!(config.pages[0].name, "home");
        assert_eq!(config.pages[0].title, "Home");
        assert_eq!(config.pages[0].description, Some("Home page".to_string()));
        assert_eq!(config.pages[0].navpath, "/");
        assert_eq!(config.pages[0].collection, "test");
        assert_eq!(config.pages[0].draft, Some(false));

        // Second page
        assert_eq!(config.pages[1].name, "about");
        assert_eq!(config.pages[1].title, "About");
        assert_eq!(config.pages[1].description, Some("About page".to_string()));
        assert_eq!(config.pages[1].navpath, "/about");
        assert_eq!(config.pages[1].collection, "test");
        assert_eq!(config.pages[1].draft, None);
    }

    #[test]
    fn test_parse_site_config_auto() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = create_test_site(&temp_dir);

        let config = parse_site_config_with_strategy(&site_dir, ParsingStrategy::Auto).unwrap();

        // Basic checks to ensure it worked
        assert_eq!(config.name, "test");
        assert_eq!(config.title, "Test Site");
        assert_eq!(config.collections.len(), 2);
        assert_eq!(config.pages.len(), 2);
    }

    #[test]
    fn test_parse_site_config_simple() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = temp_dir.path().join("site");
        fs::create_dir(&site_dir).unwrap();

        // Create main.hjson in a format that the simple parser can handle
        let main_hjson = "name: test\ntitle: Test Site\ndescription: A test site";
        fs::write(site_dir.join("main.hjson"), main_hjson).unwrap();

        let config = parse_site_config_with_strategy(&site_dir, ParsingStrategy::Simple).unwrap();

        // Basic checks to ensure it worked
        assert_eq!(config.name, "test");
        assert_eq!(config.title, "Test Site");
        assert_eq!(config.description, Some("A test site".to_string()));
    }

    #[test]
    fn test_parse_site_config_missing_directory() {
        let result =
            parse_site_config_with_strategy("/nonexistent/directory", ParsingStrategy::Hjson);
        assert!(matches!(result, Err(WebBuilderError::MissingDirectory(_))));
    }

    #[test]
    fn test_parse_site_config_not_a_directory() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("file.txt");
        fs::write(&file_path, "not a directory").unwrap();

        let result = parse_site_config_with_strategy(&file_path, ParsingStrategy::Hjson);
        assert!(matches!(
            result,
            Err(WebBuilderError::InvalidConfiguration(_))
        ));
    }

    #[test]
    fn test_parse_site_config_minimal() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = temp_dir.path().join("site");
        fs::create_dir(&site_dir).unwrap();

        // Create a minimal main.hjson
        let main_hjson = r#"{ "name": "minimal", "title": "Minimal Site" }"#;
        fs::write(site_dir.join("main.hjson"), main_hjson).unwrap();

        let config = parse_site_config_with_strategy(&site_dir, ParsingStrategy::Hjson).unwrap();

        assert_eq!(config.name, "minimal");
        assert_eq!(config.title, "Minimal Site");
        assert_eq!(config.description, None);
        assert_eq!(config.url, None);
        assert_eq!(config.favicon, None);
        assert!(config.header.is_none());
        assert!(config.footer.is_none());
        assert!(config.collections.is_empty());
        assert!(config.pages.is_empty());
    }

    #[test]
    fn test_parse_site_config_empty() {
        let temp_dir = TempDir::new().unwrap();
        let site_dir = temp_dir.path().join("site");
        fs::create_dir(&site_dir).unwrap();

        let config = parse_site_config_with_strategy(&site_dir, ParsingStrategy::Hjson).unwrap();

        assert_eq!(config.name, "default");
        assert_eq!(config.title, "");
        assert_eq!(config.description, None);
        assert_eq!(config.url, None);
        assert_eq!(config.favicon, None);
        assert!(config.header.is_none());
        assert!(config.footer.is_none());
        assert!(config.collections.is_empty());
        assert!(config.pages.is_empty());
    }
}