Repository: tejas-raskar/noted.md Branch: main Commit: daa63ba1537d Files: 22 Total size: 87.8 KB Directory structure: gitextract_3u9igm6l/ ├── .github/ │ ├── FUNDING.yml │ └── workflows/ │ └── release.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE ├── README.md └── src/ ├── ai_provider.rs ├── cli.rs ├── clients/ │ ├── claude_client.rs │ ├── gemini_client.rs │ ├── mod.rs │ ├── notion_client.rs │ ├── ollama_client.rs │ └── openai_client.rs ├── config.rs ├── error.rs ├── file_utils.rs ├── main.rs ├── notion/ │ ├── converter.rs │ └── mod.rs └── ui.rs ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/FUNDING.yml ================================================ github: tejas-raskar ================================================ FILE: .github/workflows/release.yml ================================================ name: Release on: release: types: - published jobs: build-and-upload: name: Build (${{ matrix.build }}) runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: include: - build: linux os: ubuntu-latest target: x86_64-unknown-linux-musl - build: macos os: macos-latest target: x86_64-apple-darwin - build: macos-arm64 os: macos-latest target: aarch64-apple-darwin - build: windows-gnu os: windows-latest target: x86_64-pc-windows-msvc steps: - name: Clone repository uses: actions/checkout@v4 - name: Install Rust uses: dtolnay/rust-toolchain@stable with: targets: ${{ matrix.target }} - name: Install musl-tools (Linux) if: matrix.os == 'ubuntu-latest' run: | sudo apt-get update sudo apt-get install -y musl-tools musl-dev libssl-dev pkg-config - name: Build binary run: cargo build --release --target ${{ matrix.target }} - name: Package the binary shell: bash run: | BINARY_NAME="notedmd" RELEASE_VERSION="${{ github.ref_name }}" ROOT_DIR="${BINARY_NAME}-${RELEASE_VERSION}-${{ matrix.target }}" mkdir -p "${ROOT_DIR}/bin" if [ "${{ matrix.os }}" = "windows-latest" ]; then SOURCE_FILE="target/${{ matrix.target }}/release/${BINARY_NAME}.exe" cp "$SOURCE_FILE" "${ROOT_DIR}/bin/" else SOURCE_FILE="target/${{ matrix.target }}/release/${BINARY_NAME}" cp "$SOURCE_FILE" "${ROOT_DIR}/bin/" chmod +x "${ROOT_DIR}/bin/${BINARY_NAME}" fi cp LICENSE README.md CHANGELOG.md "${ROOT_DIR}/" if [ "${{ matrix.os }}" = "windows-latest" ]; then ASSET_NAME="${ROOT_DIR}.zip" 7z a "$ASSET_NAME" "$ROOT_DIR" echo "ASSET=${ASSET_NAME}" >> $GITHUB_ENV else ASSET_NAME="${ROOT_DIR}.tar.gz" tar -czf "$ASSET_NAME" "$ROOT_DIR" echo "ASSET=${ASSET_NAME}" >> $GITHUB_ENV fi - name: Upload binary uses: softprops/action-gh-release@v2 with: files: | ${{ env.ASSET }} ================================================ FILE: .gitignore ================================================ /target ================================================ FILE: CHANGELOG.md ================================================ # Changelog ## [0.3.0] ### Fixed - Resolved an issue where Claude API throwed an error when using PDF files due to wrong type in request body. ### Added - Added support for Notion. You can now save your notes directly to a Notion database. ## [0.2.4] ### Added - Added support for all clients that are compatible with OpenAI's API. LM Studio for example. ## [0.2.3] ### Added - Added `--show`, `--edit`, and `--set-provider` subcommands to the `config` command for better configuration management. ### Changed - Updated the Claude model selection from a text input to a selection menu to improve user experience and prevent typos. ### Fixed - Resolved an issue where API errors in successful (`200 OK`) responses were ignored, preventing silent failures. - Corrected a bug where configuring the Ollama provider would erase all other existing provider settings. ## [0.2.2] ### Added - Added Claude support. ### Changed - Refactored the project to move the individual client files to a client subfolder. ## [0.2.1] ### Fixed - Fixed a bug where `active_provider` was not being set when using `--set-api-key` option. ## [0.2.0] ### Added - Added Ollama support - Added a `prompt` option to the `convert` command to override the default prompt. ## [0.1.1] ### Added - Ollama provider support in onboarding (configuration only) - Provider abstraction for AI client support - Unified configuration via `notedmd config` command ### Changed - Improved provider selection and configuration flow in onboarding process ## [0.1.0] ### Added - Initial release of `notedmd`. - `convert` command to process single files or directories of images and PDFs. - `config` command to manage the Gemini API key. - Interactive prompt to enter API key if not configured. - Progress bar for batch processing. ### Fixed - Progress bar rendering correctly during batch processing without being disrupted by log messages. - Removed redundant ASCII art display on every command run. ================================================ FILE: Cargo.toml ================================================ [package] name = "notedmd" version = "0.3.0" edition = "2024" description = "A command-line tool to convert handwritten notes into a clean and readable Markdown file." license = "MIT" repository = "https://github.com/tejas-raskar/noted.md" readme = "README.md" keywords = ["cli", "notes", "markdown", "gemini", "ollama"] categories = ["command-line-utilities"] [dependencies] base64 = "0.22.1" clap = { version = "4.5.40", features = ["derive", "env"] } tokio = { version = "1.45.1", features = ["full"] } reqwest = { version = "0.12", features = ["json"] } serde = { version = "1.0.219", features = ["derive"] } openssl = { version = "0.10", features = ["vendored"] } serde_json = "1.0.140" toml = "0.8.23" directories = "6.0.0" dialoguer = "0.11.0" colored = "3.0.0" indicatif = "0.17.11" async-trait = "0.1.88" thiserror = "2.0.12" comrak = "0.39.1" notion-client = "1.0.10" anyhow = "1.0.98" ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2025 Tejas Raskar Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================
          ███╗   ██╗ ██████╗ ████████╗███████╗██████╗    ███╗   ███╗██████╗
          ████╗  ██║██╔═══██╗╚══██╔══╝██╔════╝██╔══██╗   ████╗ ████║██╔══██╗
          ██╔██╗ ██║██║   ██║   ██║   █████╗  ██║  ██║   ██╔████╔██║██║  ██║
          ██║╚██╗██║██║   ██║   ██║   ██╔══╝  ██║  ██║   ██║╚██╔╝██║██║  ██║
          ██║ ╚████║╚██████╔╝   ██║   ███████╗██████╔╝██╗██║ ╚═╝ ██║██████╔╝
          ╚═╝  ╚═══╝ ╚═════╝    ╚═╝   ╚══════╝╚═════╝ ╚═╝╚═╝     ╚═╝╚═════╝
  

A command-line tool to convert handwritten notes into a clean and readable Markdown file.

Build Status Version Downloads License

--- `noted.md` is a CLI tool that uses LLMs to convert your handwritten text into markdown files. It's an interactive program that accepts pdfs, jpg, jpeg, png as an input and processes them accordingly. It can recognize mathematical equations too and can correctly format them in LaTeX. And if you have bunch of files to convert them at once, `noted.md` supports batch processing too! https://github.com/user-attachments/assets/5e2f4ab5-2043-4ea4-b95d-bf63e36ce9d9 ## Installation `noted.md` can be installed on macOS, Linux, and Windows. ### macOS & Linux (Recommended: Homebrew) For the easiest installation on macOS and Linux, use Homebrew: ```bash brew tap tejas-raskar/noted.md brew install notedmd ``` To update `noted.md` to the latest version: ```bash brew upgrade notedmd ``` ### Manual Download (Windows) For Windows, download the latest `.zip` archive from the [Releases page](https://github.com/tejas-raskar/noted.md/releases/latest). Extract the contents and add the `bin` directory to your system's PATH. ### Building from Source If you prefer to build from source, clone the repository and use Cargo: ```bash git clone https://github.com/tejas-raskar/noted.md.git cd noted.md cargo build --release # The executable will be in target/release/notedmd ``` ## Usage The typical workflow is: 1. **Configure your AI provider**: Use `notedmd config --edit` for a guided setup. 2. **Convert your files**: Use `notedmd convert ` to process your notes. ### Commands | Command | Description | | ----------------- | ------------------------------------------------------------------------------------ | | `notedmd convert` | Converts a file or all supported files in a directory into Markdown. | | `notedmd config` | Manages the AI provider configuration. Shows the current config if no flags are used. | --- ## Configuration ### Interactive Setup (Recommended) For first-time users, the interactive setup is the easiest way to get started. Run: ```bash notedmd config --edit ``` This will guide you through selecting an AI provider (Gemini, Claude, or Ollama) and entering the necessary credentials, such as API keys or server details. ### AI Providers You can choose between three AI providers. #### Gemini and Claude APIs You will need an API key from your chosen provider: - **Gemini API:** [Google AI Studio](https://aistudio.google.com/app/apikey) - **Claude API:** [Anthropic's website](https://console.anthropic.com/dashboard) #### Ollama Make sure Ollama is installed and running on your local machine. You can download it from [Ollama's website](https://ollama.com/). #### OpenAI API compatible clients Supports all clients that are compatible with the OpenAI API. [LM Studio](https://lmstudio.ai/) for example. --- ### Notion You can also save your converted notes directly to a Notion database. To do this, you'll need to create a Notion integration and provide the API key and database ID. **1. Create a Notion Integration:** Follow the [official Notion guide](https://developers.notion.com/docs/create-a-notion-integration#create-your-integration-in-notion) to create an integration and get your API key (Internal Integration Token). **2. Share the Database with the Integration:** For `noted.md` to be able to add pages to your database, you need to share it with the integration you created. - Go to your database in Notion. - Click the **•••** menu in the top-right corner. - Click **+ Add connections** and select your integration. **3. Get the Database ID:** The database ID is the long string of characters in the URL of your database. For example, if your database URL is `https://www.notion.so/my-workspace/1234567890abcdef1234567890abcdef?v=...`, your database ID is `1234567890abcdef1234567890abcdef`. You will be prompted to enter the API key and database ID when you run `notedmd config --edit` and choose to configure Notion. --- ### Managing Configuration via Flags You can also manage your configuration directly using flags. | Flag | Description | | -------------------------------- | --------------------------------------------------------------------------- | | `--set-provider ` | Set the active provider (`gemini`, `claude`, `ollama`). | | `--set-api-key ` | Set the API key for Gemini. | | `--set-claude-api-key ` | Set the API key for Claude. | | `--show` | Display the current configuration. | | `--show-path` | Show the path to your configuration file. | | `--edit` | Start the interactive configuration wizard. | **Examples:** - Set the active provider to Claude: ```bash notedmd config --set-provider claude ``` - Set your Gemini API key: ```bash notedmd config --set-api-key YOUR_GEMINI_API_KEY ``` --- ## Converting Files Once configured, you can convert your handwritten notes. | Flag | Description | | -------------------------------- | --------------------------------------------------------------------------- | | `-o`, `--output ` | Specify a directory to save the converted Markdown file(s). | | `-p`, `--prompt ` | Add a custom prompt to override the default instructions for the LLM. | | `--api-key ` | Temporarily override the stored API key for a single `convert` command. | | `-n`, `--notion` | Save the converted file to your configured Notion database. | **Examples:** - **Convert a single file**: The converted file will be saved in the same directory with a `.md` extension (e.g., `my_document.md`). ```bash notedmd convert my_document.pdf ``` - **Convert a file and save it to Notion**: ```bash notedmd convert my_notes.png --notion ``` - **Convert a file with a custom prompt**: ```bash notedmd convert my_notes.png --prompt "Transcribe this into a bulleted list." ``` - **Convert a file and save it to a different directory**: ```bash notedmd convert my_document.pdf --output ./markdown_notes/ ``` - **Convert all supported files in a directory**: ```bash notedmd convert ./my_project_files/ ``` - **Convert all files in a directory to a specific output directory**: ```bash notedmd convert ./my_project_files/ --output ./markdown_notes/ ``` ## Contributing Contributions are welcome! If you have a feature request, bug report, or want to contribute to the code, please feel free to open an issue or a pull request on our [GitHub repository](https://github.com/tejas-raskar/noted.md). ## License This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details. ================================================ FILE: src/ai_provider.rs ================================================ use crate::{error::NotedError, file_utils::FileData}; use async_trait::async_trait; #[async_trait] pub trait AiProvider { async fn send_request(&self, file_data: FileData) -> Result; } ================================================ FILE: src/cli.rs ================================================ use clap::{Parser, Subcommand}; #[derive(Parser, Debug)] #[command( version, about = "A command-line tool to convert handwritten notes into clean and readable Markdown files", long_about = None)] pub struct Cli { #[command(subcommand)] pub command: Commands, } #[derive(Subcommand, Debug)] pub enum Commands { /// Convert files to Markdown format Convert { /// Path to a file or directory to convert #[arg(required = true)] path: String, /// Output directory to save converted files #[arg( short, long, help = "Directory where converted markdown files will be saved" )] output: Option, /// API key for conversion #[arg(long, env = "GEMINI_API_KEY", hide_env_values = true)] api_key: Option, /// Prompt the LLM #[arg(short, long, help = "Add a custom prompt to pass to the LLM")] prompt: Option, /// Notion Support #[arg(short, long, help = "Use Notion to store the generated output")] notion: bool, }, /// Configure notedmd settings Config { /// Set your Gemini API key #[arg(long, help = "Set your Gemini API key for future use")] set_api_key: Option, /// Set your Claude API key #[arg(long, help = "Set your Claude API key for future use")] set_claude_api_key: Option, /// Set active provider #[arg(long, help = "Set the active provider")] set_provider: Option, /// Show config file location #[arg(long, help = "Shows the location of your configuration file")] show_path: bool, /// Show config file #[arg(long, help = "Shows the content of your configuration")] show: bool, /// Trigger onboarding flow #[arg(long, help = "Edit the configuration file")] edit: bool, }, } ================================================ FILE: src/clients/claude_client.rs ================================================ use crate::ai_provider::AiProvider; use crate::error::NotedError; use crate::file_utils::FileData; use async_trait::async_trait; use reqwest::{Client, StatusCode}; use serde::{Deserialize, Serialize}; // Request structs #[derive(Serialize)] struct ClaudeRequest { model: String, max_tokens: u32, messages: Vec, } #[derive(Serialize)] struct Message { role: String, content: Vec, } #[derive(Serialize)] struct Content { #[serde(rename = "type")] content_type: String, #[serde(skip_serializing_if = "Option::is_none")] text: Option, #[serde(skip_serializing_if = "Option::is_none")] source: Option, } #[derive(Serialize)] struct Source { #[serde(rename = "type")] source_type: String, media_type: String, data: String, } // Response structs #[derive(Deserialize, Debug)] pub struct ClaudeResponse { pub content: Vec, #[serde(default)] pub error: Option, } #[derive(Deserialize, Debug)] pub struct ClaudeError { pub message: String, } #[derive(Deserialize, Debug)] pub struct ContentResponse { pub text: String, } // Client pub struct ClaudeClient { client: Client, api_key: String, model: String, prompt: Option, } impl ClaudeClient { pub fn new(api_key: String, model: String, prompt: Option) -> Self { Self { client: Client::new(), api_key, model, prompt, } } } #[async_trait] impl AiProvider for ClaudeClient { async fn send_request(&self, file_data: FileData) -> Result { let url = "https://api.anthropic.com/v1/messages".to_string(); let prompt = if let Some(custom_prompt) = &self.prompt { custom_prompt.clone() } else { "Take the handwritten notes from this image and convert them into a clean, well-structured Markdown file. Pay attention to headings, lists, and any other formatting. Resemble the hierarchy. Use latex for mathematical equations. For latex use the $$ syntax instead of ```latex. Do not skip anything from the original text. The output should be suitable for use in Obsidian. Just give me the markdown, do not include other text in the response apart from the markdown file. No explanation on how the changes were made is needed".to_string() }; let file_type = if file_data.mime_type == "application/pdf" { "document".to_string() } else { "image".to_string() }; let request_body = ClaudeRequest { model: self.model.clone(), max_tokens: 4096, messages: vec![Message { role: "user".to_string(), content: vec![ Content { content_type: file_type, text: None, source: Some(Source { source_type: "base64".to_string(), media_type: file_data.mime_type, data: file_data.encoded_data, }), }, Content { content_type: "text".to_string(), text: Some(prompt), source: None, }, ], }], }; let response = self .client .post(&url) .header("x-api-key", &self.api_key) .header("anthropic-version", "2023-06-01") .json(&request_body) .send() .await?; let status = response.status(); let response_body = response.text().await?; if status != StatusCode::OK { if status == StatusCode::UNAUTHORIZED { return Err(NotedError::InvalidApiKey); } let error_response: Result = serde_json::from_str(&response_body); if let Ok(err_resp) = error_response { if let Some(error) = err_resp.error { return Err(NotedError::ApiError(error.message)); } } return Err(NotedError::ApiError(format!( "Received status code: {}", status ))); } let claude_response: ClaudeResponse = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; if let Some(error) = claude_response.error { return Err(NotedError::ApiError(error.message)); } let markdown_text = claude_response .content .first() .map(|c| c.text.as_str()) .unwrap_or(""); let cleaned_markdown = markdown_text .trim_start_matches("```markdown\n") .trim_end_matches("```"); Ok(cleaned_markdown.to_string()) } } ================================================ FILE: src/clients/gemini_client.rs ================================================ use crate::ai_provider::AiProvider; use crate::error::NotedError; use crate::file_utils::FileData; use async_trait::async_trait; use reqwest::{Client, StatusCode}; use serde::{Deserialize, Serialize}; // Request structs #[derive(Serialize)] struct GeminiRequest { contents: Vec, } #[derive(Serialize)] struct Content { parts: Vec, } #[derive(Serialize)] struct Part { #[serde(skip_serializing_if = "Option::is_none")] text: Option, #[serde(skip_serializing_if = "Option::is_none")] inline_data: Option, } #[derive(Serialize)] struct InlineData { #[serde(rename = "mimeType")] mime_type: String, data: String, } // Response structs #[derive(Deserialize, Debug)] pub struct GeminiResponse { pub candidates: Option>, #[serde(default)] pub error: Option, } #[derive(Deserialize, Debug)] pub struct GeminiError { pub message: String, } #[derive(Deserialize, Debug)] pub struct Candidate { pub content: ContentResponse, } #[derive(Deserialize, Debug)] pub struct ContentResponse { pub parts: Vec, } #[derive(Deserialize, Debug)] pub struct PartResponse { pub text: String, } // Client pub struct GeminiClient { client: Client, api_key: String, prompt: Option, } impl GeminiClient { pub fn new(api_key: String, prompt: Option) -> Self { Self { client: Client::new(), api_key, prompt, } } } #[async_trait] impl AiProvider for GeminiClient { async fn send_request(&self, file_data: FileData) -> Result { let url = format!( "https://generativelanguage.googleapis.com/v1beta/models/gemma-3-27b-it:generateContent?key={}", self.api_key ); let prompt = if let Some(custom_prompt) = &self.prompt { custom_prompt.clone() } else { "Take the handwritten notes from this image and convert them into a clean, well-structured Markdown file. Pay attention to headings, lists, and any other formatting. Resemble the hierarchy. Use latex for mathematical equations. For latex use the $$ syntax instead of ```latex. Do not skip anything from the original text. The output should be suitable for use in Obsidian. Just give me the markdown, do not include other text in the response apart from the markdown file. No explanation on how the changes were made is needed".to_string() }; let request_body = GeminiRequest { contents: vec![Content { parts: vec![ Part { text: Some(prompt), inline_data: None, }, Part { text: None, inline_data: Some(InlineData { mime_type: file_data.mime_type, data: file_data.encoded_data, }), }, ], }], }; let response = self.client.post(&url).json(&request_body).send().await?; let status = response.status(); let response_body = response.text().await?; if status != StatusCode::OK { if status == StatusCode::UNAUTHORIZED { return Err(NotedError::InvalidApiKey); } let error_response: Result = serde_json::from_str(&response_body); if let Ok(err_resp) = error_response { if let Some(error) = err_resp.error { return Err(NotedError::ApiError(error.message)); } } return Err(NotedError::ApiError(format!( "Received status code: {}", status ))); } let gemini_response: GeminiResponse = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; if let Some(error) = gemini_response.error { return Err(NotedError::ApiError(error.message)); } let markdown_text = gemini_response .candidates .as_ref() .and_then(|candidates| candidates.first()) .and_then(|candidate| candidate.content.parts.first()) .map(|part| part.text.as_str()) .unwrap_or(""); let cleaned_markdown = markdown_text .trim_start_matches("```markdown\n") .trim_end_matches("```"); Ok(cleaned_markdown.to_string()) } } ================================================ FILE: src/clients/mod.rs ================================================ pub mod claude_client; pub mod gemini_client; pub mod notion_client; pub mod ollama_client; pub mod openai_client; ================================================ FILE: src/clients/notion_client.rs ================================================ use std::collections::HashMap; use anyhow::Result; use colored::Colorize; use comrak::Arena; use notion_client::objects::block::Block; use reqwest::Client; use serde::{Deserialize, Serialize}; use crate::{config, error::NotedError, notion::converter}; // Request structs #[derive(Serialize)] pub struct NotionRequest { pub parent: Parent, pub properties: serde_json::Map, pub children: Vec, } #[derive(Serialize)] pub struct Parent { pub database_id: String, } // Response Struct #[derive(Deserialize, Debug)] pub struct NotionResponse { #[serde(rename = "id")] pub _id: String, pub url: String, } #[derive(Deserialize, Debug)] pub struct NotionDatabase { pub properties: HashMap, } #[derive(Deserialize, Debug)] pub struct DatabaseProperty { #[serde(rename = "id")] pub _id: String, pub name: String, #[serde(flatten)] pub type_specific_config: PropertyType, } #[derive(Deserialize, Debug)] #[serde(tag = "type")] #[serde(rename_all = "snake_case")] pub enum PropertyType { Title(EmptyStruct), RichText(EmptyStruct), Number(EmptyStruct), Select { select: SelectStruct }, MultiSelect { multi_select: SelectStruct }, Date(EmptyStruct), Checkbox(EmptyStruct), People(EmptyStruct), Files(EmptyStruct), Url(EmptyStruct), Email(EmptyStruct), CreatedTime(EmptyStruct), CreatedBy(EmptyStruct), LastEditedTime(EmptyStruct), LastEditedBy(EmptyStruct), Status { status: SelectStruct }, Formula(EmptyStruct), Relation(EmptyStruct), Rollup(EmptyStruct), PhoneNumber(EmptyStruct), Button(EmptyStruct), UniqueId(EmptyStruct), Verification(EmptyStruct), } #[derive(Deserialize, Debug)] pub struct SelectStruct { pub options: Vec, } #[derive(Deserialize, Debug, Clone)] pub struct DatabaseSelectOption { #[serde(rename = "id")] pub _id: String, pub name: String, #[serde(rename = "color")] pub _color: String, } #[derive(Deserialize, Debug)] pub struct NumberStruct { pub _number: NumberFormat, } #[derive(Deserialize, Debug)] pub struct NumberFormat { pub _format: String, } #[derive(Deserialize, Debug)] pub struct EmptyStruct {} #[derive(Deserialize, Debug)] pub struct NotionError { pub message: String, } // Client pub struct NotionClient { client: Client, api_key: String, database_id: String, } impl NotionClient { pub fn new(api_key: String, database_id: String) -> Self { Self { client: Client::new(), api_key, database_id, } } pub async fn get_database_schema(&self) -> Result { let url = format!("https://api.notion.com/v1/databases/{}", self.database_id); let response = self .client .get(url) .header("Authorization", format!("Bearer {}", self.api_key)) .header("Notion-Version", "2022-06-28") .send() .await?; let status = response.status(); let response_body = response.text().await?; if status.is_success() { let notion_database: NotionDatabase = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; Ok(notion_database) } else { let error_response: NotionError = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; Err(NotedError::ApiError(format!( "Notion API Error ({}): {}", status, error_response.message.red() ))) } } pub async fn create_notion_page( &self, title: &str, title_property_name: &str, properties: &[config::NotionPropertyConfig], markdown_content: &str, ) -> Result { let url = "https://api.notion.com/v1/pages"; let arena = Arena::new(); let blocks = converter::Converter::run(&markdown_content, &arena) .map_err(|e| NotedError::ApiError(e.to_string()))?; let mut props_map = serde_json::Map::new(); props_map.insert( title_property_name.to_string(), serde_json::json!( { "title": [ { "text":{ "content": title } } ] }), ); for prop_config in properties { let prop_name = &prop_config.name; let prop_type = &prop_config.property_type; let prop_value = &prop_config.default_value; let notion_property_value = match prop_type.as_str() { "multi_select" => { if let Some(arr) = prop_value.as_array() { let options: Vec<_> = arr .iter() .map(|val| serde_json::json!({"name": val})) .collect(); serde_json::json!({"multi_select": options}) } else { continue; } } "select" => serde_json::json!({ "select": { "name": prop_value } }), "rich_text" => serde_json::json!({ "rich_text": [ { "type": "text", "text": { "content": prop_value } } ] }), "number" => serde_json::json!({ "number": prop_value }), "date" => serde_json::json!({ "date": { "start": prop_value } }), "checkbox" => serde_json::json!({ "checkbox": prop_value }), _ => continue, }; props_map.insert(prop_name.clone(), notion_property_value); } let request_body = NotionRequest { parent: Parent { database_id: self.database_id.clone(), }, properties: props_map, children: blocks, }; let response = self .client .post(url) .header("Authorization", format!("Bearer {}", self.api_key)) .header("Notion-Version", "2022-06-28") .json(&request_body) .send() .await?; let status = response.status(); let response_body = response.text().await?; if status.is_success() { let notion_reponse: NotionResponse = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; Ok(notion_reponse) } else { let error_response: NotionError = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; Err(NotedError::ApiError(format!( "Notion API Error ({}): {}", status, error_response.message ))) } } } ================================================ FILE: src/clients/ollama_client.rs ================================================ use async_trait::async_trait; use reqwest::{Client, StatusCode}; use serde::{Deserialize, Serialize}; use crate::{ai_provider::AiProvider, error::NotedError, file_utils::FileData}; // Request struct #[derive(Serialize)] struct OllamaRequest { model: String, prompt: String, images: Vec, stream: bool, } // Response struct #[derive(Deserialize, Debug)] pub struct OllamaResponse { pub response: String, #[serde(default)] pub error: Option, } // Client struct pub struct OllamaClient { client: Client, url: String, model: String, prompt: Option, } impl OllamaClient { pub fn new(url: String, model: String, prompt: Option) -> Self { Self { client: Client::new(), url, model, prompt, } } } #[async_trait] impl AiProvider for OllamaClient { async fn send_request(&self, file_data: FileData) -> Result { let url = format!("{}/api/generate", self.url); let prompt = if let Some(custom_prompt) = &self.prompt { custom_prompt.clone() } else { "The user has provided an image of handwritten notes. Your task is to accurately transcribe these notes into a well-structured Markdown file. Preserve the original hierarchy, including headings and lists. Use LaTeX for any mathematical equations that appear in the notes. The output should only be the markdown content.".to_string() }; let request_body = OllamaRequest { model: self.model.clone(), prompt, images: vec![file_data.encoded_data], stream: false, }; let response = self.client.post(&url).json(&request_body).send().await?; let status = response.status(); let response_body = response.text().await?; if status != StatusCode::OK { let error_response: Result = serde_json::from_str(&response_body); if let Ok(err_resp) = error_response { if let Some(error) = err_resp.error { return Err(NotedError::ApiError(error)); } } return Err(NotedError::ApiError(format!( "Received status code: {}", status ))); } let ollama_response: OllamaResponse = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; if let Some(error) = ollama_response.error { return Err(NotedError::ApiError(error)); } let cleaned_markdown = ollama_response .response .trim_start_matches("```markdown\n") .trim_end_matches("```"); Ok(cleaned_markdown.to_string()) } } ================================================ FILE: src/clients/openai_client.rs ================================================ use crate::{ai_provider::AiProvider, error::NotedError, file_utils::FileData}; use async_trait::async_trait; use reqwest::{Client, StatusCode}; use serde::{Deserialize, Serialize}; // Request structs #[derive(Serialize)] struct OpenAIRequest { model: String, messages: Vec, } #[derive(Serialize)] struct Message { role: String, content: Vec, } #[derive(Serialize)] struct Content { #[serde(rename = "type")] content_type: String, #[serde(skip_serializing_if = "Option::is_none")] text: Option, #[serde(skip_serializing_if = "Option::is_none")] image_url: Option, } #[derive(Serialize)] struct Image { url: String, } // Response structs #[derive(Deserialize, Debug)] pub struct OpenAIResponse { pub choices: Vec, #[serde(default)] pub error: Option, } #[derive(Deserialize, Debug)] pub struct OpenAIError { pub message: String, } #[derive(Deserialize, Debug)] pub struct Choice { pub message: ResponseMessage, } #[derive(Deserialize, Debug)] pub struct ResponseMessage { pub content: String, } //Client pub struct OpenAIClient { client: Client, url: String, model: String, api_key: Option, prompt: Option, } impl OpenAIClient { pub fn new( url: String, model: String, api_key: Option, prompt: Option, ) -> Self { Self { client: Client::new(), url, model, api_key, prompt, } } } #[async_trait] impl AiProvider for OpenAIClient { async fn send_request(&self, file_data: FileData) -> Result { let url = format!("{}/v1/chat/completions", self.url); let prompt = if let Some(custom_prompt) = &self.prompt { custom_prompt.clone() } else { "The user has provided an image of handwritten notes. Your task is to accurately transcribe these notes into a well-structured Markdown file. Preserve the original hierarchy, including headings and lists. Use LaTeX for any mathematical equations that appear in the notes. The output should only be the markdown content.".to_string() }; let image_url = format!( "data:{};base64,{}", file_data.mime_type, file_data.encoded_data ); let request_body = OpenAIRequest { model: self.model.clone(), messages: vec![Message { role: "user".to_string(), content: vec![ Content { content_type: "text".to_string(), text: Some(prompt), image_url: None, }, Content { content_type: "image_url".to_string(), text: None, image_url: Some(Image { url: image_url }), }, ], }], }; let mut request = self.client.post(&url); if let Some(api_key) = &self.api_key { request = request.header("Authorization", format!("Bearer {}", api_key)); } let response = request.json(&request_body).send().await?; let status = response.status(); let response_body = response.text().await?; if status != StatusCode::OK { let error_response: Result = serde_json::from_str(&response_body); if let Ok(err_resp) = error_response { if let Some(error) = err_resp.error { return Err(NotedError::ApiError(error.message)); } } return Err(NotedError::ApiError(format!( "Received status code: {}", status ))); } let openai_response: OpenAIResponse = serde_json::from_str(&response_body) .map_err(|e| NotedError::ResponseDecodeError(e.to_string()))?; if let Some(error) = openai_response.error { return Err(NotedError::ApiError(error.message)); } let markdown_text = openai_response .choices .first() .map(|c| c.message.content.as_str()) .unwrap_or(""); let cleaned_markdown = markdown_text .trim_start_matches("```markdown\n") .trim_end_matches("```"); Ok(cleaned_markdown.to_string()) } } ================================================ FILE: src/config.rs ================================================ use crate::error::NotedError; use directories::ProjectDirs; use serde::{Deserialize, Serialize}; use std::{fs, path::PathBuf}; #[derive(Serialize, Deserialize, Debug, Default)] pub struct Config { pub active_provider: Option, pub gemini: Option, pub ollama: Option, pub claude: Option, pub openai: Option, pub notion: Option, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct NotionConfig { pub api_key: String, pub database_id: String, #[serde(default)] pub title_property_name: String, #[serde(default)] pub properties: Vec, } #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct NotionPropertyConfig { pub name: String, pub property_type: String, pub default_value: serde_json::Value, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct ClaudeConfig { pub api_key: String, pub model: String, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct GeminiConfig { pub api_key: String, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct OllamaConfig { pub url: String, pub model: String, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct OpenAIConfig { pub url: String, pub model: String, pub api_key: Option, } pub fn get_config_path() -> Option { ProjectDirs::from("com", "company", "notedmd").map(|dirs| { let config_dir = dirs.config_dir(); if !config_dir.exists() { fs::create_dir_all(config_dir).ok(); } config_dir.join("config.toml") }) } impl Config { pub fn load() -> Result { if let Some(config_path) = get_config_path() { if config_path.exists() { let content = fs::read_to_string(config_path)?; return Ok(toml::from_str(&content)?); } } Ok(Self::default()) } pub fn save(&self) -> Result<(), NotedError> { if let Some(config_path) = get_config_path() { let toml_string = toml::to_string_pretty(self)?; fs::write(config_path, toml_string)?; } Ok(()) } } ================================================ FILE: src/error.rs ================================================ use thiserror::Error; #[derive(Debug, Error)] pub enum NotedError { #[error(" Configuration file not found. Please run 'notedmd config --edit' to set it up.")] ConfigNotFound, #[error(" Failed to save configuration: {0}")] ConfigSaveError(#[from] toml::ser::Error), #[error(" Failed to read configuration: {0}")] ConfigReadError(#[from] toml::de::Error), #[error(" I/O error: {0}")] IoError(#[from] std::io::Error), #[error(" Network request failed: {0}")] NetworkError(#[from] reqwest::Error), #[error(" API key is invalid or missing. Please check your configuration.")] InvalidApiKey, #[error(" The AI provider returned an error: {0}")] ApiError(String), #[error(" Failed to decode API response: {0}")] ResponseDecodeError(String), #[error(" Could not determine the file name for the path: {0}")] FileNameError(String), #[error(" File type not supported: {0}")] UnsupportedFileType(String), #[error(" Ollama is not configured properly. Please run 'notedmd config --edit' to set it up.")] OllamaNotConfigured, #[error(" Gemini is not configured properly. Please run 'notedmd config --edit' to set it up.")] GeminiNotConfigured, #[error(" Claude is not configured properly. Please run 'notedmd config --edit' to set it up.")] ClaudeNotConfigured, #[error(" Notion is not configured properly. Please run 'notedmd config --edit' to set it up.")] NotionNotConfigured, #[error( " OpenAI/LM Studio is not configured properly. Please run 'notedmd config --edit' to set it up." )] OpenAINotConfigured, #[error(" No active provider. Please run 'notedmd config --edit' to set a provider.")] NoActiveProvider, #[error(" Dialoguer error: {0}")] DialoguerError(#[from] dialoguer::Error), } ================================================ FILE: src/file_utils.rs ================================================ use crate::error::NotedError; use base64::{Engine, engine::general_purpose}; use std::{fs, path::Path}; pub struct FileData { pub encoded_data: String, pub mime_type: String, } pub fn process_file(file_path: &str) -> Result { let data = fs::read(file_path)?; let encoded_data: String = general_purpose::STANDARD.encode(&data); let mime_type = get_file_mime_type(file_path)?; Ok(FileData { encoded_data, mime_type, }) } pub fn get_file_mime_type(file_path: &str) -> Result { let file_extension = Path::new(file_path) .extension() .and_then(|ext| ext.to_str()); match file_extension { Some("png") => Ok("image/png".to_string()), Some("pdf") => Ok("application/pdf".to_string()), Some("jpg") => Ok("image/jpeg".to_string()), Some("jpeg") => Ok("image/jpeg".to_string()), Some(ext) => Err(NotedError::UnsupportedFileType(ext.to_string())), None => Err(NotedError::UnsupportedFileType("No extension".to_string())), } } ================================================ FILE: src/main.rs ================================================ mod ai_provider; mod cli; mod clients; mod config; mod error; mod file_utils; mod notion; mod ui; use ai_provider::AiProvider; use clap::Parser; use cli::{Cli, Commands}; use colored::*; use config::{ClaudeConfig, Config, GeminiConfig, OllamaConfig}; use dialoguer::Confirm; use dialoguer::Input; use dialoguer::MultiSelect; use dialoguer::Select; use dialoguer::{Password, theme::ColorfulTheme}; use error::NotedError; use indicatif::ProgressBar; use indicatif::ProgressStyle; use crate::clients::claude_client::ClaudeClient; use crate::clients::gemini_client::GeminiClient; use crate::clients::notion_client::NotionClient; use crate::clients::notion_client::PropertyType; use crate::clients::ollama_client::OllamaClient; use crate::clients::openai_client::OpenAIClient; use crate::config::NotionConfig; use crate::config::OpenAIConfig; use std::path::Path; use ui::{ascii_art, print_clean_config}; use crate::config::get_config_path; async fn process_and_save_file( file_path: &str, client: &dyn AiProvider, output_dir: Option<&str>, progress_bar: &ProgressBar, notion_client: Option<&NotionClient>, notion_config: Option<&NotionConfig>, ) -> Result<(), NotedError> { let path = Path::new(file_path); let file_name = match path.file_name() { Some(name) => name, None => { return Err(NotedError::FileNameError(file_path.to_string())); } }; progress_bar.println(format!( "\n{}", format!("Processing file: {:#?}", file_name).bold() )); let file_data = file_utils::process_file(file_path)?; progress_bar.println(format!( "{} {}", "✔".green(), "File read successfully.".green() )); progress_bar.set_message(format!("{}", "Sending to your AI model...".yellow())); let markdown = client.send_request(file_data).await?; progress_bar.println(format!("{} {}", "✔".green(), "Received response.".green())); let output_path = match output_dir { Some(dir) => { let dir_path = Path::new(dir); if !dir_path.exists() { std::fs::create_dir_all(dir_path)?; } let final_path = dir_path.join(file_name); final_path .with_extension("md") .to_string_lossy() .into_owned() } None => path.with_extension("md").to_string_lossy().into_owned(), }; match std::fs::write(&output_path, &markdown) { Ok(_) => { progress_bar.println(format!( "{} {}", "✔".green(), format!("Markdown saved to '{}'", output_path.cyan()).green() )); if let (Some(client), Some(config)) = (notion_client, notion_config) { match client .create_notion_page( file_name.to_string_lossy().into_owned().as_str(), &config.title_property_name, &config.properties, &markdown, ) .await { Ok(page) => { progress_bar.println(format!( "{} {}", "✔".green(), format!("Notion page created at '{}'", page.url.cyan()).green() )); } Err(e) => { return Err(e); } } }; Ok(()) } Err(e) => { progress_bar.println(format!( "{} {}", "✖".red(), format!("Failed to save file to '{}'. Error: {}", &output_path, e).red() )); Err(e.into()) } } } async fn run() -> Result<(), NotedError> { let args = Cli::parse(); match args.command { Commands::Config { set_api_key, set_claude_api_key, set_provider, show_path, show, edit, } => { if show_path { if let Some(config_path) = config::get_config_path() { if config_path.exists() { println!("Config saved in {:?}", config_path); } else { return Err(NotedError::ConfigNotFound); } } } if show { if let Some(config_path) = config::get_config_path() { if config_path.exists() { let config = Config::load()?; print_clean_config(config); } else { return Err(NotedError::ConfigNotFound); } } } if let Some(ref key) = set_api_key { let mut config = Config::load()?; config.active_provider = Some("gemini".to_string()); config.gemini = Some(config::GeminiConfig { api_key: key.to_string(), }); config.save()?; println!("Config saved successfully."); } if let Some(ref key) = set_claude_api_key { let mut config = Config::load()?; config.active_provider = Some("claude".to_string()); let model = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Claude model") .default("claude-3-opus-20240229".to_string()) .interact_text()?; config.claude = Some(config::ClaudeConfig { api_key: key.to_string(), model, }); config.save()?; println!("Config saved successfully."); } if edit { ascii_art(); println!( "{}\n", "Welcome to noted.md! Let's set up your AI provider.".bold() ); let providers = vec![ "Gemini API (Cloud-based, requires API key)", "Claude API (Cloud-based, requires API key)", "Ollama (Local, requires Ollama to be set up)", "OpenAI Compatible API (Cloud/Local, works with LM Studio)", ]; let selected_provider = Select::with_theme(&ColorfulTheme::default()) .with_prompt("Choose your AI provider") .items(&providers) .default(0) .interact()?; match selected_provider { 0 => { let mut config = Config::load()?; let api_key = Password::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your Gemini API key: ") .interact()?; config.active_provider = Some("gemini".to_string()); config.gemini = Some(GeminiConfig { api_key }); config.save()?; println!("{}", "Config saved successfully.".green()); } 1 => { let mut config = Config::load()?; let api_key = Password::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your Claude API key: ") .interact()?; config.active_provider = Some("claude".to_string()); let anthropic_models = vec![ " claude-opus-4-20250514", " claude-sonnet-4-20250514", " claude-3-7-sonnet-20250219", " claude-3-5-haiku-20241022", " claude-3-5-sonnet-20241022", " Other", ]; let selected_model = Select::with_theme(&ColorfulTheme::default()) .with_prompt("Choose your Claude model:") .items(&anthropic_models) .default(0) .interact()?; let model = if selected_model == anthropic_models.len() - 1 { Input::with_theme(&ColorfulTheme::default()) .with_prompt("Enter the custom model name:") .interact_text()? } else { anthropic_models[selected_model].trim().to_string() }; config.claude = Some(ClaudeConfig { api_key, model }); config.save()?; println!("{}", "Config saved successfully.".green()); } 2 => { let url = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Ollama server url") .default("http://localhost:11434".to_string()) .interact_text()?; let model = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Ollama model") .default("gemma3:27b".to_string()) .interact_text()?; let mut config = Config::load()?; config.active_provider = Some("ollama".to_string()); config.ollama = Some(OllamaConfig { url, model }); config.save()?; println!("{}", "Config saved successfully.".green()); } 3 => { let url = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Server url") .default("http://localhost:1234".to_string()) .interact_text()?; let model = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Model") .default("gemma3:27b".to_string()) .interact_text()?; let api_key_str = Password::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your API key (Optional, press Enter if none): ") .allow_empty_password(true) .interact()?; let api_key = if api_key_str.is_empty() { None } else { Some(api_key_str) }; let mut config = Config::load()?; config.active_provider = Some("openai".to_string()); config.openai = Some(OpenAIConfig { url, model, api_key, }); config.save()?; println!("{}", "Config saved successfully.".green()); } _ => unreachable!(), } // notion let is_notion = Confirm::with_theme(&ColorfulTheme::default()) .with_prompt("Do you want to configure Notion to save your notes there?") .interact()?; if is_notion { let api_key = Password::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your Notion API key: ") .interact()?; let database_id = Password::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your Notion Database ID: ") .interact()?; let spinner = ProgressBar::new_spinner(); spinner.set_style( ProgressStyle::default_spinner() .template("{spinner:.cyan} {msg}") .unwrap(), ); spinner.set_message("Fetching Notion database schema..."); spinner.enable_steady_tick(std::time::Duration::from_millis(100)); let client = NotionClient::new(api_key.clone(), database_id.clone()); let schema_result = client.get_database_schema().await; spinner.finish_and_clear(); match schema_result { Ok(schema) => { let title_property_name = schema .properties .values() .find(|prop| { matches!(prop.type_specific_config, PropertyType::Title(_)) }) .map(|prop| prop.name.clone()) .ok_or_else(|| { NotedError::ApiError(format!( "{}", "Database has no title property".red() )) })?; let properties: Vec<_> = schema .properties .into_iter() .filter(|(_name, property)| match &property.type_specific_config { PropertyType::Select { .. } | PropertyType::MultiSelect { .. } | PropertyType::RichText(_) | PropertyType::Number(_) | PropertyType::Date(_) | PropertyType::Checkbox(_) => true, _ => false, }) .collect(); let mut default_properties = Vec::new(); if properties.is_empty() { println!( "{}", "No user configurable properties found in this database." .yellow() ); } else { println!("Enter the default values for the following properties: "); } for (name, property) in &properties { match &property.type_specific_config { PropertyType::MultiSelect { multi_select } => { let options: Vec<_> = multi_select .options .iter() .map(|option| option.name.clone()) .collect(); let selections = MultiSelect::with_theme(&ColorfulTheme::default()) .with_prompt(format!( "Select default options for '{}' (press Space to select and Enter to confirm)", name )) .items(&options) .interact()?; let selected_names: Vec = selections .iter() .map(|&i| options[i].clone()) .collect(); let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "multi_select".to_string(), default_value: serde_json::json!(selected_names), }; default_properties.push(prop_config); } PropertyType::Select { select } => { let options: Vec<_> = select .options .iter() .map(|option| option.name.clone()) .collect(); let selection = Select::with_theme(&ColorfulTheme::default()) .with_prompt(format!("Select default option for '{}' (Select and Enter to confirm)", name)) .items(&options) .interact()?; let selected_name = options[selection].clone(); let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "select".to_string(), default_value: serde_json::json!(selected_name), }; default_properties.push(prop_config); } PropertyType::RichText(_) => { let default_value: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt(format!("Default text for '{}'", name)) .interact_text()?; let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "rich_text".to_string(), default_value: serde_json::json!(default_value), }; default_properties.push(prop_config); } PropertyType::Checkbox(_) => { let checked = Confirm::with_theme(&ColorfulTheme::default()) .with_prompt(format!( "Should '{}' be checked by default?", name )) .interact()?; let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "checkbox".to_string(), default_value: serde_json::json!(checked), }; default_properties.push(prop_config); } PropertyType::Date(_) => { let default_value: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt(format!( "Default date for '{}' (YYYY-MM-DD)", name )) .interact_text()?; let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "date".to_string(), default_value: serde_json::json!(default_value), }; default_properties.push(prop_config); } PropertyType::Number(_) => { let default_value: f64 = Input::with_theme(&ColorfulTheme::default()) .with_prompt(format!( "Default number for '{}'", name )) .interact()?; let prop_config = config::NotionPropertyConfig { name: name.clone(), property_type: "number".to_string(), default_value: serde_json::json!(default_value), }; default_properties.push(prop_config); } _ => { println!( "{} Property '{}' is not supported for default configuration.", "✖".red(), name ); } } } let mut config = Config::load()?; config.notion = Some(NotionConfig { api_key, database_id, title_property_name, properties: default_properties, }); config.save()?; } Err(e) => eprintln!("{}", e), } } println!( "{}", "You can now run 'notedmd convert ' to convert your files.".cyan() ); } if let Some(ref new_provider) = set_provider { if let Some(config_path) = get_config_path() { if !config_path.exists() { return Err(NotedError::ConfigNotFound); } let mut config = Config::load()?; let new_provider_str = new_provider.as_str(); let is_configured = match new_provider_str { "gemini" => config.gemini.is_some(), "claude" => config.claude.is_some(), "ollama" => config.ollama.is_some(), "openai" => config.openai.is_some(), _ => { eprintln!( "Invalid provider '{}'. Please choose from 'gemini', 'claude', or 'ollama'.", new_provider ); return Ok(()); } }; if is_configured { config.active_provider = Some(new_provider_str.to_string()); config.save()?; println!("Active provider set to '{}'.", new_provider_str.cyan()); } else { eprintln!( "{} is not configured. Please run 'notedmd config --edit' to set it up.", new_provider_str.yellow() ); } } } if !edit && !show && !show_path && set_api_key.is_none() && set_claude_api_key.is_none() && set_provider.is_none() { if let Some(config_path) = get_config_path() { if config_path.exists() { let config = Config::load()?; print_clean_config(config); } else { return Err(NotedError::ConfigNotFound); } } } } Commands::Convert { path, output, api_key, prompt, notion, } => { let config = Config::load()?; let client: Box = match config.active_provider.as_deref() { Some("gemini") => { let final_api_key = if let Some(key) = api_key { key } else if let Some(gemini_config) = &config.gemini { gemini_config.api_key.clone() } else { return Err(NotedError::GeminiNotConfigured); }; Box::new(GeminiClient::new(final_api_key, prompt)) } Some("ollama") => { let url = if let Some(ollama_config) = &config.ollama { ollama_config.url.clone() } else { return Err(NotedError::OllamaNotConfigured); }; let model = if let Some(ollama_config) = &config.ollama { ollama_config.model.clone() } else { return Err(NotedError::OllamaNotConfigured); }; Box::new(OllamaClient::new(url, model, prompt)) } Some("claude") => { let api_key = if let Some(key) = api_key { key } else if let Some(claude_config) = &config.claude { claude_config.api_key.clone() } else { return Err(NotedError::ClaudeNotConfigured); }; let model = if let Some(claude_config) = &config.claude { claude_config.model.clone() } else { return Err(NotedError::ClaudeNotConfigured); }; Box::new(ClaudeClient::new(api_key, model, prompt)) } Some("openai") => { let url = if let Some(openai_config) = &config.openai { openai_config.url.clone() } else { return Err(NotedError::OpenAINotConfigured); }; let model = if let Some(openai_config) = &config.openai { openai_config.model.clone() } else { return Err(NotedError::OpenAINotConfigured); }; let api_key = if let Some(openai_config) = &config.openai { openai_config.api_key.clone() } else { return Err(NotedError::OpenAINotConfigured); }; Box::new(OpenAIClient::new(url, model, api_key, prompt)) } _ => return Err(NotedError::NoActiveProvider), }; let input_path = Path::new(&path); if !input_path.exists() { return Err(NotedError::IoError(std::io::Error::new( std::io::ErrorKind::NotFound, format!("Input path not found: {}", path), ))); } let (notion_client, notion_config) = if notion { if let Some(config) = &config.notion { let client = NotionClient::new(config.api_key.clone(), config.database_id.clone()); (Some(client), Some(config)) } else { return Err(NotedError::NotionNotConfigured); } } else { (None, None) }; if input_path.is_dir() { let files_to_convert: Vec<_> = std::fs::read_dir(input_path)? .filter_map(Result::ok) .filter_map(|entry| { let path = entry.path(); if path.is_file() { if let Some(path_str) = path.to_str() { if file_utils::get_file_mime_type(path_str).is_ok() { return Some(path); } } } None }) .collect(); if files_to_convert.is_empty() { println!("No supported files found in the directory."); return Ok(()); } let progress_bar = ProgressBar::new(files_to_convert.len() as u64); progress_bar.set_style( ProgressStyle::default_bar() .template("{bar:40.cyan/blue} {pos}/{len} {msg}") .unwrap(), ); progress_bar.set_message("Processing files..."); for file_path_buf in files_to_convert { if let Some(file_path_str) = file_path_buf.to_str() { if let Err(e) = process_and_save_file( file_path_str, client.as_ref(), output.as_deref(), &progress_bar, notion_client.as_ref(), notion_config, ) .await { progress_bar.println(format!("{}", e.to_string().red())); } } progress_bar.inc(1); } progress_bar .finish_with_message(format!("{}", "Completed processing all files".green())); } else { let path_str = input_path.to_str().ok_or_else(|| { NotedError::FileNameError(input_path.to_string_lossy().to_string()) })?; file_utils::get_file_mime_type(path_str)?; let progress_bar = ProgressBar::new(1); progress_bar.set_style( ProgressStyle::default_bar() .template("{bar:40.cyan/blue} {pos}/{len} {msg}") .unwrap(), ); progress_bar.set_message("Processing file..."); if let Err(e) = process_and_save_file( path_str, client.as_ref(), output.as_deref(), &progress_bar, notion_client.as_ref(), notion_config, ) .await { progress_bar.println(format!("{}", e.to_string().red())); } progress_bar.inc(1); progress_bar .finish_with_message(format!("{}", "Completed processing file".green())); } } } Ok(()) } #[tokio::main] async fn main() { if let Err(e) = run().await { eprintln!("{} {}", "✖".red(), e.to_string().red()); std::process::exit(1); } } ================================================ FILE: src/notion/converter.rs ================================================ use anyhow::Result; use comrak::{ Arena, ComrakOptions, nodes::{AstNode, ListType, NodeValue}, parse_document, }; use notion_client::objects::{ block::{ Block, BlockType, BulletedListItemValue, EquationValue, HeadingsValue, NumberedListItemValue, ParagraphValue, }, rich_text::{self, RichText}, }; pub struct Converter<'a> { _arena: &'a Arena>, } impl<'a> Converter<'a> { pub fn run(markdown: &str, arena: &'a Arena>) -> Result, anyhow::Error> { let mut options = ComrakOptions::default(); options.extension.math_dollars = true; let root = parse_document(arena, markdown, &options); let mut converter = Self { _arena: arena }; let blocks = converter.render_nodes(root.children())?; Ok(blocks) } fn render_nodes( &mut self, nodes: impl Iterator>, ) -> Result, anyhow::Error> { let mut blocks = Vec::new(); for node in nodes { blocks.extend(self.render_node(node)?); } Ok(blocks) } fn render_node(&mut self, node: &'a AstNode<'a>) -> Result> { match &node.data.borrow().value { NodeValue::Heading(heading) => Ok(vec![self.render_heading(node, heading)?]), NodeValue::Paragraph => { let mut children = node.children(); if let (Some(child), None) = (children.next(), children.next()) { if let NodeValue::Math(_) = &child.data.borrow().value { return Ok(vec![self.render_math(child)?]); } } Ok(vec![self.render_paragraph(node)?]) } NodeValue::List(list) => match list.list_type { ListType::Bullet => self.render_bullet_list(node), ListType::Ordered => self.render_numbered_list(node), }, _ => Ok(Vec::new()), } } fn render_bullet_list(&mut self, node: &'a AstNode<'a>) -> Result> { let mut items = Vec::new(); for child in node.children() { let block = self.render_bulleted_list_item(child)?; items.push(block); } Ok(items) } fn render_numbered_list(&mut self, node: &'a AstNode<'a>) -> Result> { let mut items = Vec::new(); for child in node.children() { let block = self.render_numbered_list_item(child)?; items.push(block); } Ok(items) } fn render_numbered_list_item(&mut self, node: &'a AstNode<'a>) -> Result { let mut rich_text = Vec::new(); if let Some(paragraph) = node .children() .find(|child| matches!(child.data.borrow().value, NodeValue::Paragraph)) { rich_text = self.render_rich_text(paragraph)?; } let value = NumberedListItemValue { rich_text, color: notion_client::objects::block::TextColor::Default, children: None, }; Ok(Block { block_type: BlockType::NumberedListItem { numbered_list_item: value, }, ..Default::default() }) } fn render_bulleted_list_item(&mut self, node: &'a AstNode<'a>) -> Result { let mut rich_text = Vec::new(); if let Some(paragraph) = node .children() .find(|child| matches!(child.data.borrow().value, NodeValue::Paragraph)) { rich_text = self.render_rich_text(paragraph)?; } let value = BulletedListItemValue { rich_text, color: notion_client::objects::block::TextColor::Default, children: None, }; Ok(Block { block_type: BlockType::BulletedListItem { bulleted_list_item: value, }, ..Default::default() }) } fn render_math(&mut self, node: &'a AstNode<'a>) -> Result { if let NodeValue::Math(math) = &node.data.borrow().value { let expression = math.literal.clone(); let value = EquationValue { expression }; let block_type = BlockType::Equation { equation: value }; Ok(Block { block_type, ..Default::default() }) } else { Err(anyhow::anyhow!( "Node passed to render_math was not a Math node" )) } } fn render_paragraph(&mut self, node: &'a AstNode<'a>) -> Result { let rich_text = self.render_rich_text(node)?; let value = ParagraphValue { rich_text, ..Default::default() }; let block_type = BlockType::Paragraph { paragraph: value }; Ok(Block { block_type, ..Default::default() }) } fn render_heading( &mut self, node: &'a AstNode<'a>, heading: &comrak::nodes::NodeHeading, ) -> Result { let rich_text = self.render_rich_text(node)?; let value = HeadingsValue { rich_text, ..Default::default() }; let block_type = match &heading.level { 1 => BlockType::Heading1 { heading_1: value }, 2 => BlockType::Heading2 { heading_2: value }, _ => BlockType::Heading3 { heading_3: value }, }; Ok(Block { block_type, ..Default::default() }) } fn render_rich_text( &mut self, node: &'a AstNode<'a>, ) -> Result> { let mut rich_text_nodes = Vec::new(); for child in node.children() { match &child.data.borrow().value { NodeValue::Text(text) => { rich_text_nodes.push(notion_client::objects::rich_text::RichText::Text { text: notion_client::objects::rich_text::Text { content: text.clone(), link: None, }, annotations: Default::default(), plain_text: Some(text.clone()), href: None, }); } NodeValue::Math(math) => { let latex = math.literal.clone(); rich_text_nodes.push(RichText::Equation { equation: rich_text::Equation { expression: latex.clone(), }, annotations: Default::default(), plain_text: latex.to_string(), href: None, }) } _ => {} } } Ok(rich_text_nodes) } } ================================================ FILE: src/notion/mod.rs ================================================ pub mod converter; ================================================ FILE: src/ui.rs ================================================ use crate::Config; use colored::Colorize; pub fn ascii_art() { println!( "{}", r" ███╗ ██╗ ██████╗ ████████╗███████╗██████╗ ███╗ ███╗██████╗ ████╗ ██║██╔═══██╗╚══██╔══╝██╔════╝██╔══██╗ ████╗ ████║██╔══██╗ ██╔██╗ ██║██║ ██║ ██║ █████╗ ██║ ██║ ██╔████╔██║██║ ██║ ██║╚██╗██║██║ ██║ ██║ ██╔══╝ ██║ ██║ ██║╚██╔╝██║██║ ██║ ██║ ╚████║╚██████╔╝ ██║ ███████╗██████╔╝██╗██║ ╚═╝ ██║██████╔╝ ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚══════╝╚═════╝ ╚═╝╚═╝ ╚═╝╚═════╝ " .bright_blue() ); println!( "{}", "-------------------------------------------------".dimmed() ); } pub fn print_clean_config(config: Config) { println!("{}", "noted.md Configuration".bold()); println!("-------------------------"); if let Some(provider) = config.active_provider { println!("Active Provider: {}", provider.green()); } else { println!("Active Provider: {}", "Not Set".yellow()); } println!("{}", "Gemini".bold()); if let Some(gemini_config) = config.gemini { let api_key = format!( "{:.3}***************** (hidden for security)", gemini_config.api_key ); println!(" API Key: {}", api_key); } else { println!(" (Not Configured)"); } println!("{}", "Claude".bold()); if let Some(claude_config) = config.claude { let api_key = format!( "{:.3}***************** (hidden for security)", claude_config.api_key ); println!(" API Key: {}", api_key); println!(" Model: {}", claude_config.model); } else { println!(" (Not Configured)"); } println!("{}", "Ollama".bold()); if let Some(ollama_config) = config.ollama { println!(" URL: {}", ollama_config.url); println!(" Model: {}", ollama_config.model); } else { println!(" (Not Configured)"); } println!("{}", "OpenAI (Compatible)".bold()); if let Some(openai_config) = config.openai { println!(" URL: {}", openai_config.url); println!(" Model: {}", openai_config.model); let api_key = if openai_config.api_key.is_none() { "API key empty.".to_string() } else { format!( "{:.3}***************** (hidden for security)", openai_config.api_key.unwrap() ) }; println!(" API Key: {}", api_key); } else { println!(" (Not Configured)"); } println!("{}", "Notion".bold()); if let Some(notion_config) = config.notion { let api_key = format!( "{:.3}***************** (hidden for security)", notion_config.api_key ); println!(" API Key: {}", api_key); println!(" Database ID: {}", notion_config.database_id); println!( " Title Property Name: {}", notion_config.title_property_name ); println!(" Database Properties: {:#?}", notion_config.properties); } else { println!(" (Not Configured)"); } }