First commit, already well advanced
814
internal/api/handler.go
Normal file
@@ -0,0 +1,814 @@
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/mathieu/project-notes/internal/indexer"
|
||||
)
|
||||
|
||||
// TreeNode represents a node in the file tree
|
||||
type TreeNode struct {
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
IsDir bool `json:"isDir"`
|
||||
Children []*TreeNode `json:"children,omitempty"`
|
||||
}
|
||||
|
||||
// Handler handles all API routes.
|
||||
type Handler struct {
|
||||
notesDir string
|
||||
idx *indexer.Indexer
|
||||
templates *template.Template
|
||||
logger *log.Logger
|
||||
}
|
||||
|
||||
// NewHandler builds a unified handler for the API.
|
||||
func NewHandler(notesDir string, idx *indexer.Indexer, tpl *template.Template, logger *log.Logger) *Handler {
|
||||
return &Handler{
|
||||
notesDir: notesDir,
|
||||
idx: idx,
|
||||
templates: tpl,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
path := r.URL.Path
|
||||
h.logger.Printf("%s %s", r.Method, path)
|
||||
|
||||
// REST API v1 endpoints
|
||||
if strings.HasPrefix(path, "/api/v1/notes") {
|
||||
h.handleRESTNotes(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
// Legacy/HTML endpoints
|
||||
if strings.HasPrefix(path, "/api/search") {
|
||||
h.handleSearch(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/folders/create" {
|
||||
h.handleCreateFolder(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/files/move" {
|
||||
h.handleMoveFile(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/notes/new-auto" {
|
||||
h.handleNewNoteAuto(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/notes/new-prompt" {
|
||||
h.handleNewNotePrompt(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/notes/create-custom" {
|
||||
h.handleCreateCustomNote(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/home" {
|
||||
h.handleHome(w, r)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(path, "/api/notes/") {
|
||||
h.handleNotes(w, r)
|
||||
return
|
||||
}
|
||||
if path == "/api/tree" {
|
||||
h.handleFileTree(w, r)
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}
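As a rough wiring sketch (not part of this commit), the handler above could be mounted on a standard net/http server along these lines; the notes directory, the template glob, and the port are assumptions, not values taken from this change:

package main

import (
	"html/template"
	"log"
	"net/http"
	"os"

	"github.com/mathieu/project-notes/internal/api"
	"github.com/mathieu/project-notes/internal/indexer"
)

func main() {
	logger := log.New(os.Stdout, "api ", log.LstdFlags)

	// Build the in-memory index once at startup ("notes" is an assumed directory).
	idx := indexer.New()
	if err := idx.Load("notes"); err != nil {
		logger.Fatal(err)
	}

	// "web/templates/*.html" is an assumed location for the HTML templates.
	tpl := template.Must(template.ParseGlob("web/templates/*.html"))

	// The handler routes on the full request path, so it is mounted at the root.
	http.Handle("/", api.NewHandler("notes", idx, tpl, logger))
	logger.Fatal(http.ListenAndServe(":8080", nil))
}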
|
||||
|
||||
// buildFileTree builds the hierarchical tree of files and folders
|
||||
func (h *Handler) buildFileTree() (*TreeNode, error) {
|
||||
root := &TreeNode{
|
||||
Name: "notes",
|
||||
Path: "",
|
||||
IsDir: true,
|
||||
Children: make([]*TreeNode, 0),
|
||||
}
|
||||
|
||||
err := filepath.WalkDir(h.notesDir, func(path string, d os.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip the root directory itself
|
||||
if path == h.notesDir {
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(h.notesDir, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip hidden files
|
||||
if strings.HasPrefix(d.Name(), ".") {
|
||||
if d.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// For files, keep only .md
|
||||
if !d.IsDir() && !strings.EqualFold(filepath.Ext(path), ".md") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Create the node
|
||||
node := &TreeNode{
|
||||
Name: d.Name(),
|
||||
Path: relPath,
|
||||
IsDir: d.IsDir(),
|
||||
Children: make([]*TreeNode, 0),
|
||||
}
|
||||
|
||||
// Find the parent and attach this node
|
||||
h.insertNode(root, node, relPath)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Sort children (directories first, then files, alphabetically)
|
||||
h.sortTreeNode(root)
|
||||
|
||||
return root, nil
|
||||
}
|
||||
|
||||
// insertNode inserts a node into the tree at the right position
|
||||
func (h *Handler) insertNode(root *TreeNode, node *TreeNode, path string) {
|
||||
parts := strings.Split(filepath.ToSlash(path), "/")
|
||||
|
||||
current := root
|
||||
for i := 0; i < len(parts)-1; i++ {
|
||||
found := false
|
||||
for _, child := range current.Children {
|
||||
if child.Name == parts[i] && child.IsDir {
|
||||
current = child
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
// Create the intermediate directory
|
||||
newDir := &TreeNode{
|
||||
Name: parts[i],
|
||||
Path: strings.Join(parts[:i+1], "/"),
|
||||
IsDir: true,
|
||||
Children: make([]*TreeNode, 0),
|
||||
}
|
||||
current.Children = append(current.Children, newDir)
|
||||
current = newDir
|
||||
}
|
||||
}
|
||||
|
||||
current.Children = append(current.Children, node)
|
||||
}
|
||||
|
||||
// sortTreeNode recursively sorts a node's children
|
||||
func (h *Handler) sortTreeNode(node *TreeNode) {
|
||||
if !node.IsDir || len(node.Children) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
sort.Slice(node.Children, func(i, j int) bool {
|
||||
// Directories before files
|
||||
if node.Children[i].IsDir != node.Children[j].IsDir {
|
||||
return node.Children[i].IsDir
|
||||
}
|
||||
// Then alphabetically
|
||||
return strings.ToLower(node.Children[i].Name) < strings.ToLower(node.Children[j].Name)
|
||||
})
|
||||
|
||||
// Recurse into children
|
||||
for _, child := range node.Children {
|
||||
h.sortTreeNode(child)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleFileTree(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
tree, err := h.buildFileTree()
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur lors de la construction de l arborescence: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Tree *TreeNode
|
||||
}{
|
||||
Tree: tree,
|
||||
}
|
||||
|
||||
err = h.templates.ExecuteTemplate(w, "file-tree.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template de l arborescence: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleHome(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate the Markdown content listing all the notes
|
||||
content := h.generateHomeMarkdown()
|
||||
|
||||
// Use the editor.html template to render the home page
|
||||
data := struct {
|
||||
Filename string
|
||||
Content string
|
||||
IsHome bool
|
||||
}{
|
||||
Filename: "🏠 Accueil - Index des notes",
|
||||
Content: content,
|
||||
IsHome: true,
|
||||
}
|
||||
|
||||
err := h.templates.ExecuteTemplate(w, "editor.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template home: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
// generateHomeMarkdown generates the Markdown content of the home page
|
||||
func (h *Handler) generateHomeMarkdown() string {
|
||||
var sb strings.Builder
|
||||
|
||||
// Header
|
||||
sb.WriteString("# 📚 Index des Notes\n\n")
|
||||
sb.WriteString("_Mise à jour automatique • " + time.Now().Format("02/01/2006 à 15:04") + "_\n\n")
|
||||
sb.WriteString("---\n\n")
|
||||
|
||||
// Build the file tree
|
||||
tree, err := h.buildFileTree()
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur lors de la construction de l'arbre: %v", err)
|
||||
sb.WriteString("❌ Erreur lors de la génération de l'index\n")
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// Count the notes
|
||||
noteCount := h.countNotes(tree)
|
||||
sb.WriteString(fmt.Sprintf("**%d note(s) au total**\n\n", noteCount))
|
||||
|
||||
// Generate the tree view (see generateMarkdownTree)
|
||||
h.generateMarkdownTree(&sb, tree, 0)
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// countNotes counts the .md files in the tree
|
||||
func (h *Handler) countNotes(node *TreeNode) int {
|
||||
count := 0
|
||||
if !node.IsDir {
|
||||
count = 1
|
||||
}
|
||||
for _, child := range node.Children {
|
||||
count += h.countNotes(child)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// generateMarkdownTree renders the tree as HTML with an accordion
|
||||
func (h *Handler) generateMarkdownTree(sb *strings.Builder, node *TreeNode, depth int) {
|
||||
// Skip the root node
|
||||
if depth == 0 {
|
||||
sb.WriteString("<div class=\"note-tree\">\n")
|
||||
for _, child := range node.Children {
|
||||
h.generateMarkdownTree(sb, child, depth+1)
|
||||
}
|
||||
sb.WriteString("</div>\n")
|
||||
return
|
||||
}
|
||||
|
||||
indent := strings.Repeat(" ", depth-1)
|
||||
// Create a unique ID based on the path
|
||||
safeID := strings.ReplaceAll(strings.ReplaceAll(node.Path, "/", "-"), "\\", "-")
|
||||
|
||||
if node.IsDir {
|
||||
// Folder - rendered as an accordion
|
||||
// Use CSS classes instead of inline styles
|
||||
indentClass := fmt.Sprintf("indent-level-%d", depth)
|
||||
sb.WriteString(fmt.Sprintf("%s<div class=\"folder %s\">\n", indent, indentClass))
|
||||
sb.WriteString(fmt.Sprintf("%s <div class=\"folder-header\" onclick=\"toggleFolder('%s')\">\n", indent, safeID))
|
||||
sb.WriteString(fmt.Sprintf("%s <span class=\"folder-icon\" id=\"icon-%s\">📁</span>\n", indent, safeID))
|
||||
sb.WriteString(fmt.Sprintf("%s <strong>%s</strong>\n", indent, node.Name))
|
||||
sb.WriteString(fmt.Sprintf("%s </div>\n", indent))
|
||||
sb.WriteString(fmt.Sprintf("%s <div class=\"folder-content\" id=\"folder-%s\">\n", indent, safeID))
|
||||
|
||||
for _, child := range node.Children {
|
||||
h.generateMarkdownTree(sb, child, depth+1)
|
||||
}
|
||||
|
||||
sb.WriteString(fmt.Sprintf("%s </div>\n", indent))
|
||||
sb.WriteString(fmt.Sprintf("%s</div>\n", indent))
|
||||
} else {
|
||||
// File - create a clickable HTML link with HTMX
|
||||
displayName := strings.TrimSuffix(node.Name, ".md")
|
||||
// Normalize the path separators for the URL
|
||||
escapedPath := strings.ReplaceAll(node.Path, "\\", "/")
|
||||
indentClass := fmt.Sprintf("indent-level-%d", depth)
|
||||
sb.WriteString(fmt.Sprintf("%s<div class=\"file %s\">\n", indent, indentClass))
|
||||
sb.WriteString(fmt.Sprintf("%s <span class=\"file-icon\">📄</span>\n", indent))
|
||||
sb.WriteString(fmt.Sprintf("%s <a href=\"#\" hx-get=\"/api/notes/%s\" hx-target=\"#editor-container\" hx-swap=\"innerHTML\">%s</a>\n",
|
||||
indent, escapedPath, displayName))
|
||||
sb.WriteString(fmt.Sprintf("%s</div>\n", indent))
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleNewNoteAuto(w http.ResponseWriter, r *http.Request) {
|
||||
// Generate a unique filename
|
||||
baseFilename := "nouvelle-note"
|
||||
filename := ""
|
||||
for i := 1; ; i++ {
|
||||
tempFilename := fmt.Sprintf("%s-%d.md", baseFilename, i)
|
||||
fullPath := filepath.Join(h.notesDir, tempFilename)
|
||||
if _, err := os.Stat(fullPath); os.IsNotExist(err) {
|
||||
filename = tempFilename
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
h.createAndRenderNote(w, r, filename)
|
||||
}
|
||||
|
||||
func (h *Handler) handleNewNotePrompt(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
err := h.templates.ExecuteTemplate(w, "new-note-prompt.html", nil)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template new-note-prompt: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleCreateCustomNote(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
if err := r.ParseForm(); err != nil {
|
||||
http.Error(w, "lecture du formulaire impossible", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
filename := r.FormValue("filename")
|
||||
if filename == "" {
|
||||
http.Error(w, "nom de fichier manquant", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Security: sanitize the filename
|
||||
filename = filepath.Clean(filename)
|
||||
if strings.HasPrefix(filename, "..") || !strings.HasSuffix(filename, ".md") {
|
||||
http.Error(w, "nom de fichier invalide", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
fullPath := filepath.Join(h.notesDir, filename)
|
||||
if _, err := os.Stat(fullPath); err == nil {
|
||||
http.Error(w, "une note avec ce nom existe deja", http.StatusConflict)
|
||||
return
|
||||
}
|
||||
|
||||
h.createAndRenderNote(w, r, filename)
|
||||
}
|
||||
|
||||
// createAndRenderNote is a helper that creates and renders a new note
|
||||
func (h *Handler) createAndRenderNote(w http.ResponseWriter, r *http.Request, filename string) {
|
||||
// Prepare initial front matter for a new note
|
||||
now := time.Now()
|
||||
newFM := indexer.FullFrontMatter{
|
||||
Title: strings.Title(strings.ReplaceAll(strings.TrimSuffix(filename, filepath.Ext(filename)), "-", " ")),
|
||||
Date: now.Format("02-01-2006"),
|
||||
LastModified: now.Format("02-01-2006:15:04"),
|
||||
Tags: []string{"default"}, // Default tag for new notes
|
||||
}
|
||||
|
||||
fmBytes, err := yaml.Marshal(newFM)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur de marshalling du front matter pour nouvelle note: %v", err)
|
||||
http.Error(w, "erreur interne lors de la generation du front matter", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Combine new front matter with a placeholder body
|
||||
initialContent := "---\n" + string(fmBytes) + "---\n\n# " + newFM.Title + "\n\nCommencez à écrire votre note ici..."
|
||||
|
||||
data := struct {
|
||||
Filename string
|
||||
Content string
|
||||
IsHome bool
|
||||
}{
|
||||
Filename: filename,
|
||||
Content: initialContent,
|
||||
IsHome: false,
|
||||
}
|
||||
|
||||
err = h.templates.ExecuteTemplate(w, "editor.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template editeur pour nouvelle note: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
query := strings.TrimSpace(r.URL.Query().Get("query"))
|
||||
if query == "" {
|
||||
query = strings.TrimSpace(r.URL.Query().Get("tag"))
|
||||
}
|
||||
|
||||
results := h.idx.SearchDocuments(query)
|
||||
|
||||
data := struct {
|
||||
Query string
|
||||
Results []indexer.SearchResult
|
||||
}{
|
||||
Query: query,
|
||||
Results: results,
|
||||
}
|
||||
|
||||
err := h.templates.ExecuteTemplate(w, "search-results.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template de recherche: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleNotes(w http.ResponseWriter, r *http.Request) {
|
||||
filename, err := h.extractFilename(r.URL.Path)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
h.handleGetNote(w, r, filename)
|
||||
case http.MethodPost:
|
||||
h.handlePostNote(w, r, filename)
|
||||
case http.MethodDelete:
|
||||
h.handleDeleteNote(w, r, filename)
|
||||
default:
|
||||
w.Header().Set("Allow", "GET, POST, DELETE")
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleDeleteNote(w http.ResponseWriter, r *http.Request, filename string) {
|
||||
fullPath := filepath.Join(h.notesDir, filename)
|
||||
|
||||
if err := os.Remove(fullPath); err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
http.Error(w, "note introuvable", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
h.logger.Printf("erreur de suppression du fichier %s: %v", filename, err)
|
||||
http.Error(w, "suppression impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Re-index in the background
|
||||
go func() {
|
||||
if err := h.idx.Load(h.notesDir); err != nil {
|
||||
h.logger.Printf("echec de la reindexation post-suppression: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Respond to htmx to clear the editor and refresh the tree
|
||||
h.renderFileTreeOOB(w)
|
||||
io.WriteString(w, `<p>Note "`+filename+`" supprimée.</p>`)
|
||||
}
|
||||
|
||||
func (h *Handler) handleGetNote(w http.ResponseWriter, r *http.Request, filename string) {
|
||||
fullPath := filepath.Join(h.notesDir, filename)
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
if !errors.Is(err, os.ErrNotExist) {
|
||||
h.logger.Printf("erreur de lecture du fichier %s: %v", filename, err)
|
||||
http.Error(w, "lecture impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
// The file does not exist; create initial front matter
|
||||
now := time.Now()
|
||||
newFM := indexer.FullFrontMatter{
|
||||
Title: strings.Title(strings.ReplaceAll(strings.TrimSuffix(filename, filepath.Ext(filename)), "-", " ")),
|
||||
Date: now.Format("02-01-2006"),
|
||||
LastModified: now.Format("02-01-2006:15:04"),
|
||||
Tags: []string{},
|
||||
}
|
||||
|
||||
fmBytes, err := yaml.Marshal(newFM)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur de marshalling du front matter pour nouvelle note: %v", err)
|
||||
http.Error(w, "erreur interne lors de la generation du front matter", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate the initial content with front matter
|
||||
initialContent := "---\n" + string(fmBytes) + "---\n\n# " + newFM.Title + "\n\nCommencez à écrire votre note ici..."
|
||||
content = []byte(initialContent)
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Filename string
|
||||
Content string
|
||||
IsHome bool
|
||||
}{
|
||||
Filename: filename,
|
||||
Content: string(content),
|
||||
IsHome: false,
|
||||
}
|
||||
|
||||
err = h.templates.ExecuteTemplate(w, "editor.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template editeur: %v", err)
|
||||
http.Error(w, "erreur interne", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handlePostNote(w http.ResponseWriter, r *http.Request, filename string) {
|
||||
if err := r.ParseForm(); err != nil {
|
||||
http.Error(w, "lecture du formulaire impossible", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
incomingContent := r.FormValue("content")
|
||||
|
||||
fullPath := filepath.Join(h.notesDir, filename)
|
||||
isNewFile := false
|
||||
if _, err := os.Stat(fullPath); errors.Is(err, os.ErrNotExist) {
|
||||
isNewFile = true
|
||||
}
|
||||
|
||||
// Extract existing front matter and body from incoming content
|
||||
var currentFM indexer.FullFrontMatter
|
||||
var bodyContent string
|
||||
var err error
|
||||
|
||||
// Use a strings.Reader to pass the content to the extractor
|
||||
currentFM, bodyContent, err = indexer.ExtractFrontMatterAndBodyFromReader(strings.NewReader(incomingContent))
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d'extraction du front matter: %v", err)
|
||||
http.Error(w, "erreur interne lors de l'analyse du contenu", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Prepare new front matter
|
||||
newFM := currentFM
|
||||
|
||||
// Set Title
|
||||
if newFM.Title == "" {
|
||||
newFM.Title = strings.TrimSuffix(filename, filepath.Ext(filename))
|
||||
newFM.Title = strings.ReplaceAll(newFM.Title, "-", " ")
|
||||
newFM.Title = strings.Title(newFM.Title)
|
||||
}
|
||||
|
||||
// Set Date (creation date)
|
||||
now := time.Now()
|
||||
if isNewFile || newFM.Date == "" { // Check for empty string
|
||||
newFM.Date = now.Format("02-01-2006")
|
||||
}
|
||||
|
||||
// Set LastModified
|
||||
newFM.LastModified = now.Format("02-01-2006:15:04")
|
||||
|
||||
// Marshal new front matter to YAML
|
||||
fmBytes, err := yaml.Marshal(newFM)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur de marshalling du front matter: %v", err)
|
||||
http.Error(w, "erreur interne lors de la generation du front matter", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Combine new front matter with body
|
||||
finalContent := "---\n" + string(fmBytes) + "---\n" + bodyContent
|
||||
|
||||
if err := os.MkdirAll(filepath.Dir(fullPath), 0o755); err != nil {
|
||||
h.logger.Printf("erreur de creation du repertoire pour %s: %v", filename, err)
|
||||
http.Error(w, "creation repertoire impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err := os.WriteFile(fullPath, []byte(finalContent), 0o644); err != nil {
|
||||
h.logger.Printf("erreur d ecriture du fichier %s: %v", filename, err)
|
||||
http.Error(w, "ecriture impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Re-index in the background so the response is not slowed down
|
||||
go func() {
|
||||
if err := h.idx.Load(h.notesDir); err != nil {
|
||||
h.logger.Printf("echec de la reindexation post-ecriture: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Respond to htmx to clear the editor and refresh the tree
|
||||
h.renderFileTreeOOB(w)
|
||||
|
||||
// Respond with the OOB save-status elements
|
||||
nowStr := time.Now().Format("15:04:05")
|
||||
oobStatus := fmt.Sprintf(`
|
||||
<span id="auto-save-status" hx-swap-oob="true">Enregistré à %s</span>
|
||||
<span id="save-status" hx-swap-oob="true"></span>`, nowStr)
|
||||
io.WriteString(w, oobStatus)
|
||||
}
|
||||
|
||||
func (h *Handler) extractFilename(path string) (string, error) {
|
||||
const prefix = "/api/notes/"
|
||||
if !strings.HasPrefix(path, prefix) {
|
||||
return "", errors.New("chemin invalide")
|
||||
}
|
||||
|
||||
rel := strings.TrimPrefix(path, prefix)
|
||||
rel = strings.TrimSpace(rel)
|
||||
if rel == "" {
|
||||
return "", errors.New("fichier manquant")
|
||||
}
|
||||
|
||||
rel = filepath.Clean(rel)
|
||||
if rel == "." || strings.HasPrefix(rel, "..") {
|
||||
return "", errors.New("nom de fichier invalide")
|
||||
}
|
||||
|
||||
if filepath.Ext(rel) != ".md" {
|
||||
return "", errors.New("extension invalide")
|
||||
}
|
||||
|
||||
return rel, nil
|
||||
}
|
||||
|
||||
// renderFileTreeOOB renders the file-tree HTML for an out-of-band swap.
|
||||
func (h *Handler) renderFileTreeOOB(w http.ResponseWriter) {
|
||||
tree, err := h.buildFileTree()
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur lors de la construction de l arborescence pour OOB: %v", err)
|
||||
return // Don't fail the main request, just log
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Tree *TreeNode
|
||||
}{
|
||||
Tree: tree,
|
||||
}
|
||||
|
||||
// Render the file tree template into a buffer
|
||||
var buf strings.Builder
|
||||
err = h.templates.ExecuteTemplate(&buf, "file-tree.html", data)
|
||||
if err != nil {
|
||||
h.logger.Printf("erreur d execution du template de l arborescence pour OOB: %v", err)
|
||||
return // Don't fail the main request, just log
|
||||
}
|
||||
|
||||
// Wrap it in a div with hx-swap-oob
|
||||
oobContent := fmt.Sprintf(`<div id="file-tree" hx-swap-oob="true">%s</div>`, buf.String())
|
||||
io.WriteString(w, oobContent)
|
||||
}
|
||||
|
||||
// handleCreateFolder creates a new folder
|
||||
func (h *Handler) handleCreateFolder(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
if err := r.ParseForm(); err != nil {
|
||||
http.Error(w, "lecture du formulaire impossible", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
folderPath := r.FormValue("path")
|
||||
if folderPath == "" {
|
||||
http.Error(w, "chemin du dossier manquant", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Security: sanitize the path
|
||||
folderPath = filepath.Clean(folderPath)
|
||||
if strings.HasPrefix(folderPath, "..") || filepath.IsAbs(folderPath) {
|
||||
http.Error(w, "chemin invalide", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
fullPath := filepath.Join(h.notesDir, folderPath)
|
||||
|
||||
// Check whether the folder already exists
|
||||
if _, err := os.Stat(fullPath); err == nil {
|
||||
http.Error(w, "un dossier avec ce nom existe deja", http.StatusConflict)
|
||||
return
|
||||
}
|
||||
|
||||
// Create the folder
|
||||
if err := os.MkdirAll(fullPath, 0o755); err != nil {
|
||||
h.logger.Printf("erreur de creation du dossier %s: %v", folderPath, err)
|
||||
http.Error(w, "creation du dossier impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
h.logger.Printf("dossier cree: %s", folderPath)
|
||||
|
||||
// Refresh the file tree
|
||||
h.renderFileTreeOOB(w)
|
||||
io.WriteString(w, fmt.Sprintf("Dossier '%s' créé avec succès", folderPath))
|
||||
}
|
||||
|
||||
// handleMoveFile moves or renames a file/folder
|
||||
func (h *Handler) handleMoveFile(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "methode non supportee", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
if err := r.ParseForm(); err != nil {
|
||||
http.Error(w, "lecture du formulaire impossible", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
sourcePath := r.FormValue("source")
|
||||
destPath := r.FormValue("destination")
|
||||
|
||||
if sourcePath == "" || destPath == "" {
|
||||
http.Error(w, "chemins source et destination requis", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Security: sanitize the paths
|
||||
sourcePath = filepath.Clean(sourcePath)
|
||||
destPath = filepath.Clean(destPath)
|
||||
|
||||
if strings.HasPrefix(sourcePath, "..") || strings.HasPrefix(destPath, "..") {
|
||||
http.Error(w, "chemins invalides", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
sourceFullPath := filepath.Join(h.notesDir, sourcePath)
|
||||
destFullPath := filepath.Join(h.notesDir, destPath)
|
||||
|
||||
// Check that the source exists
|
||||
if _, err := os.Stat(sourceFullPath); os.IsNotExist(err) {
|
||||
http.Error(w, "fichier source introuvable", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
// Create the destination directory if needed
|
||||
destDir := filepath.Dir(destFullPath)
|
||||
if err := os.MkdirAll(destDir, 0o755); err != nil {
|
||||
h.logger.Printf("erreur de creation du repertoire de destination %s: %v", destDir, err)
|
||||
http.Error(w, "creation du repertoire de destination impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Move the file
|
||||
if err := os.Rename(sourceFullPath, destFullPath); err != nil {
|
||||
h.logger.Printf("erreur de deplacement de %s vers %s: %v", sourcePath, destPath, err)
|
||||
http.Error(w, "deplacement impossible", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
h.logger.Printf("fichier deplace: %s -> %s", sourcePath, destPath)
|
||||
|
||||
// Re-index
|
||||
go func() {
|
||||
if err := h.idx.Load(h.notesDir); err != nil {
|
||||
h.logger.Printf("echec de la reindexation post-deplacement: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Refresh the file tree
|
||||
h.renderFileTreeOOB(w)
|
||||
io.WriteString(w, fmt.Sprintf("Fichier déplacé de '%s' vers '%s'", sourcePath, destPath))
|
||||
}
117
internal/api/handler_test.go
Normal file
@@ -0,0 +1,117 @@
package api
|
||||
|
||||
import (
|
||||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mathieu/project-notes/internal/indexer"
|
||||
)
|
||||
|
||||
func newTestHandler(t *testing.T, notesDir string) *Handler {
|
||||
t.Helper()
|
||||
|
||||
tpl, err := template.New("").Parse(`
|
||||
{{define "search-results.html"}}
|
||||
Query: {{.Query}}, Count: {{len .Results}}
|
||||
{{range .Results}}
|
||||
{{.Path}}|{{.Title}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{define "editor.html"}}
|
||||
Filename: {{.Filename}}, Content: {{.Content}}
|
||||
{{end}}
|
||||
`)
|
||||
if err != nil {
|
||||
t.Fatalf("impossible d'analyser les templates de test: %v", err)
|
||||
}
|
||||
|
||||
return NewHandler(notesDir, indexer.New(), tpl, log.New(io.Discard, "", 0))
|
||||
}
|
||||
|
||||
func TestHandler_Search(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
if err := os.WriteFile(filepath.Join(dir, "test.md"), []byte("---\ntags: [foo]\n---\ncorps"), 0o644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
handler := newTestHandler(t, dir)
|
||||
if err := handler.idx.Load(dir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/search?query=foo", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("code de statut attendu %d, obtenu %d", http.StatusOK, rec.Code)
|
||||
}
|
||||
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
if !strings.Contains(body, "Query: foo, Count: 1") {
|
||||
t.Fatalf("corps de réponse inattendu: %s", body)
|
||||
}
|
||||
if !strings.Contains(body, "test.md|Test") {
|
||||
t.Fatalf("résultat de recherche manquant dans: %s", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandler_GetNote(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
content := "hello world"
|
||||
if err := os.WriteFile(filepath.Join(dir, "test.md"), []byte(content), 0o644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
handler := newTestHandler(t, dir)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/notes/test.md", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("code de statut attendu %d, obtenu %d", http.StatusOK, rec.Code)
|
||||
}
|
||||
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
expected := "Filename: test.md, Content: hello world"
|
||||
if body != expected {
|
||||
t.Fatalf("corps de réponse attendu '%s', obtenu '%s'", expected, body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandler_PostNote(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
handler := newTestHandler(t, dir)
|
||||
|
||||
form := "content=new content"
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/notes/new.md", strings.NewReader(form))
|
||||
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("code de statut attendu %d, obtenu %d", http.StatusOK, rec.Code)
|
||||
}
|
||||
|
||||
body := rec.Body.String()
|
||||
if body != "Enregistré !" {
|
||||
t.Fatalf("corps de réponse attendu 'Enregistré !', obtenu '%s'", body)
|
||||
}
|
||||
|
||||
savedContent, err := os.ReadFile(filepath.Join(dir, "new.md"))
|
||||
if err != nil {
|
||||
t.Fatalf("impossible de lire le fichier sauvegardé: %v", err)
|
||||
}
|
||||
if string(savedContent) != "new content" {
|
||||
t.Fatalf("contenu de fichier attendu 'new content', obtenu '%s'", string(savedContent))
|
||||
}
|
||||
}
436
internal/api/rest_handler.go
Normal file
@@ -0,0 +1,436 @@
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/mathieu/project-notes/internal/indexer"
|
||||
)
|
||||
|
||||
// REST API Structures
|
||||
// ===================
|
||||
|
||||
// NoteResponse represents a note in API responses
|
||||
type NoteResponse struct {
|
||||
Path string `json:"path"`
|
||||
Title string `json:"title"`
|
||||
Content string `json:"content,omitempty"` // Full content with front matter
|
||||
Body string `json:"body,omitempty"` // Body without front matter
|
||||
FrontMatter *indexer.FullFrontMatter `json:"frontMatter,omitempty"`
|
||||
LastModified string `json:"lastModified"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
// NoteMetadata represents a note's metadata (for listings)
|
||||
type NoteMetadata struct {
|
||||
Path string `json:"path"`
|
||||
Title string `json:"title"`
|
||||
Tags []string `json:"tags"`
|
||||
LastModified string `json:"lastModified"`
|
||||
Date string `json:"date"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
// ListNotesResponse represents the note-list response
|
||||
type ListNotesResponse struct {
|
||||
Notes []NoteMetadata `json:"notes"`
|
||||
Total int `json:"total"`
|
||||
}
|
||||
|
||||
// ErrorResponse represents an API error
|
||||
type ErrorResponse struct {
|
||||
Error string `json:"error"`
|
||||
Message string `json:"message"`
|
||||
Code int `json:"code"`
|
||||
}
|
||||
|
||||
// NoteRequest represents a note create/update request
|
||||
type NoteRequest struct {
|
||||
Content string `json:"content,omitempty"`
|
||||
Body string `json:"body,omitempty"`
|
||||
FrontMatter *indexer.FullFrontMatter `json:"frontMatter,omitempty"`
|
||||
}
|
||||
|
||||
// REST API Handlers
|
||||
// =================
|
||||
|
||||
// handleRESTNotes routes REST requests to the right handlers
|
||||
func (h *Handler) handleRESTNotes(w http.ResponseWriter, r *http.Request) {
|
||||
// Extract path after /api/v1/notes/
|
||||
const prefix = "/api/v1/notes"
|
||||
path := strings.TrimPrefix(r.URL.Path, prefix)
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
|
||||
// No specific path means a list request
|
||||
if path == "" {
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
h.handleRESTListNotes(w, r)
|
||||
default:
|
||||
h.sendJSONError(w, "Method not allowed", http.StatusMethodNotAllowed)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Validate the path
|
||||
cleanPath := filepath.Clean(path)
|
||||
if strings.HasPrefix(cleanPath, "..") || filepath.IsAbs(cleanPath) {
|
||||
h.sendJSONError(w, "Invalid path", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if filepath.Ext(cleanPath) != ".md" {
|
||||
h.sendJSONError(w, "Only .md files are supported", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Route by HTTP method
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
h.handleRESTGetNote(w, r, cleanPath)
|
||||
case http.MethodPut:
|
||||
h.handleRESTPutNote(w, r, cleanPath)
|
||||
case http.MethodDelete:
|
||||
h.handleRESTDeleteNote(w, r, cleanPath)
|
||||
default:
|
||||
h.sendJSONError(w, "Method not allowed. Supported: GET, PUT, DELETE", http.StatusMethodNotAllowed)
|
||||
}
|
||||
}
|
||||
|
||||
// handleRESTListNotes lists all notes with their metadata
|
||||
// GET /api/v1/notes
|
||||
func (h *Handler) handleRESTListNotes(w http.ResponseWriter, r *http.Request) {
|
||||
notes := []NoteMetadata{}
|
||||
|
||||
err := filepath.WalkDir(h.notesDir, func(path string, d os.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip directories and hidden files
|
||||
if d.IsDir() || strings.HasPrefix(d.Name(), ".") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Keep only .md files
|
||||
if filepath.Ext(path) != ".md" {
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(h.notesDir, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Read file metadata
|
||||
info, err := d.Info()
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur lecture info fichier %s: %v", relPath, err)
|
||||
return nil // Continue despite the error
|
||||
}
|
||||
|
||||
// Extract the front matter
|
||||
fm, _, err := indexer.ExtractFrontMatterAndBody(path)
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur extraction front matter %s: %v", relPath, err)
|
||||
// Fall back to default values
|
||||
fm = indexer.FullFrontMatter{
|
||||
Title: strings.TrimSuffix(d.Name(), ".md"),
|
||||
Tags: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
notes = append(notes, NoteMetadata{
|
||||
Path: filepath.ToSlash(relPath),
|
||||
Title: fm.Title,
|
||||
Tags: fm.Tags,
|
||||
LastModified: fm.LastModified,
|
||||
Date: fm.Date,
|
||||
Size: info.Size(),
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur lors du listing des notes: %v", err)
|
||||
h.sendJSONError(w, "Failed to list notes", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
response := ListNotesResponse{
|
||||
Notes: notes,
|
||||
Total: len(notes),
|
||||
}
|
||||
|
||||
h.sendJSON(w, response, http.StatusOK)
|
||||
}
|
||||
|
||||
// handleRESTGetNote returns a single note
|
||||
// GET /api/v1/notes/{path}
|
||||
// Supports Accept: application/json or text/markdown
|
||||
func (h *Handler) handleRESTGetNote(w http.ResponseWriter, r *http.Request, notePath string) {
|
||||
fullPath := filepath.Join(h.notesDir, notePath)
|
||||
|
||||
// Check that the file exists
|
||||
info, err := os.Stat(fullPath)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
h.sendJSONError(w, "Note not found", http.StatusNotFound)
|
||||
} else {
|
||||
h.logger.Printf("Erreur stat fichier %s: %v", notePath, err)
|
||||
h.sendJSONError(w, "Failed to access note", http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Read the content
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur lecture fichier %s: %v", notePath, err)
|
||||
h.sendJSONError(w, "Failed to read note", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Extract front matter and body
|
||||
fm, body, err := indexer.ExtractFrontMatterAndBody(fullPath)
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur extraction front matter %s: %v", notePath, err)
|
||||
// Continue without front matter
|
||||
fm = indexer.FullFrontMatter{}
|
||||
body = string(content)
|
||||
}
|
||||
|
||||
// Content negotiation
|
||||
accept := r.Header.Get("Accept")
|
||||
|
||||
// If the client wants raw Markdown
|
||||
if strings.Contains(accept, "text/markdown") || strings.Contains(accept, "text/plain") {
|
||||
w.Header().Set("Content-Type", "text/markdown; charset=utf-8")
|
||||
w.Header().Set("Content-Disposition", fmt.Sprintf("inline; filename=\"%s\"", filepath.Base(notePath)))
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write(content)
|
||||
return
|
||||
}
|
||||
|
||||
// Return JSON by default
|
||||
response := NoteResponse{
|
||||
Path: filepath.ToSlash(notePath),
|
||||
Title: fm.Title,
|
||||
Content: string(content),
|
||||
Body: body,
|
||||
FrontMatter: &fm,
|
||||
LastModified: fm.LastModified,
|
||||
Size: info.Size(),
|
||||
}
|
||||
|
||||
h.sendJSON(w, response, http.StatusOK)
|
||||
}
|
||||
|
||||
// handleRESTPutNote creates or updates a note
|
||||
// PUT /api/v1/notes/{path}
|
||||
// Accepts application/json or text/markdown
|
||||
func (h *Handler) handleRESTPutNote(w http.ResponseWriter, r *http.Request, notePath string) {
|
||||
fullPath := filepath.Join(h.notesDir, notePath)
|
||||
isNewFile := false
|
||||
if _, err := os.Stat(fullPath); os.IsNotExist(err) {
|
||||
isNewFile = true
|
||||
}
|
||||
|
||||
var finalContent string
|
||||
contentType := r.Header.Get("Content-Type")
|
||||
|
||||
// Parse according to the Content-Type
|
||||
if strings.Contains(contentType, "application/json") {
|
||||
// JSON Request
|
||||
var req NoteRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
h.sendJSONError(w, "Invalid JSON: "+err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// If content was provided directly, use it
|
||||
if req.Content != "" {
|
||||
finalContent = req.Content
|
||||
} else {
|
||||
// Otherwise build it from body + frontMatter
|
||||
var fm indexer.FullFrontMatter
|
||||
if req.FrontMatter != nil {
|
||||
fm = *req.FrontMatter
|
||||
} else {
|
||||
fm = indexer.FullFrontMatter{}
|
||||
}
|
||||
|
||||
// Fill in missing fields
|
||||
now := time.Now()
|
||||
if fm.Title == "" {
|
||||
fm.Title = strings.TrimSuffix(filepath.Base(notePath), ".md")
|
||||
fm.Title = strings.ReplaceAll(fm.Title, "-", " ")
|
||||
fm.Title = strings.Title(fm.Title)
|
||||
}
|
||||
if isNewFile || fm.Date == "" {
|
||||
fm.Date = now.Format("02-01-2006")
|
||||
}
|
||||
fm.LastModified = now.Format("02-01-2006:15:04")
|
||||
if fm.Tags == nil {
|
||||
fm.Tags = []string{}
|
||||
}
|
||||
|
||||
// Marshal front matter
|
||||
fmBytes, err := yaml.Marshal(fm)
|
||||
if err != nil {
|
||||
h.logger.Printf("Erreur marshalling front matter: %v", err)
|
||||
h.sendJSONError(w, "Failed to generate front matter", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
body := req.Body
|
||||
if body == "" {
|
||||
body = "\n# " + fm.Title + "\n\nVotre contenu ici..."
|
||||
}
|
||||
|
||||
finalContent = "---\n" + string(fmBytes) + "---\n" + body
|
||||
}
|
||||
} else if strings.Contains(contentType, "text/markdown") || strings.Contains(contentType, "text/plain") {
|
||||
// Raw Markdown
|
||||
bodyBytes, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
h.sendJSONError(w, "Failed to read body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
finalContent = string(bodyBytes)
|
||||
|
||||
// If there is no front matter, generate one
|
||||
if !strings.HasPrefix(finalContent, "---") {
|
||||
now := time.Now()
|
||||
fm := indexer.FullFrontMatter{
|
||||
Title: strings.TrimSuffix(filepath.Base(notePath), ".md"),
|
||||
Date: now.Format("02-01-2006"),
|
||||
LastModified: now.Format("02-01-2006:15:04"),
|
||||
Tags: []string{},
|
||||
}
|
||||
fmBytes, _ := yaml.Marshal(fm)
|
||||
finalContent = "---\n" + string(fmBytes) + "---\n" + finalContent
|
||||
} else {
|
||||
// Update LastModified in the existing front matter
|
||||
fm, body, err := indexer.ExtractFrontMatterAndBodyFromReader(strings.NewReader(finalContent))
|
||||
if err == nil {
|
||||
fm.LastModified = time.Now().Format("02-01-2006:15:04")
|
||||
fmBytes, _ := yaml.Marshal(fm)
|
||||
finalContent = "---\n" + string(fmBytes) + "---\n" + body
|
||||
}
|
||||
}
|
||||
} else {
|
||||
h.sendJSONError(w, "Unsupported Content-Type. Use application/json or text/markdown", http.StatusUnsupportedMediaType)
|
||||
return
|
||||
}
|
||||
|
||||
// Create parent directories if needed
|
||||
if err := os.MkdirAll(filepath.Dir(fullPath), 0o755); err != nil {
|
||||
h.logger.Printf("Erreur création dossier pour %s: %v", notePath, err)
|
||||
h.sendJSONError(w, "Failed to create parent directory", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write the file
|
||||
if err := os.WriteFile(fullPath, []byte(finalContent), 0o644); err != nil {
|
||||
h.logger.Printf("Erreur écriture fichier %s: %v", notePath, err)
|
||||
h.sendJSONError(w, "Failed to write note", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Re-index in the background
|
||||
go func() {
|
||||
if err := h.idx.Load(h.notesDir); err != nil {
|
||||
h.logger.Printf("Échec ré-indexation après PUT: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Read metadata for the response
|
||||
info, _ := os.Stat(fullPath)
|
||||
fm, body, _ := indexer.ExtractFrontMatterAndBody(fullPath)
|
||||
|
||||
response := NoteResponse{
|
||||
Path: filepath.ToSlash(notePath),
|
||||
Title: fm.Title,
|
||||
Content: finalContent,
|
||||
Body: body,
|
||||
FrontMatter: &fm,
|
||||
LastModified: fm.LastModified,
|
||||
Size: info.Size(),
|
||||
}
|
||||
|
||||
statusCode := http.StatusOK
|
||||
if isNewFile {
|
||||
statusCode = http.StatusCreated
|
||||
}
|
||||
|
||||
h.sendJSON(w, response, statusCode)
|
||||
}
|
||||
|
||||
// handleRESTDeleteNote deletes a note
|
||||
// DELETE /api/v1/notes/{path}
|
||||
func (h *Handler) handleRESTDeleteNote(w http.ResponseWriter, r *http.Request, notePath string) {
|
||||
fullPath := filepath.Join(h.notesDir, notePath)
|
||||
|
||||
// Check that the file exists
|
||||
if _, err := os.Stat(fullPath); os.IsNotExist(err) {
|
||||
h.sendJSONError(w, "Note not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
// Delete the file
|
||||
if err := os.Remove(fullPath); err != nil {
|
||||
h.logger.Printf("Erreur suppression fichier %s: %v", notePath, err)
|
||||
h.sendJSONError(w, "Failed to delete note", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Re-index in the background
|
||||
go func() {
|
||||
if err := h.idx.Load(h.notesDir); err != nil {
|
||||
h.logger.Printf("Échec ré-indexation après DELETE: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Success response
|
||||
response := map[string]interface{}{
|
||||
"message": "Note deleted successfully",
|
||||
"path": filepath.ToSlash(notePath),
|
||||
}
|
||||
|
||||
h.sendJSON(w, response, http.StatusOK)
|
||||
}
|
||||
|
||||
// Utility functions
|
||||
// =================
|
||||
|
||||
// sendJSON sends a JSON response
|
||||
func (h *Handler) sendJSON(w http.ResponseWriter, data interface{}, statusCode int) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(statusCode)
|
||||
|
||||
encoder := json.NewEncoder(w)
|
||||
encoder.SetIndent("", " ")
|
||||
if err := encoder.Encode(data); err != nil {
|
||||
h.logger.Printf("Erreur encodage JSON: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// sendJSONError sends a JSON error
|
||||
func (h *Handler) sendJSONError(w http.ResponseWriter, message string, statusCode int) {
|
||||
response := ErrorResponse{
|
||||
Error: http.StatusText(statusCode),
|
||||
Message: message,
|
||||
Code: statusCode,
|
||||
}
|
||||
h.sendJSON(w, response, statusCode)
|
||||
}
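As a rough client-side sketch (not part of this commit), creating a note through the REST endpoint above could look like this from Go. The base URL is an assumption; the payload uses the "content" field shown in NoteRequest, so the note body is written verbatim:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Build the JSON payload accepted by PUT /api/v1/notes/{path}.
	payload, err := json.Marshal(map[string]string{
		"content": "---\ntitle: Demo\ntags: [demo]\n---\n\n# Demo\n\nContenu de démonstration.",
	})
	if err != nil {
		panic(err)
	}

	// "http://localhost:8080" is an assumed base URL, not part of this commit.
	req, err := http.NewRequest(http.MethodPut,
		"http://localhost:8080/api/v1/notes/demo.md", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// 201 Created on first creation, 200 OK on subsequent updates.
	fmt.Println("status:", resp.StatusCode)
}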
640
internal/indexer/indexer.go
Normal file
@@ -0,0 +1,640 @@
package indexer
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"unicode"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// Indexer maintains an in-memory index of the tags associated with Markdown files.
|
||||
type Indexer struct {
|
||||
mu sync.RWMutex
|
||||
tags map[string][]string
|
||||
docs map[string]*Document
|
||||
}
|
||||
|
||||
// Document represents a note indexed for search.
|
||||
type Document struct {
|
||||
Path string
|
||||
Title string
|
||||
Tags []string
|
||||
Date string
|
||||
LastModified string
|
||||
Body string
|
||||
Summary string
|
||||
|
||||
lowerTitle string
|
||||
lowerBody string
|
||||
lowerTags []string
|
||||
}
|
||||
|
||||
// SearchResult represents an enriched search result.
|
||||
type SearchResult struct {
|
||||
Path string
|
||||
Title string
|
||||
Tags []string
|
||||
Snippet string
|
||||
Score float64
|
||||
Date string
|
||||
LastModified string
|
||||
}
|
||||
|
||||
// New creates a new Indexer instance.
|
||||
func New() *Indexer {
|
||||
return &Indexer{
|
||||
tags: make(map[string][]string),
|
||||
docs: make(map[string]*Document),
|
||||
}
|
||||
}
|
||||
|
||||
// Load rebuilds the index from the given directory.
|
||||
func (i *Indexer) Load(root string) error {
|
||||
entries := make(map[string]map[string]struct{})
|
||||
documents := make(map[string]*Document)
|
||||
|
||||
err := filepath.WalkDir(root, func(path string, d os.DirEntry, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if !strings.EqualFold(filepath.Ext(path), ".md") {
|
||||
return nil
|
||||
}
|
||||
|
||||
rel, err := filepath.Rel(root, path)
|
||||
if err != nil {
|
||||
rel = path
|
||||
}
|
||||
|
||||
fm, body, err := ExtractFrontMatterAndBody(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("analyse du front matter pour %s: %w", path, err)
|
||||
}
|
||||
|
||||
tags := normalizeTags([]string(fm.Tags))
|
||||
if len(tags) > 0 {
|
||||
for _, tag := range tags {
|
||||
key := strings.ToLower(tag)
|
||||
if _, ok := entries[key]; !ok {
|
||||
entries[key] = make(map[string]struct{})
|
||||
}
|
||||
entries[key][rel] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
doc := buildDocument(rel, fm, body, tags)
|
||||
documents[rel] = doc
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
indexed := make(map[string][]string, len(entries))
|
||||
for tag, files := range entries {
|
||||
list := make([]string, 0, len(files))
|
||||
for file := range files {
|
||||
list = append(list, file)
|
||||
}
|
||||
sort.Strings(list)
|
||||
indexed[tag] = list
|
||||
}
|
||||
|
||||
i.mu.Lock()
|
||||
i.tags = indexed
|
||||
i.docs = documents
|
||||
i.mu.Unlock()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func normalizeTags(tags []string) []string {
|
||||
if len(tags) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{}, len(tags))
|
||||
result := make([]string, 0, len(tags))
|
||||
|
||||
for _, tag := range tags {
|
||||
trimmed := strings.TrimSpace(tag)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
lower := strings.ToLower(trimmed)
|
||||
if _, ok := seen[lower]; ok {
|
||||
continue
|
||||
}
|
||||
seen[lower] = struct{}{}
|
||||
result = append(result, trimmed)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func buildDocument(path string, fm FullFrontMatter, body string, tags []string) *Document {
|
||||
title := strings.TrimSpace(fm.Title)
|
||||
if title == "" {
|
||||
title = deriveTitleFromPath(path)
|
||||
}
|
||||
|
||||
summary := buildSummary(body)
|
||||
|
||||
lowerTags := make([]string, len(tags))
|
||||
for idx, tag := range tags {
|
||||
lowerTags[idx] = strings.ToLower(tag)
|
||||
}
|
||||
|
||||
doc := &Document{
|
||||
Path: path,
|
||||
Title: title,
|
||||
Tags: tags,
|
||||
Date: strings.TrimSpace(fm.Date),
|
||||
LastModified: strings.TrimSpace(fm.LastModified),
|
||||
Body: body,
|
||||
Summary: summary,
|
||||
lowerTitle: strings.ToLower(title),
|
||||
lowerBody: strings.ToLower(body),
|
||||
lowerTags: lowerTags,
|
||||
}
|
||||
|
||||
return doc
|
||||
}
|
||||
|
||||
func deriveTitleFromPath(path string) string {
|
||||
base := filepath.Base(path)
|
||||
base = strings.TrimSuffix(base, filepath.Ext(base))
|
||||
base = strings.ReplaceAll(base, "-", " ")
|
||||
base = strings.ReplaceAll(base, "_", " ")
|
||||
base = strings.TrimSpace(base)
|
||||
if base == "" {
|
||||
return "Sans titre"
|
||||
}
|
||||
return strings.Title(base)
|
||||
}
|
||||
|
||||
func buildSummary(body string) string {
|
||||
const maxRunes = 240
|
||||
trimmed := strings.TrimSpace(body)
|
||||
if trimmed == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Collapse runs of whitespace and newlines into single spaces
|
||||
normalized := strings.Join(strings.Fields(trimmed), " ")
|
||||
|
||||
runes := []rune(normalized)
|
||||
if len(runes) <= maxRunes {
|
||||
return normalized
|
||||
}
|
||||
|
||||
return string(runes[:maxRunes]) + "…"
|
||||
}
|
||||
|
||||
// SearchByTag returns a copy of the list of files indexed for a given tag.
|
||||
func (i *Indexer) SearchByTag(tag string) []string {
|
||||
i.mu.RLock()
|
||||
defer i.mu.RUnlock()
|
||||
|
||||
tag = strings.TrimSpace(tag)
|
||||
if tag == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
lowerTag := strings.ToLower(tag)
|
||||
files, ok := i.tags[lowerTag]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
copyFiles := make([]string, len(files))
|
||||
copy(copyFiles, files)
|
||||
return copyFiles
|
||||
}
|
||||
|
||||
// SearchDocuments performs a rich search over the indexed documents.
|
||||
func (i *Indexer) SearchDocuments(query string) []SearchResult {
|
||||
parsed := parseSearchQuery(query)
|
||||
|
||||
i.mu.RLock()
|
||||
defer i.mu.RUnlock()
|
||||
|
||||
if len(parsed.terms) == 0 && len(parsed.tagFilters) == 0 && len(parsed.titleFilters) == 0 && len(parsed.pathFilters) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
results := make([]SearchResult, 0, len(i.docs))
|
||||
|
||||
for _, doc := range i.docs {
|
||||
match, score := matchDocument(doc, parsed)
|
||||
if !match {
|
||||
continue
|
||||
}
|
||||
|
||||
snippet := buildSnippet(doc, parsed.terms)
|
||||
if snippet == "" {
|
||||
snippet = doc.Summary
|
||||
}
|
||||
|
||||
results = append(results, SearchResult{
|
||||
Path: doc.Path,
|
||||
Title: doc.Title,
|
||||
Tags: doc.Tags,
|
||||
Snippet: snippet,
|
||||
Score: score,
|
||||
Date: doc.Date,
|
||||
LastModified: doc.LastModified,
|
||||
})
|
||||
}
|
||||
|
||||
sort.SliceStable(results, func(a, b int) bool {
|
||||
if results[a].Score == results[b].Score {
|
||||
return strings.ToLower(results[a].Title) < strings.ToLower(results[b].Title)
|
||||
}
|
||||
return results[a].Score > results[b].Score
|
||||
})
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func matchDocument(doc *Document, q parsedQuery) (bool, float64) {
|
||||
score := 0.0
|
||||
|
||||
// Tag filters must all match
|
||||
for _, filter := range q.tagFilters {
|
||||
if !containsString(doc.lowerTags, filter) {
|
||||
return false, 0
|
||||
}
|
||||
score += 2 // small bonus for satisfied filters
|
||||
}
|
||||
|
||||
// Title filters must all match
|
||||
for _, filter := range q.titleFilters {
|
||||
if !strings.Contains(doc.lowerTitle, filter) {
|
||||
return false, 0
|
||||
}
|
||||
score += 4
|
||||
}
|
||||
|
||||
// Path filters must all match
|
||||
lowerPath := strings.ToLower(doc.Path)
|
||||
for _, filter := range q.pathFilters {
|
||||
if !strings.Contains(lowerPath, filter) {
|
||||
return false, 0
|
||||
}
|
||||
score += 1.5
|
||||
}
|
||||
|
||||
// General terms (AND logic)
|
||||
for _, term := range q.terms {
|
||||
if term == "" {
|
||||
continue
|
||||
}
|
||||
termScore := 0.0
|
||||
if strings.Contains(doc.lowerTitle, term) {
|
||||
termScore += 6
|
||||
}
|
||||
if containsString(doc.lowerTags, term) {
|
||||
termScore += 4
|
||||
}
|
||||
if strings.Contains(lowerPath, term) {
|
||||
termScore += 2
|
||||
}
|
||||
if strings.Contains(doc.lowerBody, term) {
|
||||
termScore += 1.5
|
||||
}
|
||||
if termScore == 0 {
|
||||
return false, 0 // term must match somewhere
|
||||
}
|
||||
score += termScore
|
||||
}
|
||||
|
||||
if len(q.terms) == 0 && len(q.tagFilters) == 0 && len(q.titleFilters) == 0 && len(q.pathFilters) == 0 {
|
||||
return false, 0
|
||||
}
|
||||
|
||||
// Small bonus for documents with a defined title
|
||||
if doc.Title != "" {
|
||||
score += 0.5
|
||||
}
|
||||
|
||||
return true, score
|
||||
}
|
||||
|
||||
func containsString(list []string, target string) bool {
|
||||
for _, item := range list {
|
||||
if item == target {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func buildSnippet(doc *Document, terms []string) string {
|
||||
if doc.Body == "" || len(terms) == 0 {
|
||||
return doc.Summary
|
||||
}
|
||||
|
||||
pos, termLen := findTermPosition(doc.Body, terms)
|
||||
if pos == -1 {
|
||||
return doc.Summary
|
||||
}
|
||||
|
||||
return extractSnippetFromRunes([]rune(doc.Body), pos, termLen)
|
||||
}
|
||||
|
||||
func findTermPosition(body string, terms []string) (int, int) {
|
||||
if len(terms) == 0 {
|
||||
return -1, 0
|
||||
}
|
||||
|
||||
bodyRunes := []rune(body)
|
||||
lowerRunes := make([]rune, len(bodyRunes))
|
||||
for idx, r := range bodyRunes {
|
||||
lowerRunes[idx] = unicode.ToLower(r)
|
||||
}
|
||||
|
||||
for _, term := range terms {
|
||||
term = strings.TrimSpace(term)
|
||||
if term == "" {
|
||||
continue
|
||||
}
|
||||
termRunes := []rune(term)
|
||||
for idx, r := range termRunes {
|
||||
termRunes[idx] = unicode.ToLower(r)
|
||||
}
|
||||
pos := indexRunes(lowerRunes, termRunes)
|
||||
if pos != -1 {
|
||||
return pos, len(termRunes)
|
||||
}
|
||||
}
|
||||
|
||||
return -1, 0
|
||||
}

func indexRunes(haystack, needle []rune) int {
	if len(needle) == 0 || len(needle) > len(haystack) {
		return -1
	}

	for i := 0; i <= len(haystack)-len(needle); i++ {
		match := true
		for j := 0; j < len(needle); j++ {
			if haystack[i+j] != needle[j] {
				match = false
				break
			}
		}
		if match {
			return i
		}
	}
	return -1
}

func extractSnippetFromRunes(body []rune, pos, termLen int) string {
	if len(body) == 0 {
		return ""
	}

	const window = 120

	start := pos - window/2
	if start < 0 {
		start = 0
	}

	end := pos + termLen + window/2
	if end > len(body) {
		end = len(body)
	}

	snippet := strings.TrimSpace(string(body[start:end]))
	snippet = strings.Join(strings.Fields(snippet), " ")

	if start > 0 {
		snippet = "…" + snippet
	}
	if end < len(body) {
		snippet += "…"
	}

	return snippet
}
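
// Illustrative note (not from the original file): the snippet is built on runes,
// so multi-byte characters are never split. For example:
//
//	snippet := extractSnippetFromRunes([]rune(body), 300, 5)
//	// ≈ runes 240..365 of body, whitespace collapsed, with "…" added on both
//	// sides because text was cut at the start and at the end.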

type parsedQuery struct {
	terms        []string
	tagFilters   []string
	titleFilters []string
	pathFilters  []string
}

func parseSearchQuery(query string) parsedQuery {
	trimmed := strings.TrimSpace(query)
	if trimmed == "" {
		return parsedQuery{}
	}

	tokens := splitQuery(trimmed)
	result := parsedQuery{
		terms:        make([]string, 0, len(tokens)),
		tagFilters:   []string{},
		titleFilters: []string{},
		pathFilters:  []string{},
	}

	for _, token := range tokens {
		if token == "" {
			continue
		}
		lower := strings.ToLower(token)

		switch {
		case strings.HasPrefix(lower, "tag:"):
			value := strings.TrimSpace(token[4:])
			if value != "" {
				result.tagFilters = append(result.tagFilters, strings.ToLower(value))
			}
		case strings.HasPrefix(lower, "title:"):
			value := strings.TrimSpace(token[6:])
			if value != "" {
				result.titleFilters = append(result.titleFilters, strings.ToLower(value))
			}
		case strings.HasPrefix(lower, "path:"):
			value := strings.TrimSpace(token[5:])
			if value != "" {
				result.pathFilters = append(result.pathFilters, strings.ToLower(value))
			}
		default:
			result.terms = append(result.terms, strings.ToLower(token))
		}
	}

	return result
}
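
// Example (illustrative sketch, not part of the original file):
//
//	q := parseSearchQuery(`tag:projet title:alpha path:2024 serveur http`)
//	// q.tagFilters   == []string{"projet"}
//	// q.titleFilters == []string{"alpha"}
//	// q.pathFilters  == []string{"2024"}
//	// q.terms        == []string{"serveur", "http"}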

func splitQuery(input string) []string {
	var tokens []string
	var current strings.Builder
	inQuotes := false

	for _, r := range input {
		switch r {
		case '"':
			if inQuotes {
				tokens = appendToken(tokens, current.String())
				current.Reset()
				inQuotes = false
			} else {
				if current.Len() > 0 {
					tokens = appendToken(tokens, current.String())
					current.Reset()
				}
				inQuotes = true
			}
		case ' ', '\t', '\n':
			if inQuotes {
				current.WriteRune(r)
			} else {
				if current.Len() > 0 {
					tokens = appendToken(tokens, current.String())
					current.Reset()
				}
			}
		default:
			current.WriteRune(r)
		}
	}

	if current.Len() > 0 {
		tokens = appendToken(tokens, current.String())
	}

	return tokens
}

func appendToken(tokens []string, token string) []string {
	token = strings.TrimSpace(token)
	if token != "" {
		tokens = append(tokens, token)
	}
	return tokens
}
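
// Example (illustrative): quoted phrases stay single tokens, and a quote that
// directly follows a prefix closes the current token first, so the quoted part
// is treated as a plain term rather than as the filter value.
//
//	splitQuery(`recherche "multi mot" tag:note`) // → ["recherche", "multi mot", "tag:note"]
//	splitQuery(`title:"Projet Alpha"`)           // → ["title:", "Projet Alpha"]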

// FullFrontMatter represents the complete structure of the YAML front matter.
type FullFrontMatter struct {
	Title        string  `yaml:"title,omitempty"`
	Date         string  `yaml:"date,omitempty"`
	LastModified string  `yaml:"last_modified,omitempty"`
	Tags         tagList `yaml:"tags,omitempty"`
}

// frontMatter is a simplified version kept for compatibility with Load.
type frontMatter struct {
	Tags tagList `yaml:"tags"`
}

type tagList []string

func (t *tagList) UnmarshalYAML(value *yaml.Node) error {
	switch value.Kind {
	case yaml.ScalarNode:
		var tag string
		if err := value.Decode(&tag); err != nil {
			return err
		}
		*t = tagList{tag}
		return nil
	case yaml.SequenceNode:
		var tags []string
		if err := value.Decode(&tags); err != nil {
			return err
		}
		*t = tagList(tags)
		return nil
	case yaml.AliasNode:
		return t.UnmarshalYAML(value.Alias)
	default:
		return fmt.Errorf("format de tags non supporte")
	}
}
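
// Both front-matter forms below decode into a tagList (illustrative sketch):
//
//	tags: projet          // scalar   → tagList{"projet"}
//	tags: [projet, alpha] // sequence → tagList{"projet", "alpha"}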

// ExtractFrontMatterAndBody extracts the front matter and body from a Markdown file.
func ExtractFrontMatterAndBody(path string) (FullFrontMatter, string, error) {
	file, err := os.Open(path)
	if err != nil {
		return FullFrontMatter{}, "", err
	}
	defer file.Close()

	return ExtractFrontMatterAndBodyFromReader(file)
}

// ExtractFrontMatterAndBodyFromReader extracts the front matter and body from an io.Reader.
func ExtractFrontMatterAndBodyFromReader(reader io.Reader) (FullFrontMatter, string, error) {
	bufReader := bufio.NewReader(reader)
	var fm FullFrontMatter
	var bodyBuilder strings.Builder
	var fmBuilder strings.Builder

	line, err := bufReader.ReadString('\n')
	if err != nil && !errors.Is(err, io.EOF) {
		return FullFrontMatter{}, "", err
	}

	if strings.TrimSpace(line) != "---" {
		bodyBuilder.WriteString(line) // if no front matter, the first line is part of the body
		_, err := io.Copy(&bodyBuilder, bufReader)
		return FullFrontMatter{}, bodyBuilder.String(), err
	}

	// Found first '---', now read front matter
	fmFound := false
	for {
		line, err = bufReader.ReadString('\n')
		if err != nil {
			if errors.Is(err, io.EOF) {
				// Front matter not terminated, treat entire content as body
				return FullFrontMatter{}, "---\n" + fmBuilder.String() + bodyBuilder.String(), nil
			}
			return FullFrontMatter{}, "", err
		}
		if strings.TrimSpace(line) == "---" {
			fmFound = true
			break
		}
		fmBuilder.WriteString(line)
	}

	if fmFound {
		if err := yaml.Unmarshal([]byte(fmBuilder.String()), &fm); err != nil {
			return FullFrontMatter{}, "", fmt.Errorf("erreur d'analyse YAML du front matter: %w", err)
		}
	}

	// Read the rest of the body
	_, err = io.Copy(&bodyBuilder, bufReader)
	if err != nil {
		return FullFrontMatter{}, "", err
	}

	return fm, bodyBuilder.String(), nil
}
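
// Usage sketch (illustrative only):
//
//	fm, body, err := ExtractFrontMatterAndBodyFromReader(strings.NewReader(
//		"---\ntitle: Demo\ntags: [a, b]\n---\nBonjour\n"))
//	// err == nil, fm.Title == "Demo", fm.Tags == tagList{"a", "b"}, body == "Bonjour\n"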

// extractFrontMatter is a simplified helper kept for compatibility with Load.
func extractFrontMatter(path string) (frontMatter, error) {
	fm, _, err := ExtractFrontMatterAndBody(path)
	return frontMatter{Tags: fm.Tags}, err
}

116
internal/indexer/indexer_test.go
Normal file
@ -0,0 +1,116 @@
package indexer

import (
	"os"
	"path/filepath"
	"testing"
)

func TestIndexerLoadAndSearchByTag(t *testing.T) {
	tmp := t.TempDir()

	files := map[string]string{
		"note1.md": "---\ntags: [tag1, tag2]\n---\ncontenu 1\n",
		"note2.md": "---\ntags: tag2\n---\ncontenu 2\n",
		"note3.md": "---\n---\ncontenu 3\n",
	}

	for name, content := range files {
		path := filepath.Join(tmp, name)
		if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
			t.Fatalf("ecriture fichier %s: %v", name, err)
		}
	}

	idx := New()
	if err := idx.Load(tmp); err != nil {
		t.Fatalf("chargement index: %v", err)
	}

	cases := map[string][]string{
		"tag1": {"note1.md"},
		"TAG2": {"note1.md", "note2.md"},
		"tag3": nil,
		"":     nil,
	}

	for tag, expected := range cases {
		results := idx.SearchByTag(tag)
		if len(results) != len(expected) {
			t.Errorf("tag %q: taille %d attendue %d", tag, len(results), len(expected))
			continue
		}
		for i, file := range expected {
			if results[i] != file {
				t.Errorf("tag %q: resultat[%d]=%s attendu %s", tag, i, results[i], file)
			}
		}
	}
}

func TestIndexerSearchDocuments(t *testing.T) {
	tmp := t.TempDir()

	files := map[string]string{
		"projet-alpha.md": `---
title: Projet Alpha
tags: [projet, alpha]
---
Le projet Alpha explore une nouvelle architecture.
`,
		"journal.md": `---
title: Journal Quotidien
tags: [journal]
---
Aujourd'hui, nous avons discuté du projet Bêta et des priorités.
`,
		"guide.md": `---
title: Guide Pratique
tags: [documentation, guide]
---
Ce guide explique comment démarrer rapidement.
`,
	}

	for name, content := range files {
		path := filepath.Join(tmp, name)
		if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
			t.Fatalf("écriture fichier %s: %v", name, err)
		}
	}

	idx := New()
	if err := idx.Load(tmp); err != nil {
		t.Fatalf("chargement index: %v", err)
	}

	type expected struct {
		query   string
		results []string
	}

	tests := []expected{
		{query: "projet", results: []string{"journal.md", "projet-alpha.md"}},
		{query: "title:\"Projet Alpha\"", results: []string{"projet-alpha.md"}},
		{query: "tag:guide", results: []string{"guide.md"}},
		{query: "tag:projet projet", results: []string{"projet-alpha.md"}},
		{query: "tag:journal beta", results: []string{"journal.md"}},
		{query: "path:guide démarrer", results: []string{"guide.md"}},
	}

	for _, tt := range tests {
		results := idx.SearchDocuments(tt.query)
		paths := make([]string, len(results))
		for i, res := range results {
			paths[i] = res.Path
		}
		if len(paths) != len(tt.results) {
			t.Fatalf("query %q: taille %d attendue %d (résultats: %v)", tt.query, len(paths), len(tt.results), paths)
		}
		for idxRes, path := range tt.results {
			if paths[idxRes] != path {
				t.Fatalf("query %q: résultat[%d]=%s attendu %s", tt.query, idxRes, paths[idxRes], path)
			}
		}
	}
}

126
internal/watcher/watcher.go
Normal file
@ -0,0 +1,126 @@
package watcher

import (
	"context"
	"log"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/fsnotify/fsnotify"

	"github.com/mathieu/project-notes/internal/indexer"
)

// Watcher observes changes in the notes directory and triggers re-indexing when needed.
type Watcher struct {
	fs     *fsnotify.Watcher
	idx    *indexer.Indexer
	dir    string
	logger *log.Logger
	wg     sync.WaitGroup
}

// Start initializes the watcher and begins monitoring in a goroutine.
func Start(ctx context.Context, dir string, idx *indexer.Indexer, logger *log.Logger) (*Watcher, error) {
	fsWatcher, err := fsnotify.NewWatcher()
	if err != nil {
		return nil, err
	}

	w := &Watcher{
		fs:     fsWatcher,
		idx:    idx,
		dir:    dir,
		logger: logger,
	}

	if err := w.addDirRecursive(dir); err != nil {
		fsWatcher.Close()
		return nil, err
	}

	w.wg.Add(1)
	go w.run(ctx)

	return w, nil
}

// Close stops the watcher and waits for its goroutines to finish.
func (w *Watcher) Close() error {
	err := w.fs.Close()
	w.wg.Wait()
	return err
}

func (w *Watcher) run(ctx context.Context) {
	defer w.wg.Done()

	debounce := time.NewTimer(0)
	if !debounce.Stop() {
		<-debounce.C
	}

	for {
		select {
		case <-ctx.Done():
			return
		case event, ok := <-w.fs.Events:
			if !ok {
				return
			}

			if event.Op&fsnotify.Create != 0 {
				if info, err := os.Stat(event.Name); err == nil && info.IsDir() {
					if err := w.addDirRecursive(event.Name); err != nil {
						w.logger.Printf("watcher: ajout repertoire %s impossible: %v", event.Name, err)
					}
				}
			}

			if w.shouldReindex(event.Name, event.Op) {
				if !debounce.Stop() {
					select {
					case <-debounce.C:
					default:
					}
				}
				debounce.Reset(200 * time.Millisecond)
			}
		case err, ok := <-w.fs.Errors:
			if !ok {
				return
			}
			w.logger.Printf("watcher: erreur: %v", err)
		case <-debounce.C:
			w.logger.Printf("watcher: reindexation suite a modification")
			if err := w.idx.Load(w.dir); err != nil {
				w.logger.Printf("watcher: echec reindexation: %v", err)
			}
		}
	}
}

func (w *Watcher) shouldReindex(path string, op fsnotify.Op) bool {
	ext := strings.ToLower(filepath.Ext(path))
	if ext != ".md" && op&fsnotify.Remove == 0 && op&fsnotify.Rename == 0 {
		return false
	}
	return true
}

func (w *Watcher) addDirRecursive(root string) error {
	return filepath.WalkDir(root, func(path string, entry os.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if entry.IsDir() {
			if err := w.fs.Add(path); err != nil {
				return err
			}
		}
		return nil
	})
}
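
// Usage sketch (illustrative only; notesDir, idx and logger are hypothetical
// variables supplied by the caller):
//
//	ctx, cancel := context.WithCancel(context.Background())
//	defer cancel()
//	w, err := Start(ctx, notesDir, idx, logger)
//	if err != nil {
//		logger.Fatalf("watcher: %v", err)
//	}
//	defer w.Close()
//	// Changes to .md files, plus any remove/rename event, now trigger a
//	// debounced (200 ms) re-index via idx.Load(notesDir).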