mirror of https://github.com/fccview/cronmaster.git (synced 2025-12-23 22:18:20 -05:00)

implement snippets in a better, more clever way
@@ -1,6 +1,6 @@
 "use client";

-import { useState } from "react";
+import { useState, useEffect } from "react";
 import { Button } from "./ui/Button";
 import { Input } from "./ui/Input";
 import {
@@ -16,11 +16,11 @@ import {
   Check,
 } from "lucide-react";
 import {
-  bashSnippets,
-  bashSnippetCategories,
-  searchBashSnippets,
+  fetchSnippets,
+  fetchSnippetCategories,
+  searchSnippets,
   type BashSnippet,
-} from "../_utils/bashSnippets";
+} from "../_server/actions/snippets";

 interface BashSnippetHelperProps {
   onInsertSnippet: (snippet: string) => void;
@@ -32,18 +32,57 @@ const categoryIcons = {
   Conditionals: Code,
   "System Operations": Settings,
   "Database Operations": Database,
+  "User Examples": FolderOpen,
+  "Custom Scripts": Code,
 };

 export function BashSnippetHelper({ onInsertSnippet }: BashSnippetHelperProps) {
   const [searchQuery, setSearchQuery] = useState("");
   const [selectedCategory, setSelectedCategory] = useState<string | null>(null);
   const [copiedId, setCopiedId] = useState<string | null>(null);
+  const [snippets, setSnippets] = useState<BashSnippet[]>([]);
+  const [categories, setCategories] = useState<string[]>([]);
+  const [filteredSnippets, setFilteredSnippets] = useState<BashSnippet[]>([]);
+  const [loading, setLoading] = useState(true);

-  const filteredSnippets = searchQuery
-    ? searchBashSnippets(searchQuery)
-    : selectedCategory
-    ? bashSnippets.filter((s) => s.category === selectedCategory)
-    : bashSnippets;
+  // Load snippets and categories on mount
+  useEffect(() => {
+    const loadData = async () => {
+      try {
+        const [snippetsData, categoriesData] = await Promise.all([
+          fetchSnippets(),
+          fetchSnippetCategories(),
+        ]);
+        setSnippets(snippetsData);
+        setCategories(categoriesData);
+      } catch (error) {
+        console.error("Error loading snippets:", error);
+      } finally {
+        setLoading(false);
+      }
+    };
+
+    loadData();
+  }, []);
+
+  // Filter snippets based on search and category
+  useEffect(() => {
+    const filterSnippets = async () => {
+      if (searchQuery) {
+        const searchResults = await searchSnippets(searchQuery);
+        setFilteredSnippets(searchResults);
+      } else if (selectedCategory) {
+        const categoryResults = snippets.filter(
+          (s) => s.category === selectedCategory
+        );
+        setFilteredSnippets(categoryResults);
+      } else {
+        setFilteredSnippets(snippets);
+      }
+    };
+
+    filterSnippets();
+  }, [searchQuery, selectedCategory, snippets]);

   const handleCopy = async (snippet: BashSnippet) => {
     await navigator.clipboard.writeText(snippet.template);
@@ -55,6 +94,17 @@ export function BashSnippetHelper({ onInsertSnippet }: BashSnippetHelperProps) {
     onInsertSnippet(snippet.template);
   };

+  if (loading) {
+    return (
+      <div className="space-y-3">
+        <div className="text-center py-8">
+          <Code className="h-8 w-8 text-muted-foreground mx-auto mb-2 animate-spin" />
+          <p className="text-sm text-muted-foreground">Loading snippets...</p>
+        </div>
+      </div>
+    );
+  }
+
   return (
     <div className="space-y-3">
       {/* Search */}
@@ -80,8 +130,9 @@
         >
           All
         </Button>
-        {bashSnippetCategories.map((category) => {
-          const Icon = categoryIcons[category as keyof typeof categoryIcons];
+        {categories.map((category) => {
+          const Icon =
+            categoryIcons[category as keyof typeof categoryIcons] || Code;
           return (
             <Button
               key={category}
@@ -103,7 +154,8 @@ export function BashSnippetHelper({ onInsertSnippet }: BashSnippetHelperProps) {
       <div className="space-y-2 max-h-64 overflow-y-auto custom-scrollbar">
         {filteredSnippets.map((snippet) => {
           const Icon =
-            categoryIcons[snippet.category as keyof typeof categoryIcons];
+            categoryIcons[snippet.category as keyof typeof categoryIcons] ||
+            Code;
           return (
             <div
               key={snippet.id}
@@ -116,6 +168,11 @@ export function BashSnippetHelper({ onInsertSnippet }: BashSnippetHelperProps) {
                 <h4 className="text-sm font-medium text-foreground truncate">
                   {snippet.title}
                 </h4>
+                {snippet.source === "user" && (
+                  <span className="inline-block px-1.5 py-0.5 text-xs bg-green-100 text-green-700 rounded border border-green-200">
+                    User
+                  </span>
+                )}
               </div>
               <p className="text-xs text-muted-foreground mb-2">
                 {snippet.description}

@@ -73,14 +73,6 @@ export function EditTaskModal({
           <Terminal className="h-4 w-4 text-muted-foreground" />
         </div>
       </div>
-
-      {/* Bash Snippets Helper */}
-      <div className="bg-muted/20 rounded-lg border border-border/30 p-3">
-        <h4 className="text-sm font-medium text-foreground mb-2">
-          💡 Useful Bash Snippets
-        </h4>
-        <BashSnippetHelper onInsertSnippet={handleInsertSnippet} />
-      </div>
     </div>
   </div>
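Note: the helper's only contract with a parent is the `onInsertSnippet` callback. A minimal sketch of how a parent could wire it into a command field follows; the component, import path, and class names below are hypothetical and not part of this commit.

```tsx
"use client";

import { useState } from "react";
import { BashSnippetHelper } from "./BashSnippetHelper"; // assumed path

// Hypothetical parent: appends the chosen template to a command textarea.
export function CommandEditor() {
  const [command, setCommand] = useState("");

  const handleInsertSnippet = (snippet: string) => {
    setCommand((current) => (current ? `${current}\n${snippet}` : snippet));
  };

  return (
    <div className="space-y-2">
      <textarea
        value={command}
        onChange={(e) => setCommand(e.target.value)}
        className="w-full h-32 font-mono text-sm"
      />
      <BashSnippetHelper onInsertSnippet={handleInsertSnippet} />
    </div>
  );
}
```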
77 app/_server/actions/snippets/index.ts Normal file
@@ -0,0 +1,77 @@
"use server";

import { revalidatePath } from "next/cache";
import {
  loadAllSnippets,
  searchBashSnippets,
  getSnippetCategories,
  getSnippetById,
  type BashSnippet,
} from "@/app/_utils/snippetScanner";

export { type BashSnippet } from "@/app/_utils/snippetScanner";

export async function fetchSnippets(): Promise<BashSnippet[]> {
  try {
    return await loadAllSnippets();
  } catch (error) {
    console.error("Error loading snippets:", error);
    return [];
  }
}

export async function searchSnippets(query: string): Promise<BashSnippet[]> {
  try {
    const snippets = await loadAllSnippets();
    return searchBashSnippets(snippets, query);
  } catch (error) {
    console.error("Error searching snippets:", error);
    return [];
  }
}

export async function fetchSnippetCategories(): Promise<string[]> {
  try {
    const snippets = await loadAllSnippets();
    return getSnippetCategories(snippets);
  } catch (error) {
    console.error("Error loading snippet categories:", error);
    return [];
  }
}

export async function fetchSnippetById(
  id: string
): Promise<BashSnippet | undefined> {
  try {
    const snippets = await loadAllSnippets();
    return getSnippetById(snippets, id);
  } catch (error) {
    console.error("Error loading snippet by ID:", error);
    return undefined;
  }
}

export async function fetchSnippetsByCategory(
  category: string
): Promise<BashSnippet[]> {
  try {
    const snippets = await loadAllSnippets();
    return snippets.filter((snippet) => snippet.category === category);
  } catch (error) {
    console.error("Error loading snippets by category:", error);
    return [];
  }
}

export async function fetchSnippetsBySource(
  source: "builtin" | "user"
): Promise<BashSnippet[]> {
  try {
    const snippets = await loadAllSnippets();
    return snippets.filter((snippet) => snippet.source === source);
  } catch (error) {
    console.error("Error loading snippets by source:", error);
    return [];
  }
}
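Because these are server actions ("use server"), they are plain async functions on the server; a server component can await them directly instead of going through the `useEffect` wiring shown in `BashSnippetHelper`. A minimal sketch; the page component below is hypothetical and not part of this commit.

```tsx
// Hypothetical server component, for illustration only.
import { fetchSnippets, fetchSnippetCategories } from "@/app/_server/actions/snippets";

export default async function SnippetsOverview() {
  // No client-side loading state needed: the data is fetched at render time.
  const [snippets, categories] = await Promise.all([
    fetchSnippets(),
    fetchSnippetCategories(),
  ]);

  return (
    <div>
      <p>
        {snippets.length} snippets across {categories.length} categories
      </p>
      <ul>
        {snippets.map((snippet) => (
          <li key={snippet.id}>
            {snippet.title} ({snippet.source})
          </li>
        ))}
      </ul>
    </div>
  );
}
```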
@@ -1,357 +0,0 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
export interface BashSnippet {
  id: string;
  title: string;
  description: string;
  category: string;
  template: string;
  tags: string[];
}

export const bashSnippets: BashSnippet[] = [
  // File Operations
  {
    id: "backup-rsync",
    title: "Backup with rsync",
    description:
      "Create a backup using rsync with progress and exclude options",
    category: "File Operations",
    template: `# Backup source directory to destination
# Change SOURCE_DIR and DEST_DIR to your paths
SOURCE_DIR="/path/to/source"
DEST_DIR="/path/to/backup"

rsync -av --progress --delete \\
  --exclude='*.tmp' \\
  --exclude='*.log' \\
  --exclude='node_modules' \\
  "$SOURCE_DIR/" "$DEST_DIR/"

echo "Backup completed at $(date)"`,
    tags: ["backup", "rsync", "sync", "copy"],
  },
  {
    id: "copy-files",
    title: "Copy files with confirmation",
    description: "Copy files with interactive confirmation and error handling",
    category: "File Operations",
    template: `# Copy files with confirmation
# Change SOURCE and DEST to your paths
SOURCE="/path/to/source"
DEST="/path/to/destination"

if [ -f "$SOURCE" ]; then
  cp -i "$SOURCE" "$DEST"
  if [ $? -eq 0 ]; then
    echo "File copied successfully"
  else
    echo "Error copying file"
    exit 1
  fi
else
  echo "Source file does not exist"
  exit 1
fi`,
    tags: ["copy", "cp", "file", "confirmation"],
  },
  {
    id: "move-files",
    title: "Move files safely",
    description: "Move files with backup and error handling",
    category: "File Operations",
    template: `# Move files with backup
# Change SOURCE and DEST to your paths
SOURCE="/path/to/source"
DEST="/path/to/destination"

# Create backup before moving
if [ -f "$SOURCE" ]; then
  cp "$SOURCE" "\${SOURCE}.backup.\$(date +%Y%m%d_%H%M%S)"
  mv "$SOURCE" "$DEST"
  echo "File moved successfully with backup created"
else
  echo "Source file does not exist"
  exit 1
fi`,
    tags: ["move", "mv", "backup", "safe"],
  },

  // Loops
  {
    id: "for-loop-files",
    title: "For loop through files",
    description: "Process multiple files in a directory",
    category: "Loops",
    template: `# Loop through files in a directory
# Change DIRECTORY to your target directory
DIRECTORY="/path/to/files"

for file in "$DIRECTORY"/*; do
  if [ -f "$file" ]; then
    echo "Processing: $file"
    # Add your processing logic here
    # Example: process_file "$file"
  fi
done

echo "All files processed"`,
    tags: ["loop", "files", "for", "process"],
  },
  {
    id: "while-loop-read",
    title: "While loop reading input",
    description: "Read input line by line and process it",
    category: "Loops",
    template: `# Read input line by line
# You can pipe input: echo "line1\\nline2" | ./script.sh
while IFS= read -r line; do
  echo "Processing line: $line"
  # Add your processing logic here
  # Example: process_line "$line"
done

echo "Finished processing all input"`,
    tags: ["loop", "while", "read", "input"],
  },
  {
    id: "for-loop-range",
    title: "For loop with range",
    description: "Loop through a range of numbers",
    category: "Loops",
    template: `# Loop through a range of numbers
# Change START and END to your desired range
START=1
END=10

for i in $(seq $START $END); do
  echo "Processing item $i"
  # Add your processing logic here
  # Example: process_item $i
done

echo "Finished processing range $START to $END"`,
    tags: ["loop", "range", "numbers", "seq"],
  },

  // Conditionals
  {
    id: "if-else-basic",
    title: "Basic if/else condition",
    description: "Simple conditional logic with error handling",
    category: "Conditionals",
    template: `# Basic if/else condition
# Change CONDITION to your actual condition
CONDITION="test"

if [ "$CONDITION" = "test" ]; then
  echo "Condition is true"
  # Add your logic here
else
  echo "Condition is false"
  # Add your alternative logic here
fi`,
    tags: ["if", "else", "condition", "basic"],
  },
  {
    id: "if-file-exists",
    title: "Check if file exists",
    description: "Check file existence and handle accordingly",
    category: "Conditionals",
    template: `# Check if file exists
# Change FILE_PATH to your file path
FILE_PATH="/path/to/file"

if [ -f "$FILE_PATH" ]; then
  echo "File exists: $FILE_PATH"
  # Add your logic for existing file
else
  echo "File does not exist: $FILE_PATH"
  # Add your logic for missing file
fi`,
    tags: ["if", "file", "exists", "check"],
  },
  {
    id: "if-directory-exists",
    title: "Check if directory exists",
    description: "Check directory existence and create if needed",
    category: "Conditionals",
    template: `# Check if directory exists and create if needed
# Change DIR_PATH to your directory path
DIR_PATH="/path/to/directory"

if [ -d "$DIR_PATH" ]; then
  echo "Directory exists: $DIR_PATH"
else
  echo "Creating directory: $DIR_PATH"
  mkdir -p "$DIR_PATH"
  if [ $? -eq 0 ]; then
    echo "Directory created successfully"
  else
    echo "Failed to create directory"
    exit 1
  fi
fi`,
    tags: ["if", "directory", "mkdir", "create"],
  },

  // System Operations
  {
    id: "system-info",
    title: "System information",
    description: "Get basic system information",
    category: "System Operations",
    template: `# Get system information
echo "=== System Information ==="
echo "Hostname: $(hostname)"
echo "OS: $(uname -s)"
echo "Kernel: $(uname -r)"
echo "Architecture: $(uname -m)"
echo "Uptime: $(uptime)"
echo "Memory: $(free -h | grep Mem | awk '{print $2}')"
echo "Disk Usage: $(df -h / | tail -1 | awk '{print $5}')"
echo "========================"`,
    tags: ["system", "info", "hostname", "uptime"],
  },
  {
    id: "log-rotation",
    title: "Log rotation",
    description: "Rotate log files with compression",
    category: "System Operations",
    template: `# Rotate log files
# Change LOG_FILE to your log file path
LOG_FILE="/var/log/your-app.log"
MAX_SIZE="100M"

# Check if log file exists and is larger than max size
if [ -f "$LOG_FILE" ] && [ $(stat -c%s "$LOG_FILE") -gt $(numfmt --from=iec $MAX_SIZE) ]; then
  echo "Rotating log file: $LOG_FILE"

  # Create backup with timestamp
  mv "$LOG_FILE" "\${LOG_FILE}.\$(date +%Y%m%d_%H%M%S)"

  # Compress old log
  gzip "\${LOG_FILE}.\$(date +%Y%m%d_%H%M%S)"

  # Create new log file
  touch "$LOG_FILE"

  echo "Log rotation completed"
else
  echo "Log file does not need rotation"
fi`,
    tags: ["log", "rotation", "compress", "gzip"],
  },
  {
    id: "process-monitor",
    title: "Process monitoring",
    description: "Monitor a process and restart if needed",
    category: "System Operations",
    template: `# Monitor and restart process if needed
# Change PROCESS_NAME to your process name
PROCESS_NAME="your-process"
RESTART_CMD="systemctl restart your-service"

if pgrep -x "$PROCESS_NAME" > /dev/null; then
  echo "$PROCESS_NAME is running"
else
  echo "$PROCESS_NAME is not running, restarting..."
  $RESTART_CMD

  # Wait a moment and check again
  sleep 5
  if pgrep -x "$PROCESS_NAME" > /dev/null; then
    echo "$PROCESS_NAME restarted successfully"
  else
    echo "Failed to restart $PROCESS_NAME"
    exit 1
  fi
fi`,
    tags: ["process", "monitor", "restart", "service"],
  },

  // Database Operations
  {
    id: "mysql-backup",
    title: "MySQL database backup",
    description: "Create a MySQL database backup with timestamp",
    category: "Database Operations",
    template: `# MySQL database backup
# Change these variables to your database details
DB_NAME="your_database"
DB_USER="your_username"
DB_PASS="your_password"
BACKUP_DIR="/path/to/backups"

# Create backup directory if it doesn't exist
mkdir -p "$BACKUP_DIR"

# Create backup filename with timestamp
BACKUP_FILE="$BACKUP_DIR/\${DB_NAME}_\$(date +%Y%m%d_%H%M%S).sql"

# Create the backup
mysqldump -u "$DB_USER" -p"$DB_PASS" "$DB_NAME" > "$BACKUP_FILE"

if [ $? -eq 0 ]; then
  echo "Database backup created: $BACKUP_FILE"

  # Compress the backup
  gzip "$BACKUP_FILE"
  echo "Backup compressed: \${BACKUP_FILE}.gz"
else
  echo "Database backup failed"
  exit 1
fi`,
    tags: ["mysql", "database", "backup", "mysqldump"],
  },
  {
    id: "postgres-backup",
    title: "PostgreSQL database backup",
    description: "Create a PostgreSQL database backup with timestamp",
    category: "Database Operations",
    template: `# PostgreSQL database backup
# Change these variables to your database details
DB_NAME="your_database"
DB_USER="your_username"
BACKUP_DIR="/path/to/backups"

# Create backup directory if it doesn't exist
mkdir -p "$BACKUP_DIR"

# Create backup filename with timestamp
BACKUP_FILE="$BACKUP_DIR/\${DB_NAME}_\$(date +%Y%m%d_%H%M%S).sql"

# Create the backup
pg_dump -U "$DB_USER" "$DB_NAME" > "$BACKUP_FILE"

if [ $? -eq 0 ]; then
  echo "Database backup created: $BACKUP_FILE"

  # Compress the backup
  gzip "$BACKUP_FILE"
  echo "Backup compressed: \${BACKUP_FILE}.gz"
else
  echo "Database backup failed"
  exit 1
fi`,
    tags: ["postgres", "postgresql", "database", "backup", "pg_dump"],
  },
];

export const bashSnippetCategories = [
  "File Operations",
  "Loops",
  "Conditionals",
  "System Operations",
  "Database Operations",
];

export function searchBashSnippets(query: string): BashSnippet[] {
  const lowercaseQuery = query.toLowerCase();
  return bashSnippets.filter(
    (snippet) =>
      snippet.title.toLowerCase().includes(lowercaseQuery) ||
      snippet.description.toLowerCase().includes(lowercaseQuery) ||
      snippet.tags.some((tag) => tag.toLowerCase().includes(lowercaseQuery)) ||
      snippet.category.toLowerCase().includes(lowercaseQuery)
  );
}
162 app/_utils/snippetScanner.ts Normal file
@@ -0,0 +1,162 @@
import { promises as fs } from "fs";
import path from "path";

export interface BashSnippet {
  id: string;
  title: string;
  description: string;
  category: string;
  template: string;
  tags: string[];
  source: "builtin" | "user";
  filePath: string;
}

interface SnippetMetadata {
  id?: string;
  title?: string;
  description?: string;
  category?: string;
  tags?: string[];
}

function parseMetadata(content: string): SnippetMetadata {
  const metadata: SnippetMetadata = {};
  const lines = content.split("\n");

  for (const line of lines) {
    const trimmed = line.trim();

    // Parse metadata comments like # @id: value
    const match = trimmed.match(/^#\s*@(\w+):\s*(.+)$/);
    if (match) {
      const [, key, value] = match;
      switch (key) {
        case "id":
          metadata.id = value.trim();
          break;
        case "title":
          metadata.title = value.trim();
          break;
        case "description":
          metadata.description = value.trim();
          break;
        case "category":
          metadata.category = value.trim();
          break;
        case "tags":
          metadata.tags = value.split(",").map((tag) => tag.trim());
          break;
      }
    }
  }

  return metadata;
}

function extractTemplate(content: string): string {
  const lines = content.split("\n");
  const templateLines: string[] = [];
  let inTemplate = false;

  for (const line of lines) {
    // Skip metadata comments
    if (line.trim().match(/^#\s*@\w+:/)) {
      continue;
    }

    // Start template after first non-metadata line
    if (!inTemplate && line.trim() && !line.trim().startsWith("# @")) {
      inTemplate = true;
    }

    if (inTemplate) {
      templateLines.push(line);
    }
  }

  return templateLines.join("\n").trim();
}

async function scanSnippetDirectory(
  dirPath: string,
  source: "builtin" | "user"
): Promise<BashSnippet[]> {
  const snippets: BashSnippet[] = [];

  try {
    const files = await fs.readdir(dirPath);

    for (const file of files) {
      if (file.endsWith(".sh")) {
        const filePath = path.join(dirPath, file);
        const content = await fs.readFile(filePath, "utf-8");
        const metadata = parseMetadata(content);
        const template = extractTemplate(content);

        // Only include snippets with valid metadata
        if (
          metadata.id &&
          metadata.title &&
          metadata.description &&
          metadata.category
        ) {
          snippets.push({
            id: metadata.id,
            title: metadata.title,
            description: metadata.description,
            category: metadata.category,
            template,
            tags: metadata.tags || [],
            source,
            filePath,
          });
        }
      }
    }
  } catch (error) {
    console.warn(`Warning: Could not scan directory ${dirPath}:`, error);
  }

  return snippets;
}

export async function loadAllSnippets(): Promise<BashSnippet[]> {
  const builtinSnippets = await scanSnippetDirectory(
    path.join(process.cwd(), "app", "_utils", "snippets"),
    "builtin"
  );

  const userSnippets = await scanSnippetDirectory(
    path.join(process.cwd(), "snippets"),
    "user"
  );

  return [...builtinSnippets, ...userSnippets];
}

export function searchBashSnippets(
  snippets: BashSnippet[],
  query: string
): BashSnippet[] {
  const lowercaseQuery = query.toLowerCase();
  return snippets.filter(
    (snippet) =>
      snippet.title.toLowerCase().includes(lowercaseQuery) ||
      snippet.description.toLowerCase().includes(lowercaseQuery) ||
      snippet.tags.some((tag) => tag.toLowerCase().includes(lowercaseQuery)) ||
      snippet.category.toLowerCase().includes(lowercaseQuery)
  );
}

export function getSnippetCategories(snippets: BashSnippet[]): string[] {
  const categories = new Set(snippets.map((snippet) => snippet.category));
  return Array.from(categories).sort();
}

export function getSnippetById(
  snippets: BashSnippet[],
  id: string
): BashSnippet | undefined {
  return snippets.find((snippet) => snippet.id === id);
}
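For reference, this is roughly what the scanner produces for a file written in the `# @key: value` format: `parseMetadata` picks up the tagged comments, `extractTemplate` keeps everything from the first non-empty, non-metadata line onward, and files missing any of the four required fields are skipped. A small sketch; the sample file below is made up for illustration.

```ts
import { loadAllSnippets, searchBashSnippets } from "@/app/_utils/snippetScanner";

// A hypothetical file app/_utils/snippets/hello-world.sh containing:
//
//   # @id: hello-world
//   # @title: Hello World
//   # @description: Print a greeting
//   # @category: User Examples
//   # @tags: hello,demo
//
//   echo "Hello World!"
//
// would be loaded as:
//
//   {
//     id: "hello-world",
//     title: "Hello World",
//     description: "Print a greeting",
//     category: "User Examples",
//     template: 'echo "Hello World!"',
//     tags: ["hello", "demo"],
//     source: "builtin",  // it lives under app/_utils/snippets
//     filePath: "<cwd>/app/_utils/snippets/hello-world.sh",
//   }

async function listMatchingSnippets(query: string) {
  const snippets = await loadAllSnippets();
  // searchBashSnippets matches against title, description, tags and category.
  return searchBashSnippets(snippets, query).map((snippet) => snippet.id);
}
```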
18 app/_utils/snippets/backup-rsync.sh Normal file
@@ -0,0 +1,18 @@
# @id: backup-rsync
# @title: Backup with rsync
# @description: Create a backup using rsync with progress and exclude options
# @category: File Operations
# @tags: backup,rsync,sync,copy

# Backup source directory to destination
# Change SOURCE_DIR and DEST_DIR to your paths
SOURCE_DIR="/path/to/source"
DEST_DIR="/path/to/backup"

rsync -av --progress --delete \
  --exclude='*.tmp' \
  --exclude='*.log' \
  --exclude='node_modules' \
  "$SOURCE_DIR/" "$DEST_DIR/"

echo "Backup completed at $(date)"

23 app/_utils/snippets/copy-files.sh Normal file
@@ -0,0 +1,23 @@
# @id: copy-files
# @title: Copy files with confirmation
# @description: Copy files with interactive confirmation and error handling
# @category: File Operations
# @tags: copy,cp,file,confirmation

# Copy files with confirmation
# Change SOURCE and DEST to your paths
SOURCE="/path/to/source"
DEST="/path/to/destination"

if [ -f "$SOURCE" ]; then
  cp -i "$SOURCE" "$DEST"
  if [ $? -eq 0 ]; then
    echo "File copied successfully"
  else
    echo "Error copying file"
    exit 1
  fi
else
  echo "Source file does not exist"
  exit 1
fi

19 app/_utils/snippets/for-loop-files.sh Normal file
@@ -0,0 +1,19 @@
# @id: for-loop-files
# @title: For loop through files
# @description: Process multiple files in a directory
# @category: Loops
# @tags: loop,files,for,process

# Loop through files in a directory
# Change DIRECTORY to your target directory
DIRECTORY="/path/to/files"

for file in "$DIRECTORY"/*; do
  if [ -f "$file" ]; then
    echo "Processing: $file"
    # Add your processing logic here
    # Example: process_file "$file"
  fi
done

echo "All files processed"

18 app/_utils/snippets/for-loop-range.sh Normal file
@@ -0,0 +1,18 @@
# @id: for-loop-range
# @title: For loop with range
# @description: Loop through a range of numbers
# @category: Loops
# @tags: loop,range,numbers,seq

# Loop through a range of numbers
# Change START and END to your desired range
START=1
END=10

for i in $(seq $START $END); do
  echo "Processing item $i"
  # Add your processing logic here
  # Example: process_item $i
done

echo "Finished processing range $START to $END"

22 app/_utils/snippets/if-directory-exists.sh Normal file
@@ -0,0 +1,22 @@
# @id: if-directory-exists
# @title: Check if directory exists
# @description: Check directory existence and create if needed
# @category: Conditionals
# @tags: if,directory,mkdir,create

# Check if directory exists and create if needed
# Change DIR_PATH to your directory path
DIR_PATH="/path/to/directory"

if [ -d "$DIR_PATH" ]; then
  echo "Directory exists: $DIR_PATH"
else
  echo "Creating directory: $DIR_PATH"
  mkdir -p "$DIR_PATH"
  if [ $? -eq 0 ]; then
    echo "Directory created successfully"
  else
    echo "Failed to create directory"
    exit 1
  fi
fi
17 app/_utils/snippets/if-else-basic.sh Normal file
@@ -0,0 +1,17 @@
# @id: if-else-basic
# @title: Basic if/else condition
# @description: Simple conditional logic with error handling
# @category: Conditionals
# @tags: if,else,condition,basic

# Basic if/else condition
# Change CONDITION to your actual condition
CONDITION="test"

if [ "$CONDITION" = "test" ]; then
  echo "Condition is true"
  # Add your logic here
else
  echo "Condition is false"
  # Add your alternative logic here
fi

17 app/_utils/snippets/if-file-exists.sh Normal file
@@ -0,0 +1,17 @@
# @id: if-file-exists
# @title: Check if file exists
# @description: Check file existence and handle accordingly
# @category: Conditionals
# @tags: if,file,exists,check

# Check if file exists
# Change FILE_PATH to your file path
FILE_PATH="/path/to/file"

if [ -f "$FILE_PATH" ]; then
  echo "File exists: $FILE_PATH"
  # Add your logic for existing file
else
  echo "File does not exist: $FILE_PATH"
  # Add your logic for missing file
fi

28 app/_utils/snippets/log-rotation.sh Normal file
@@ -0,0 +1,28 @@
# @id: log-rotation
# @title: Log rotation
# @description: Rotate log files with compression
# @category: System Operations
# @tags: log,rotation,compress,gzip

# Rotate log files
# Change LOG_FILE to your log file path
LOG_FILE="/var/log/your-app.log"
MAX_SIZE="100M"

# Check if log file exists and is larger than max size
if [ -f "$LOG_FILE" ] && [ $(stat -c%s "$LOG_FILE") -gt $(numfmt --from=iec $MAX_SIZE) ]; then
  echo "Rotating log file: $LOG_FILE"

  # Create backup with timestamp
  mv "$LOG_FILE" "${LOG_FILE}.$(date +%Y%m%d_%H%M%S)"

  # Compress old log
  gzip "${LOG_FILE}.$(date +%Y%m%d_%H%M%S)"

  # Create new log file
  touch "$LOG_FILE"

  echo "Log rotation completed"
else
  echo "Log file does not need rotation"
fi

20 app/_utils/snippets/move-files.sh Normal file
@@ -0,0 +1,20 @@
# @id: move-files
# @title: Move files safely
# @description: Move files with backup and error handling
# @category: File Operations
# @tags: move,mv,backup,safe

# Move files with backup
# Change SOURCE and DEST to your paths
SOURCE="/path/to/source"
DEST="/path/to/destination"

# Create backup before moving
if [ -f "$SOURCE" ]; then
  cp "$SOURCE" "${SOURCE}.backup.$(date +%Y%m%d_%H%M%S)"
  mv "$SOURCE" "$DEST"
  echo "File moved successfully with backup created"
else
  echo "Source file does not exist"
  exit 1
fi
32 app/_utils/snippets/mysql-backup.sh Normal file
@@ -0,0 +1,32 @@
# @id: mysql-backup
# @title: MySQL database backup
# @description: Create a MySQL database backup with timestamp
# @category: Database Operations
# @tags: mysql,database,backup,mysqldump

# MySQL database backup
# Change these variables to your database details
DB_NAME="your_database"
DB_USER="your_username"
DB_PASS="your_password"
BACKUP_DIR="/path/to/backups"

# Create backup directory if it doesn't exist
mkdir -p "$BACKUP_DIR"

# Create backup filename with timestamp
BACKUP_FILE="$BACKUP_DIR/${DB_NAME}_$(date +%Y%m%d_%H%M%S).sql"

# Create the backup
mysqldump -u "$DB_USER" -p"$DB_PASS" "$DB_NAME" > "$BACKUP_FILE"

if [ $? -eq 0 ]; then
  echo "Database backup created: $BACKUP_FILE"

  # Compress the backup
  gzip "$BACKUP_FILE"
  echo "Backup compressed: ${BACKUP_FILE}.gz"
else
  echo "Database backup failed"
  exit 1
fi

31 app/_utils/snippets/postgres-backup.sh Normal file
@@ -0,0 +1,31 @@
# @id: postgres-backup
# @title: PostgreSQL database backup
# @description: Create a PostgreSQL database backup with timestamp
# @category: Database Operations
# @tags: postgres,postgresql,database,backup,pg_dump

# PostgreSQL database backup
# Change these variables to your database details
DB_NAME="your_database"
DB_USER="your_username"
BACKUP_DIR="/path/to/backups"

# Create backup directory if it doesn't exist
mkdir -p "$BACKUP_DIR"

# Create backup filename with timestamp
BACKUP_FILE="$BACKUP_DIR/${DB_NAME}_$(date +%Y%m%d_%H%M%S).sql"

# Create the backup
pg_dump -U "$DB_USER" "$DB_NAME" > "$BACKUP_FILE"

if [ $? -eq 0 ]; then
  echo "Database backup created: $BACKUP_FILE"

  # Compress the backup
  gzip "$BACKUP_FILE"
  echo "Backup compressed: ${BACKUP_FILE}.gz"
else
  echo "Database backup failed"
  exit 1
fi

26 app/_utils/snippets/process-monitor.sh Normal file
@@ -0,0 +1,26 @@
# @id: process-monitor
# @title: Process monitoring
# @description: Monitor a process and restart if needed
# @category: System Operations
# @tags: process,monitor,restart,service

# Monitor and restart process if needed
# Change PROCESS_NAME to your process name
PROCESS_NAME="your-process"
RESTART_CMD="systemctl restart your-service"

if pgrep -x "$PROCESS_NAME" > /dev/null; then
  echo "$PROCESS_NAME is running"
else
  echo "$PROCESS_NAME is not running, restarting..."
  $RESTART_CMD

  # Wait a moment and check again
  sleep 5
  if pgrep -x "$PROCESS_NAME" > /dev/null; then
    echo "$PROCESS_NAME restarted successfully"
  else
    echo "Failed to restart $PROCESS_NAME"
    exit 1
  fi
fi

16 app/_utils/snippets/system-info.sh Normal file
@@ -0,0 +1,16 @@
# @id: system-info
# @title: System information
# @description: Get basic system information
# @category: System Operations
# @tags: system,info,hostname,uptime

# Get system information
echo "=== System Information ==="
echo "Hostname: $(hostname)"
echo "OS: $(uname -s)"
echo "Kernel: $(uname -r)"
echo "Architecture: $(uname -m)"
echo "Uptime: $(uptime)"
echo "Memory: $(free -h | grep Mem | awk '{print $2}')"
echo "Disk Usage: $(df -h / | tail -1 | awk '{print $5}')"
echo "========================"

15 app/_utils/snippets/while-loop-read.sh Normal file
@@ -0,0 +1,15 @@
# @id: while-loop-read
# @title: While loop reading input
# @description: Read input line by line and process it
# @category: Loops
# @tags: loop,while,read,input

# Read input line by line
# You can pipe input: echo "line1\nline2" | ./script.sh
while IFS= read -r line; do
  echo "Processing line: $line"
  # Add your processing logic here
  # Example: process_line "$line"
done

echo "Finished processing all input"
@@ -20,6 +20,8 @@ services:
       - ${NEXT_PUBLIC_HOST_PROJECT_DIR}/scripts:/app/scripts
       # Mount data directory for persistence
       - ${NEXT_PUBLIC_HOST_PROJECT_DIR}/data:/app/data
+      # Mount snippets directory for user-defined snippets
+      - ${NEXT_PUBLIC_HOST_PROJECT_DIR}/snippets:/app/snippets
     # Run with host network to access system information
     network_mode: host
     # Run as root to access system commands (needed for cron operations)
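The added volume maps the host's `./snippets` directory to `/app/snippets` in the container, which lines up with the user-snippet location used by `loadAllSnippets()`, assuming the app's working directory inside the container is `/app`. A sketch of the path resolution, for illustration:

```ts
import path from "path";

// loadAllSnippets() resolves the user snippet directory relative to the
// process working directory. With the container running from /app, this is:
const userSnippetDir = path.join(process.cwd(), "snippets");
// -> "/app/snippets", i.e. the mounted ${NEXT_PUBLIC_HOST_PROJECT_DIR}/snippets directory
console.log(userSnippetDir);
```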
66 snippets/README.md Normal file
@@ -0,0 +1,66 @@
# User Snippets Directory

This directory allows you to create your own bash script snippets that will automatically be recognized by the Cronjob Manager application.

## How to Create a Snippet

1. Create a new `.sh` file in this directory
2. Add metadata comments at the top of the file using the following format:

```bash
# @id: your-snippet-id
# @title: Your Snippet Title
# @description: A brief description of what this snippet does
# @category: Your Category
# @tags: tag1,tag2,tag3

# Your bash script content goes here
echo "Hello World!"
```

## Metadata Fields

- **@id**: A unique identifier for your snippet (lowercase, with hyphens instead of spaces)
- **@title**: A human-readable title for your snippet
- **@description**: A brief description of what the snippet does
- **@category**: The category this snippet belongs to (e.g. "File Operations", "System Operations")
- **@tags**: Comma-separated list of tags for searching

## Example

Here's an example snippet file:

```bash
# @id: my-custom-backup
# @title: My Custom Backup Script
# @description: A custom backup script for my specific needs
# @category: File Operations
# @tags: backup,custom,personal

# My custom backup logic
SOURCE_DIR="/home/user/documents"
BACKUP_DIR="/backup/documents"

rsync -av "$SOURCE_DIR/" "$BACKUP_DIR/"
echo "Custom backup completed at $(date)"
```

## Notes

- Only files with the `.sh` extension are recognized
- The `@id`, `@title`, `@description`, and `@category` fields are required for a snippet to be loaded; `@tags` is optional
- The script content should start after the metadata comments
- Your snippets will appear alongside the built-in snippets in the application
- Snippets are discovered at the top level of this directory; subdirectories are not scanned

## Categories

You can use any category name you want, but here are some common ones:

- File Operations
- System Operations
- Database Operations
- Loops
- Conditionals
- User Examples
- Custom Scripts
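If you want to sanity-check a snippet before dropping it into `./snippets`, the same `# @key: value` pattern the scanner uses can be checked with a few lines of Node. Below is a hypothetical helper script, not part of this commit; it only verifies the four fields the scanner actually requires, since `@tags` is optional.

```ts
// check-snippet.ts (hypothetical helper, e.g. run with: npx tsx check-snippet.ts path/to/file.sh)
import { promises as fs } from "fs";

const REQUIRED = ["id", "title", "description", "category"] as const;

async function checkSnippet(filePath: string): Promise<void> {
  const content = await fs.readFile(filePath, "utf-8");
  const found = new Set<string>();

  for (const line of content.split("\n")) {
    // Same metadata pattern the scanner looks for: "# @key: value"
    const match = line.trim().match(/^#\s*@(\w+):\s*(.+)$/);
    if (match) {
      found.add(match[1]);
    }
  }

  const missing = REQUIRED.filter((key) => !found.has(key));
  if (missing.length > 0) {
    console.error(`${filePath} is missing required metadata: ${missing.join(", ")}`);
    process.exitCode = 1;
  } else {
    console.log(`${filePath} looks valid`);
  }
}

checkSnippet(process.argv[2] ?? "snippets/example-user-snippet.sh").catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```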
16 snippets/example-user-snippet.sh Normal file
@@ -0,0 +1,16 @@
# @id: example-user-snippet
# @title: Example User Snippet
# @description: This is an example of how users can create their own snippets
# @category: User Snippets
# @tags: example,user,custom,demo

# This is an example user-created snippet
# Users can add their own .sh files to the ./snippets directory
# and they will automatically be recognized by the system

echo "Hello from user snippet!"
echo "Current time: $(date)"
echo "Working directory: $(pwd)"

# Add your custom logic here
# This could be any bash script you want to use as a template