update search command to use FTS5 SQLite table for complex searches

parent d839089153
commit 167829704a
8 changed files with 1359 additions and 267 deletions
@ -1,88 +0,0 @@
//
// config_handler.js
// handle the /sigma-config command
//
const util = require('util');
const { exec } = require('child_process');
const { SIGMA_CLI_PATH } = require('../../config/constants');
const { loadConfig, updateConfig } = require('../../config/config-manager');
const { updateSigmaDatabase } = require('../../services/sigma/sigma_repository_service');
const logger = require('../../utils/logger');

// Promisify exec for async/await usage
const execPromise = util.promisify(exec);

module.exports = (app) => {
    app.command('/sigma-config', async ({ command, ack, respond }) => {
        await ack();
        logger.info(`Sigma config command received: ${command.text}`);

        const args = command.text.split(' ');

        if (args.length === 0 || args[0] === '') {
            // Display current configuration
            const config = loadConfig();
            logger.info('Displaying current configuration');
            await respond(`Current configuration:\nSIEM: ${config.siem}\nLanguage: ${config.lang}\nOutput: ${config.output}`);
            return;
        }

        const configType = args[0];

        if (configType === 'update') {
            logger.info('Starting database update from command');
            try {
                await respond('Updating Sigma database... This may take a moment.');
                await updateSigmaDatabase();
                logger.info('Database update completed from command');
                await respond('Sigma database updated successfully');
            } catch (error) {
                logger.error(`Database update failed: ${error.message}`);
                await respond(`Error updating Sigma database: ${error.message}`);
            }
            return;
        }

        if (args.length < 2) {
            logger.warn(`Invalid config command format: ${command.text}`);
            await respond(`Invalid command format. Usage: /sigma-config ${configType} [value]`);
            return;
        }

        const configValue = args[1];
        const config = loadConfig();

        if (configType === 'siem') {
            // Verify the SIEM backend is installed
            logger.info(`Attempting to change SIEM to: ${configValue}`);
            try {
                await execPromise(`${SIGMA_CLI_PATH} list targets | grep ${configValue}`);
                updateConfig('siem', configValue);
                logger.info(`SIEM configuration updated to: ${configValue}`);
                await respond(`SIEM configuration updated to: ${configValue}`);
            } catch (error) {
                logger.error(`SIEM backend '${configValue}' not found or not installed`);
                await respond(`Error: SIEM backend '${configValue}' not found or not installed. Please install it with: sigma plugin install ${configValue}`);
            }
        } else if (configType === 'lang') {
            logger.info(`Changing language to: ${configValue}`);
            updateConfig('lang', configValue);
            await respond(`Language configuration updated to: ${configValue}`);
        } else if (configType === 'output') {
            // Check if output format is supported by the current backend
            logger.info(`Attempting to change output format to: ${configValue}`);
            try {
                await execPromise(`${SIGMA_CLI_PATH} list formats ${config.siem} | grep ${configValue}`);
                updateConfig('output', configValue);
                logger.info(`Output configuration updated to: ${configValue}`);
                await respond(`Output configuration updated to: ${configValue}`);
            } catch (error) {
                logger.error(`Output format '${configValue}' not supported by SIEM backend '${config.siem}'`);
                await respond(`Error: Output format '${configValue}' not supported by SIEM backend '${config.siem}'. Run 'sigma list formats ${config.siem}' to see available formats.`);
            }
        } else {
            logger.warn(`Unknown configuration type: ${configType}`);
            await respond(`Unknown configuration type: ${configType}. Available types: siem, lang, output, update`);
        }
    });
};

@ -1,142 +1,209 @@
|
|||
/**
|
||||
* fylgja_command_handler.js
|
||||
*
|
||||
* Unified command handler for the Fylgja Slack bot.
|
||||
* Processes natural language commands and routes to appropriate handlers.
|
||||
* Main handler for the /fylgja slash command
|
||||
* Parses natural language commands and routes to appropriate handlers
|
||||
*/
|
||||
|
||||
const logger = require('../utils/logger');
|
||||
const { parseCommand } = require('../lang/command_parser');
|
||||
const { handleError } = require('../utils/error_handler');
|
||||
const { handleCommand: handleSigmaSearch, handleComplexSearch } = require('./sigma/sigma_search_handler');
|
||||
const { handleCommand: handleSigmaDetails } = require('./sigma/sigma_details_handler');
|
||||
const { handleCommand: handleSigmaStats } = require('./sigma/sigma_stats_handler');
|
||||
const { handleCommand: handleSigmaCreate } = require('./sigma/sigma_create_handler');
|
||||
const { handleCommand: handleAlerts } = require('./alerts/alerts_handler');
|
||||
const { handleCommand: handleCase } = require('./case/case_handler');
|
||||
const { handleCommand: handleConfig } = require('./config/config_handler');
|
||||
const { handleCommand: handleStats } = require('./stats/stats_handler');
|
||||
|
||||
const FILE_NAME = 'fylgja_command_handler.js';
|
||||
|
||||
// Import command handlers
|
||||
const sigmaDetailsHandler = require('./sigma/sigma_details_handler');
|
||||
const sigmaSearchHandler = require('./sigma/sigma_search_handler');
|
||||
const sigmaCreateHandler = require('./sigma/sigma_create_handler');
|
||||
const sigmaStatsHandler = require('./sigma/sigma_stats_handler');
|
||||
|
||||
// Import language processing utilities
|
||||
const commandParser = require('../lang/command_parser');
|
||||
|
||||
/**
|
||||
* Handle the universal fylgja command
|
||||
* Main handler for the /fylgja command
|
||||
* Parses natural language input and routes to appropriate module handlers
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
const handleCommand = async (command, respond) => {
|
||||
try {
|
||||
if (!command || !command.text) {
|
||||
logger.warn(`${FILE_NAME}: Empty command received for fylgja`);
|
||||
await respond({
|
||||
text: 'Please provide a command. Try `/fylgja help` for available commands.',
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
try {
|
||||
logger.info(`${FILE_NAME}: Received command: ${command.text}`);
|
||||
|
||||
if (!command.text.trim()) {
|
||||
logger.warn(`${FILE_NAME}: Empty command received`);
|
||||
await respond({
|
||||
text: "Please provide a command. Try `/fylgja help` for usage examples.",
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse the natural language command
|
||||
const parsedCommand = await parseCommand(command.text);
|
||||
logger.debug(`${FILE_NAME}: Parsed command result: ${JSON.stringify(parsedCommand)}`);
|
||||
|
||||
if (!parsedCommand.success) {
|
||||
logger.warn(`${FILE_NAME}: Command parsing failed: ${parsedCommand.message}`);
|
||||
await respond({
|
||||
text: parsedCommand.message || "I couldn't understand that command. Try `/fylgja help` for examples.",
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract the structured command
|
||||
const { action, module, params } = parsedCommand.command;
|
||||
logger.info(`${FILE_NAME}: Routing command - Module: ${module}, Action: ${action}`);
|
||||
|
||||
// Route to the appropriate handler based on module and action
|
||||
switch (module) {
|
||||
case 'sigma':
|
||||
await handleSigmaCommand(action, params, command, respond);
|
||||
break;
|
||||
|
||||
case 'alerts':
|
||||
await handleAlerts(command, respond);
|
||||
break;
|
||||
|
||||
case 'case':
|
||||
await handleCase(command, respond);
|
||||
break;
|
||||
|
||||
case 'config':
|
||||
await handleConfig(command, respond);
|
||||
break;
|
||||
|
||||
case 'stats':
|
||||
await handleStats(command, respond);
|
||||
break;
|
||||
|
||||
case 'help':
|
||||
await handleHelpCommand(respond);
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.warn(`${FILE_NAME}: Unknown module: ${module}`);
|
||||
await respond({
|
||||
text: `Unknown command module: ${module}. Try \`/fylgja help\` for usage examples.`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
await handleError(error, `${FILE_NAME}: Command handler`, respond, {
|
||||
responseType: 'ephemeral'
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`${FILE_NAME}: Processing fylgja command: ${command.text}`);
|
||||
|
||||
// Parse the natural language command
|
||||
const parsedCommand = await commandParser.parseCommand(command.text);
|
||||
|
||||
if (!parsedCommand.success) {
|
||||
logger.warn(`${FILE_NAME}: Failed to parse command: ${command.text}`);
|
||||
await respond({
|
||||
text: `I couldn't understand that command. ${parsedCommand.message || ''}`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Route to the appropriate handler based on the parsed command
|
||||
await routeCommand(parsedCommand.command, command, respond);
|
||||
|
||||
} catch (error) {
|
||||
await handleError(error, `${FILE_NAME}: Command handler`, respond, {
|
||||
responseType: 'ephemeral'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Route the command to the appropriate handler
|
||||
* Handle Sigma-related commands
|
||||
*
|
||||
* @param {Object} parsedCommand - The parsed command object
|
||||
* @param {Object} originalCommand - The original Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
* @param {string} action - The action to perform
|
||||
* @param {Array} params - Command parameters
|
||||
* @param {Object} command - The original Slack command
|
||||
* @param {Function} respond - Function to send response
|
||||
*/
|
||||
const routeCommand = async (parsedCommand, originalCommand, respond) => {
|
||||
const { action, module, params } = parsedCommand;
|
||||
const handleSigmaCommand = async (action, params, command, respond) => {
|
||||
logger.debug(`${FILE_NAME}: Handling Sigma command - Action: ${action}, Params: ${JSON.stringify(params)}`);
|
||||
|
||||
// Create a modified command object with the extracted parameters
|
||||
const modifiedCommand = {
|
||||
...originalCommand,
|
||||
text: params.join(' ')
|
||||
};
|
||||
try {
|
||||
switch (action) {
|
||||
case 'search':
|
||||
// Update the command object with the keyword parameter
|
||||
command.text = params[0] || '';
|
||||
await handleSigmaSearch(command, respond);
|
||||
break;
|
||||
|
||||
// Log the routing decision
|
||||
logger.debug(`${FILE_NAME}: Routing command - Action: ${action}, Module: ${module}, Params: ${JSON.stringify(params)}`);
|
||||
case 'complexSearch':
|
||||
// Update the command object with the complex query
|
||||
command.text = params[0] || '';
|
||||
await handleComplexSearch(command, respond);
|
||||
break;
|
||||
|
||||
// Route to the appropriate handler
|
||||
switch (`${module}:${action}`) {
|
||||
case 'sigma:details':
|
||||
case 'sigma:explain':
|
||||
await sigmaDetailsHandler.handleCommand(modifiedCommand, respond);
|
||||
break;
|
||||
case 'details':
|
||||
// Update the command object with the rule ID parameter
|
||||
command.text = params[0] || '';
|
||||
await handleSigmaDetails(command, respond);
|
||||
break;
|
||||
|
||||
case 'sigma:search':
|
||||
await sigmaSearchHandler.handleCommand(modifiedCommand, respond);
|
||||
break;
|
||||
case 'stats':
|
||||
await handleSigmaStats(command, respond);
|
||||
break;
|
||||
|
||||
case 'sigma:create':
|
||||
await sigmaCreateHandler.handleCommand(modifiedCommand, respond);
|
||||
break;
|
||||
case 'create':
|
||||
// Update the command object with the rule ID parameter
|
||||
command.text = params[0] || '';
|
||||
await handleSigmaCreate(command, respond);
|
||||
break;
|
||||
|
||||
case 'sigma:stats':
|
||||
await sigmaStatsHandler.handleCommand(modifiedCommand, respond);
|
||||
break;
|
||||
|
||||
case 'help:general':
|
||||
await showHelp(respond);
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.warn(`${FILE_NAME}: Unknown command combination: ${module}:${action}`);
|
||||
await respond({
|
||||
text: `I don't know how to ${action} in ${module}. Try \`/fylgja help\` for available commands.`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
}
|
||||
default:
|
||||
logger.warn(`${FILE_NAME}: Unknown Sigma action: ${action}`);
|
||||
await respond({
|
||||
text: `Unknown Sigma action: ${action}. Try \`/fylgja help\` for usage examples.`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
await handleError(error, `${FILE_NAME}: Sigma command handler`, respond, {
|
||||
responseType: 'ephemeral'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Show help information
|
||||
* Handle help command
|
||||
*
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
* @param {Function} respond - Function to send response
|
||||
*/
|
||||
const showHelp = async (respond) => {
|
||||
await respond({
|
||||
text: "Here are some example commands you can use with Fylgja:",
|
||||
blocks: [
|
||||
{
|
||||
type: "section",
|
||||
text: {
|
||||
type: "mrkdwn",
|
||||
text: "*Fylgja Commands*\nHere are some example commands you can use:"
|
||||
}
|
||||
},
|
||||
{
|
||||
type: "section",
|
||||
text: {
|
||||
type: "mrkdwn",
|
||||
text: "• `/fylgja explain rule from sigma where id=<rule_id>`\n• `/fylgja search sigma for <query>`\n• `/fylgja create rule in sigma with <parameters>`\n• `/fylgja show stats for sigma`"
|
||||
}
|
||||
}
|
||||
],
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
const handleHelpCommand = async (respond) => {
|
||||
try {
|
||||
const helpText = `
|
||||
*Fylgja Command Help*
|
||||
|
||||
*Basic Commands:*
|
||||
• \`/fylgja search <keyword>\` - Search for Sigma rules by keyword
|
||||
• \`/fylgja details <rule_id>\` - Get details about a specific Sigma rule
|
||||
• \`/fylgja stats\` - Get statistics about Sigma rules database
|
||||
|
||||
*Advanced Search Commands:*
|
||||
• \`/fylgja search sigma rules where title contains "ransomware"\` - Search by title
|
||||
• \`/fylgja find rules where tags include privilege_escalation\` - Search by tags
|
||||
• \`/fylgja search rules where logsource.category == "process_creation"\` - Search by log source
|
||||
• \`/fylgja find rules where modified after 2024-01-01\` - Search by modification date
|
||||
• \`/fylgja search where level is "high" and tags include "attack.t1055"\` - Combined search
|
||||
|
||||
*Supported Conditions:*
|
||||
• Title: \`title contains "text"\`
|
||||
• Description: \`description contains "text"\`
|
||||
• Log Source: \`logsource.category == "value"\`, \`logsource.product == "value"\`
|
||||
• Tags: \`tags include "value"\`
|
||||
• Dates: \`modified after YYYY-MM-DD\`, \`modified before YYYY-MM-DD\`
|
||||
• Author: \`author is "name"\`
|
||||
• Level: \`level is "high"\`
|
||||
|
||||
*Logical Operators:*
|
||||
• AND: \`condition1 AND condition2\`
|
||||
• OR: \`condition1 OR condition2\`
|
||||
|
||||
*Pagination:*
|
||||
• Add \`page=N\` to see page N of results
|
||||
• Add \`limit=N\` to change number of results per page
|
||||
|
||||
For more information, visit the Fylgja documentation.
|
||||
`;
|
||||
|
||||
await respond({
|
||||
text: helpText,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
} catch (error) {
|
||||
await handleError(error, `${FILE_NAME}: Help command handler`, respond, {
|
||||
responseType: 'ephemeral'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
handleCommand
|
||||
handleCommand
|
||||
};
|
|
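Note: the routing contract between parseCommand and this handler is easy to check in isolation. A minimal sketch (the hard-coded parse result and the respond() stub are illustrative only, not part of this commit):

// Sketch only: how a parsed /fylgja command reaches the Sigma complex search.
// The parse result shape mirrors parseCommand()'s output used above; respond() is stubbed.
const parsed = {
    success: true,
    command: {
        module: 'sigma',
        action: 'complexSearch',
        params: ['title contains "ransomware" AND level is "high"']
    }
};

const respond = async (msg) => console.log(typeof msg === 'string' ? msg : msg.text);

const route = async ({ module, action, params }) => {
    if (module === 'sigma' && action === 'complexSearch') {
        // The real handler sets command.text = params[0] and calls handleComplexSearch(command, respond)
        await respond(`Routing complex search: ${params[0]}`);
    } else {
        await respond({ text: `Unknown command module: ${module}`, response_type: 'ephemeral' });
    }
};

route(parsed.command);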
@ -3,11 +3,11 @@
|
|||
*
|
||||
* Handles Sigma rule search requests from Slack commands
|
||||
*/
|
||||
const { searchSigmaRules } = require('../../services/sigma/sigma_search_service');
|
||||
|
||||
const { searchSigmaRules, searchSigmaRulesComplex } = require('../../services/sigma/sigma_search_service');
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { getSearchResultBlocks } = require('../../blocks/sigma/sigma_search_results_block');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
|
@ -70,10 +70,8 @@ const handleCommand = async (command, respond) => {
|
|||
|
||||
// Search for rules using the service function with pagination
|
||||
const searchResult = await searchSigmaRules(keyword, page, pageSize);
|
||||
|
||||
logger.debug(`${FILE_NAME}: Search result status: ${searchResult.success}`);
|
||||
logger.debug(`${FILE_NAME}: Found ${searchResult.results?.length || 0} results out of ${searchResult.pagination?.totalResults || 0} total matches`);
|
||||
|
||||
logger.debug(`${FILE_NAME}: About to generate blocks for search results`);
|
||||
|
||||
if (!searchResult.success) {
|
||||
|
@ -91,7 +89,6 @@ const handleCommand = async (command, respond) => {
|
|||
// Check if search returned too many results
|
||||
if (totalCount > MAX_RESULTS_THRESHOLD) {
|
||||
logger.warn(`${FILE_NAME}: Search for "${keyword}" returned too many results (${totalCount}), displaying first page with warning`);
|
||||
|
||||
// Continue processing but add a notification
|
||||
searchResult.tooManyResults = true;
|
||||
}
|
||||
|
@ -117,11 +114,9 @@ const handleCommand = async (command, respond) => {
|
|||
let blocks;
|
||||
try {
|
||||
logger.debug(`${FILE_NAME}: Calling getSearchResultBlocks with ${searchResult.results.length} results`);
|
||||
|
||||
// If we have too many results, add a warning block at the beginning
|
||||
if (searchResult.tooManyResults) {
|
||||
blocks = getSearchResultBlocks(keyword, searchResult.results, searchResult.pagination);
|
||||
|
||||
// Insert warning at the beginning of blocks (after the header)
|
||||
blocks.splice(1, 0, {
|
||||
"type": "section",
|
||||
|
@ -133,7 +128,6 @@ const handleCommand = async (command, respond) => {
|
|||
} else {
|
||||
blocks = getSearchResultBlocks(keyword, searchResult.results, searchResult.pagination);
|
||||
}
|
||||
|
||||
logger.debug(`${FILE_NAME}: Successfully generated ${blocks?.length || 0} blocks`);
|
||||
} catch (blockError) {
|
||||
// Use error handler for block generation errors
|
||||
|
@ -166,6 +160,128 @@ const handleCommand = async (command, respond) => {
|
|||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
handleCommand
|
||||
/**
|
||||
* Handle the complex search command for Sigma rules
|
||||
* Processes advanced search queries with multiple conditions
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
const handleComplexSearch = async (command, respond) => {
|
||||
try {
|
||||
logger.debug(`${FILE_NAME}: Processing complex search command: ${JSON.stringify(command.text)}`);
|
||||
|
||||
if (!command || !command.text) {
|
||||
logger.warn(`${FILE_NAME}: Empty command received for complex search`);
|
||||
await respond('Invalid command. Usage: /sigma-search where [conditions]');
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract query string
|
||||
let queryString = command.text.trim();
|
||||
let page = 1;
|
||||
let pageSize = MAX_RESULTS_PER_PAGE;
|
||||
|
||||
// Check for pagination format: query page=X
|
||||
const pagingMatch = queryString.match(/(.+)\s+page=(\d+)$/i);
|
||||
if (pagingMatch) {
|
||||
queryString = pagingMatch[1].trim();
|
||||
page = parseInt(pagingMatch[2], 10) || 1;
|
||||
logger.debug(`${FILE_NAME}: Detected pagination request in complex search: page ${page}`);
|
||||
}
|
||||
|
||||
// Check for page size format: query limit=X
|
||||
const limitMatch = queryString.match(/(.+)\s+limit=(\d+)$/i);
|
||||
if (limitMatch) {
|
||||
queryString = limitMatch[1].trim();
|
||||
pageSize = parseInt(limitMatch[2], 10) || MAX_RESULTS_PER_PAGE;
|
||||
// Ensure the page size is within reasonable limits
|
||||
pageSize = Math.min(Math.max(pageSize, 1), 100);
|
||||
logger.debug(`${FILE_NAME}: Detected page size request in complex search: limit ${pageSize}`);
|
||||
}
|
||||
|
||||
logger.info(`${FILE_NAME}: Performing complex search with query: ${queryString}`);
|
||||
|
||||
await respond({
|
||||
text: 'Processing complex search query... This may take a moment.',
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
|
||||
// Perform the complex search
|
||||
const searchResult = await searchSigmaRulesComplex(queryString, page, pageSize);
|
||||
|
||||
if (!searchResult.success) {
|
||||
logger.error(`${FILE_NAME}: Complex search failed: ${searchResult.message}`);
|
||||
await respond({
|
||||
text: `Search failed: ${searchResult.message}`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if we have results
|
||||
if (!searchResult.results || searchResult.results.length === 0) {
|
||||
logger.warn(`${FILE_NAME}: No rules found matching complex query criteria`);
|
||||
await respond({
|
||||
text: `No rules found matching the specified criteria.`,
|
||||
response_type: 'ephemeral'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Generate blocks with pagination support
|
||||
let blocks;
|
||||
try {
|
||||
// Use the standard search result blocks but with a modified header
|
||||
blocks = getSearchResultBlocks(
|
||||
`Complex Query: ${queryString}`,
|
||||
searchResult.results,
|
||||
searchResult.pagination
|
||||
);
|
||||
|
||||
// Replace the header to indicate it's a complex search
|
||||
if (blocks && blocks.length > 0) {
|
||||
blocks[0] = {
|
||||
type: "header",
|
||||
text: {
|
||||
type: "plain_text",
|
||||
text: `Sigma Rule Search Results - Query`,
|
||||
emoji: true
|
||||
}
|
||||
};
|
||||
|
||||
// Add a description of the search criteria
|
||||
blocks.splice(1, 0, {
|
||||
type: "section",
|
||||
text: {
|
||||
type: "mrkdwn",
|
||||
text: `*Query:* \`${queryString}\``
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (blockError) {
|
||||
await handleError(blockError, `${FILE_NAME}: Complex search block generation`, respond, {
|
||||
responseType: 'ephemeral',
|
||||
customMessage: `Error generating results view: ${blockError.message}`
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Respond with the search results
|
||||
await respond({
|
||||
blocks: blocks,
|
||||
response_type: 'ephemeral' // Complex searches are usually more specific to the user
|
||||
});
|
||||
|
||||
logger.info(`${FILE_NAME}: Complex search response sent successfully with ${searchResult.results.length} results`);
|
||||
} catch (error) {
|
||||
await handleError(error, `${FILE_NAME}: Complex search handler`, respond, {
|
||||
responseType: 'ephemeral'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
handleCommand,
|
||||
handleComplexSearch
|
||||
};
|
|
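Note: the page= / limit= suffix handling in handleComplexSearch above can be exercised on its own. A standalone sketch using the same regexes (the MAX_RESULTS_PER_PAGE value is assumed; the real constant is defined elsewhere in this file):

// Standalone sketch of the pagination-suffix parsing used by handleComplexSearch.
const MAX_RESULTS_PER_PAGE = 10; // assumed default for illustration

function parsePaginationSuffixes(text) {
    let queryString = text.trim();
    let page = 1;
    let pageSize = MAX_RESULTS_PER_PAGE;

    const pagingMatch = queryString.match(/(.+)\s+page=(\d+)$/i);
    if (pagingMatch) {
        queryString = pagingMatch[1].trim();
        page = parseInt(pagingMatch[2], 10) || 1;
    }

    const limitMatch = queryString.match(/(.+)\s+limit=(\d+)$/i);
    if (limitMatch) {
        queryString = limitMatch[1].trim();
        pageSize = Math.min(Math.max(parseInt(limitMatch[2], 10) || MAX_RESULTS_PER_PAGE, 1), 100);
    }

    return { queryString, page, pageSize };
}

console.log(parsePaginationSuffixes('title contains "ransomware" page=2'));
// -> { queryString: 'title contains "ransomware"', page: 2, pageSize: 10 }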
@ -34,17 +34,10 @@ const commandPatterns = [
    // Sigma search patterns
    {
        name: 'sigma-search',
        regex: /^(search|find|look\s+for)\s+(rules|detections)?\s*(in|from)?\s*sigma\s+(for|where|with)?\s+(.+)$/i,
        action: 'search',
        regex: /^(search|find)\s+(sigma\s+)?(rules|detections)?\s*(where|with)\s+(.+)$/i,
        action: 'complexSearch',
        module: 'sigma',
        params: [5] // search query is in capturing group 5
    },
    {
        name: 'sigma-search-simple',
        regex: /^(search|find)\s+(.+)$/i,
        action: 'search',
        module: 'sigma',
        params: [2] // search query is in capturing group 2
        params: [5] // complex query conditions in capturing group 5
    },

    // Sigma create patterns
|
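Note: a quick way to confirm which capture group feeds params for the updated pattern (standalone sketch, not part of the commit):

// Quick check of the updated sigma-search regex and its capture groups.
const complexSearchRegex = /^(search|find)\s+(sigma\s+)?(rules|detections)?\s*(where|with)\s+(.+)$/i;

const text = 'search sigma rules where title contains "ransomware" AND level is "high"';
const matches = text.match(complexSearchRegex);

// Group 5 carries the raw condition string handed to the complexSearch action.
console.log(matches && matches[5]);
// -> title contains "ransomware" AND level is "high"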
src/lang/query_parser.js (new file, 197 lines)

@ -0,0 +1,197 @@
/**
 * query_parser.js
 *
 * Utility to parse complex search queries for Sigma rules
 * Handles conditions like title contains "X", tags include "Y", etc.
 */

const logger = require('../utils/logger');
const FILE_NAME = 'query_parser.js';

/**
 * Parse a complex query string into structured search parameters
 * Supports conditions like:
 * - title contains "ransomware"
 * - logsource.category == "process_creation"
 * - tags include privilege_escalation
 * - modified after 2024-01-01
 * - author is "John Doe"
 *
 * Also supports logical operators:
 * - AND, and
 * - OR, or
 *
 * @param {string} queryString - The complex query string to parse
 * @returns {Object} Structured search parameters
 */
function parseComplexQuery(queryString) {
    try {
        logger.debug(`${FILE_NAME}: Parsing complex query: ${queryString}`);

        if (!queryString || typeof queryString !== 'string') {
            logger.warn(`${FILE_NAME}: Invalid query string`);
            return { valid: false, error: 'Invalid query string' };
        }

        // Initialize the result object
        const result = {
            valid: true,
            conditions: [],
            operator: 'AND' // Default to AND for multiple conditions
        };

        // Check for explicit logical operators
        if (/ AND /i.test(queryString)) {
            result.operator = 'AND';
            // Split by AND and parse each part
            const parts = queryString.split(/ AND /i);
            for (const part of parts) {
                const condition = parseCondition(part.trim());
                if (condition) {
                    result.conditions.push(condition);
                }
            }
        } else if (/ OR /i.test(queryString)) {
            result.operator = 'OR';
            // Split by OR and parse each part
            const parts = queryString.split(/ OR /i);
            for (const part of parts) {
                const condition = parseCondition(part.trim());
                if (condition) {
                    result.conditions.push(condition);
                }
            }
        } else {
            // Single condition
            const condition = parseCondition(queryString.trim());
            if (condition) {
                result.conditions.push(condition);
            }
        }

        // If no valid conditions found, mark as invalid
        if (result.conditions.length === 0) {
            result.valid = false;
            result.error = 'No valid search conditions found';
        }

        logger.debug(`${FILE_NAME}: Parsed query result: ${JSON.stringify(result)}`);
        return result;
    } catch (error) {
        logger.error(`${FILE_NAME}: Error parsing complex query: ${error.message}`);
        return {
            valid: false,
            error: `Error parsing query: ${error.message}`
        };
    }
}

/**
 * Parse a single condition from the query string
 *
 * @param {string} conditionStr - The condition string to parse
 * @returns {Object|null} Parsed condition object or null if invalid
 */
function parseCondition(conditionStr) {
    logger.debug(`${FILE_NAME}: Parsing condition: ${conditionStr}`);

    // Define regex patterns for different condition types
    const patterns = [
        // title contains "value"
        {
            regex: /^(title|name)\s+(contains|has|like|includes)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'title',
                operator: 'contains',
                value: matches[3].trim()
            })
        },
        // description contains "value"
        {
            regex: /^(description|desc)\s+(contains|has|like|includes)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'description',
                operator: 'contains',
                value: matches[3].trim()
            })
        },
        // logsource.category == "value" or logsource.category = "value"
        {
            regex: /^logsource\.(\w+)\s*(==|=|equals?)\s*"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'logsource',
                subfield: matches[1].toLowerCase(),
                operator: 'equals',
                value: matches[3].trim()
            })
        },
        // tags include "value" or tag contains "value"
        {
            regex: /^tags?\s+(includes?|contains|has)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'tags',
                operator: 'contains',
                value: matches[2].trim()
            })
        },
        // modified after YYYY-MM-DD
        {
            regex: /^(modified|updated|created|date)\s+(after|before|on|since)\s+"?(\d{4}-\d{2}-\d{2})"?$/i,
            handler: (matches) => ({
                field: 'date',
                type: matches[1].toLowerCase(),
                operator: matches[2].toLowerCase(),
                value: matches[3].trim()
            })
        },
        // author is "value" or author = "value"
        {
            regex: /^(author|creator)\s+(is|equals?|==|=)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'author',
                operator: 'equals',
                value: matches[3].trim()
            })
        },
        // level is "value" or level = "value"
        {
            regex: /^(level|severity)\s+(is|equals?|==|=)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'level',
                operator: 'equals',
                value: matches[3].trim()
            })
        },
        // id is "value" or id = "value"
        {
            regex: /^(id|identifier)\s+(is|equals?|==|=)\s+"?([^"]+)"?$/i,
            handler: (matches) => ({
                field: 'id',
                operator: 'equals',
                value: matches[3].trim()
            })
        }
    ];

    // Try each pattern
    for (const pattern of patterns) {
        const matches = conditionStr.match(pattern.regex);
        if (matches) {
            return pattern.handler(matches);
        }
    }

    // If we get here, no patterns matched
    logger.warn(`${FILE_NAME}: No pattern matched condition: ${conditionStr}`);

    // Default to simple keyword search if no specific pattern matches
    return {
        field: 'keyword',
        operator: 'contains',
        value: conditionStr.trim()
    };
}

module.exports = {
    parseComplexQuery
};
|
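Note: a usage sketch for the new parser; the require path is assumed for illustration, and the expected output follows from parseCondition() above:

// Usage sketch: parsing one complex query into structured conditions.
// The require path is an assumption (relative to the repo root).
const { parseComplexQuery } = require('./src/lang/query_parser');

const parsed = parseComplexQuery('title contains "ransomware" AND level is "high"');
console.log(JSON.stringify(parsed, null, 2));
// Expected shape, following parseCondition() above:
// {
//   "valid": true,
//   "conditions": [
//     { "field": "title", "operator": "contains", "value": "ransomware" },
//     { "field": "level", "operator": "equals", "value": "high" }
//   ],
//   "operator": "AND"
// }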
@ -1,14 +1,15 @@
|
|||
/**
|
||||
* sigma_search_service.js
|
||||
*
|
||||
* This service provides functionality for searching Sigma rules by keywords.
|
||||
* This service provides functionality for searching Sigma rules by keywords and complex queries.
|
||||
* It processes search results and returns them in a structured format.
|
||||
* Supports pagination for large result sets.
|
||||
*/
|
||||
const { searchRules } = require('../../sigma_db/sigma_db_queries');
|
||||
|
||||
const { searchRules, searchRulesComplex } = require('../../sigma_db/sigma_db_queries');
|
||||
const { parseComplexQuery } = require('../../lang/query_parser');
|
||||
const logger = require('../../utils/logger');
|
||||
const { convertSigmaRule } = require('./sigma_converter_service');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
|
@ -153,6 +154,109 @@ async function searchSigmaRules(keyword, page = 1, pageSize = 10) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches for Sigma rules using complex query conditions
|
||||
* Supports filtering by title, logsource, tags, dates, and more
|
||||
*
|
||||
* @param {string} queryString - The complex query string to parse
|
||||
* @param {number} page - Page number (1-based index, default: 1)
|
||||
* @param {number} pageSize - Number of results per page (default: 10)
|
||||
* @returns {Promise<Object>} Result object with success flag and processed results
|
||||
*/
|
||||
async function searchSigmaRulesComplex(queryString, page = 1, pageSize = 10) {
|
||||
if (!queryString || typeof queryString !== 'string') {
|
||||
logger.warn(`${FILE_NAME}: Cannot perform complex search: Missing or invalid query string`);
|
||||
return {
|
||||
success: false,
|
||||
message: 'Missing or invalid complex query'
|
||||
};
|
||||
}
|
||||
|
||||
// Validate pagination parameters
|
||||
if (typeof page !== 'number' || page < 1) {
|
||||
logger.warn(`${FILE_NAME}: Invalid page number: ${page}, defaulting to 1`);
|
||||
page = 1;
|
||||
}
|
||||
|
||||
if (typeof pageSize !== 'number' || pageSize < 1 || pageSize > 100) {
|
||||
logger.warn(`${FILE_NAME}: Invalid page size: ${pageSize}, defaulting to 10`);
|
||||
pageSize = 10;
|
||||
}
|
||||
|
||||
// Calculate the offset based on page number
|
||||
const offset = (page - 1) * pageSize;
|
||||
|
||||
logger.info(`${FILE_NAME}: Performing complex search with query: "${queryString}" (page ${page}, size ${pageSize})`);
|
||||
|
||||
try {
|
||||
// Parse the complex query string
|
||||
const parsedQuery = parseComplexQuery(queryString);
|
||||
|
||||
if (!parsedQuery.valid) {
|
||||
logger.warn(`${FILE_NAME}: Invalid complex query: ${parsedQuery.error}`);
|
||||
return {
|
||||
success: false,
|
||||
message: `Invalid query: ${parsedQuery.error}`
|
||||
};
|
||||
}
|
||||
|
||||
// Perform the database search with the parsed query
|
||||
const searchResult = await searchRulesComplex(parsedQuery, pageSize, offset);
|
||||
|
||||
// Defensive handling of possible return formats
|
||||
let allResults = [];
|
||||
let totalCount = 0;
|
||||
|
||||
// Handle search results
|
||||
if (searchResult) {
|
||||
if (Array.isArray(searchResult.results)) {
|
||||
allResults = searchResult.results;
|
||||
totalCount = searchResult.totalCount || 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (allResults.length === 0) {
|
||||
return {
|
||||
success: true,
|
||||
results: [],
|
||||
message: `No rules found matching the complex query criteria`,
|
||||
pagination: {
|
||||
currentPage: page,
|
||||
pageSize: pageSize,
|
||||
totalPages: Math.ceil(totalCount / pageSize),
|
||||
totalResults: totalCount,
|
||||
hasMore: false
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate pagination info
|
||||
const totalPages = Math.ceil(totalCount / pageSize);
|
||||
const hasMore = (offset + pageSize) < totalCount;
|
||||
|
||||
return {
|
||||
success: true,
|
||||
results: allResults,
|
||||
count: allResults.length,
|
||||
query: parsedQuery,
|
||||
pagination: {
|
||||
currentPage: page,
|
||||
pageSize: pageSize,
|
||||
totalPages: totalPages,
|
||||
totalResults: totalCount,
|
||||
hasMore: hasMore
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${FILE_NAME}: Error in complex search: ${error.message}`);
|
||||
logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
|
||||
return {
|
||||
success: false,
|
||||
message: `Error performing complex search: ${error.message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Enhanced search that returns fully converted rule objects with pagination support
|
||||
* This is a more expensive operation than basic search
|
||||
|
@ -210,5 +314,6 @@ async function searchAndConvertRules(keyword, page = 1, pageSize = 10) {
|
|||
|
||||
module.exports = {
|
||||
searchSigmaRules,
|
||||
searchSigmaRulesComplex,
|
||||
searchAndConvertRules
|
||||
};
|
|
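Note: the pagination metadata returned by searchSigmaRulesComplex above reduces to a little arithmetic; a worked example:

// Worked example of the pagination fields computed above.
const page = 3, pageSize = 10, totalCount = 47;

const offset = (page - 1) * pageSize;                 // 20 rows skipped before this page
const totalPages = Math.ceil(totalCount / pageSize);  // 5
const hasMore = (offset + pageSize) < totalCount;     // 30 < 47 -> true

console.log({ currentPage: page, pageSize, totalPages, totalResults: totalCount, hasMore });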
@ -58,41 +58,49 @@ async function initializeDatabase(db) {
            return;
        }

        // Create rules table with basic information
        const createRulesTableSql = `
            CREATE TABLE sigma_rules (
                id TEXT PRIMARY KEY,
                file_path TEXT,
                content TEXT,
                date DATETIME DEFAULT CURRENT_TIMESTAMP
            )
        `;

        db.run(createRulesTableSql, (err) => {
        // Drop FTS table if exists
        db.run('DROP TABLE IF EXISTS rule_search', (err) => {
            if (err) {
                reject(err);
                return;
            }

            // Create rule_parameters table for individual parameters
            const createParamsTableSql = `
                CREATE TABLE rule_parameters (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    rule_id TEXT,
                    param_name TEXT,
                    param_value TEXT,
                    param_type TEXT,
                    FOREIGN KEY (rule_id) REFERENCES sigma_rules(id) ON DELETE CASCADE
            // Create rules table with basic information
            const createRulesTableSql = `
                CREATE TABLE sigma_rules (
                    id TEXT PRIMARY KEY,
                    file_path TEXT,
                    content TEXT,
                    date DATETIME DEFAULT CURRENT_TIMESTAMP
                )
            `;

            db.run(createParamsTableSql, (err) => {
            db.run(createRulesTableSql, (err) => {
                if (err) {
                    reject(err);
                } else {
                    logger.info(`${FILE_NAME}: Database schema initialized`);
                    resolve();
                    return;
                }

                // Create rule_parameters table for individual parameters
                const createParamsTableSql = `
                    CREATE TABLE rule_parameters (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        rule_id TEXT,
                        param_name TEXT,
                        param_value TEXT,
                        param_type TEXT,
                        FOREIGN KEY (rule_id) REFERENCES sigma_rules(id) ON DELETE CASCADE
                    )
                `;

                db.run(createParamsTableSql, (err) => {
                    if (err) {
                        reject(err);
                    } else {
                        logger.info(`${FILE_NAME}: Database schema initialized`);
                        resolve();
                    }
                });
            });
        });
    });

@ -100,6 +108,70 @@
    });
}

// Create FTS5 virtual table for full-text search
async function createFtsTable(db) {
    return new Promise((resolve, reject) => {
        logger.info(`${FILE_NAME}: Creating FTS5 virtual table for full-text search`);

        // Create the FTS5 virtual table
        const createFtsTableSql = `
            CREATE VIRTUAL TABLE IF NOT EXISTS rule_search USING fts5(
                rule_id,
                title,
                description,
                logsource,
                tags,
                author,
                level,
                content,
                tokenize="unicode61"
            );
        `;

        db.run(createFtsTableSql, (err) => {
            if (err) {
                logger.error(`${FILE_NAME}: Failed to create FTS5 table: ${err.message}`);
                reject(err);
            } else {
                logger.info(`${FILE_NAME}: FTS5 virtual table created successfully`);
                resolve();
            }
        });
    });
}

// Populate FTS table with rule data for full-text search
async function populateFtsTable(db) {
    return new Promise((resolve, reject) => {
        logger.info(`${FILE_NAME}: Populating FTS5 table with rule data`);

        // Insert query that aggregates data from both tables
        const populateFtsSql = `
            INSERT INTO rule_search(rule_id, title, description, logsource, tags, author, level, content)
            SELECT
                r.id,
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'title' LIMIT 1),
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'description' LIMIT 1),
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'logsource' LIMIT 1),
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'tags' LIMIT 1),
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'author' LIMIT 1),
                (SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'level' LIMIT 1),
                r.content
            FROM sigma_rules r
        `;

        db.run(populateFtsSql, (err) => {
            if (err) {
                logger.error(`${FILE_NAME}: Failed to populate FTS5 table: ${err.message}`);
                reject(err);
            } else {
                logger.info(`${FILE_NAME}: FTS5 table populated successfully`);
                resolve();
            }
        });
    });
}

// Determine if a YAML document is a Sigma rule
function isSigmaRule(doc) {
    // Check for essential Sigma rule properties

@ -531,6 +603,12 @@ async function main() {
    // Create indexes
    await createIndexes(db);

    // Create FTS5 table
    await createFtsTable(db);

    // Populate FTS5 table with rule data
    await populateFtsTable(db);

    // Close database connection
    db.close((err) => {
        if (err) {

@ -556,5 +634,7 @@ if (require.main === module) {
module.exports = {
    initializeDatabase,
    importRules,
    createIndexes
    createIndexes,
    createFtsTable,
    populateFtsTable
};
|
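Note: once rule_search is populated, it can be queried directly with MATCH; a minimal sqlite3 sketch (the database file path is an assumption for illustration):

// Minimal sketch of querying the new FTS5 table directly with the sqlite3 driver.
const sqlite3 = require('sqlite3').verbose();
const db = new sqlite3.Database('./sigma.db'); // assumed path

const ftsQuery = 'ransomware*'; // trailing * enables prefix matching, as searchRulesFTS does below

db.all(
    `SELECT rule_id, title
       FROM rule_search
      WHERE rule_search MATCH ?
      ORDER BY rank
      LIMIT ? OFFSET ?`,
    [ftsQuery, 10, 0],
    (err, rows) => {
        if (err) throw err;
        rows.forEach(r => console.log(`${r.rule_id}: ${r.title}`));
        db.close();
    }
);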
@ -198,6 +198,17 @@ async function searchRules(keyword, limit = 10, offset = 0) {
|
|||
db = await getDbConnection();
|
||||
logger.debug(`${FILE_NAME}: Database connection established for search`);
|
||||
|
||||
// Use FTS5 for faster searching if available
|
||||
const ftsAvailable = await checkFtsAvailable(db);
|
||||
|
||||
if (ftsAvailable) {
|
||||
logger.debug(`${FILE_NAME}: Using FTS5 for keyword search`);
|
||||
return searchRulesFTS(keyword, limit, offset);
|
||||
}
|
||||
|
||||
// If FTS5 is not available, use the legacy search method
|
||||
logger.debug(`${FILE_NAME}: FTS5 not available, using legacy search method`);
|
||||
|
||||
// First get the total count of matching rules (for pagination info)
|
||||
const countQuery = `
|
||||
SELECT COUNT(*) as count
|
||||
|
@ -264,6 +275,613 @@ async function searchRules(keyword, limit = 10, offset = 0) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if FTS5 virtual table is available
|
||||
*
|
||||
* @param {Object} db - Database connection
|
||||
* @returns {Promise<boolean>} Whether FTS5 is available
|
||||
*/
|
||||
async function checkFtsAvailable(db) {
|
||||
try {
|
||||
const result = await new Promise((resolve, reject) => {
|
||||
db.get("SELECT name FROM sqlite_master WHERE type='table' AND name='rule_search'", (err, row) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: Error checking for FTS5 table: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(row !== undefined);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
logger.debug(`${FILE_NAME}: FTS5 table availability check: ${result ? 'Available' : 'Not available'}`);
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error(`${FILE_NAME}: Error checking FTS availability: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for Sigma rules using FTS5
|
||||
* Performs a full-text search and returns matching rules with pagination
|
||||
*
|
||||
* @param {string} keyword - The keyword to search for
|
||||
* @param {number} limit - Maximum number of results to return (default: 10)
|
||||
* @param {number} offset - Number of results to skip (for pagination, default: 0)
|
||||
* @returns {Promise<Object>} Object with results array and total count
|
||||
*/
|
||||
async function searchRulesFTS(keyword, limit = 10, offset = 0) {
|
||||
if (!keyword) {
|
||||
logger.warn(`${FILE_NAME}: Empty search keyword provided for FTS search`);
|
||||
return { results: [], totalCount: 0 };
|
||||
}
|
||||
|
||||
// Prepare FTS query - add * for prefix matching if not already present
|
||||
let ftsQuery = keyword.trim();
|
||||
if (!ftsQuery.endsWith('*')) {
|
||||
ftsQuery = `${ftsQuery}*`;
|
||||
}
|
||||
|
||||
logger.info(`${FILE_NAME}: Performing FTS search with query: "${ftsQuery}" (limit: ${limit}, offset: ${offset})`);
|
||||
|
||||
let db;
|
||||
try {
|
||||
db = await getDbConnection();
|
||||
logger.debug(`${FILE_NAME}: Database connection established for FTS search`);
|
||||
|
||||
// First get the total count of matching rules
|
||||
const countQuery = `
|
||||
SELECT COUNT(*) as count
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
`;
|
||||
|
||||
const countResult = await new Promise((resolve, reject) => {
|
||||
db.get(countQuery, [ftsQuery], (err, row) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: FTS count query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(row || { count: 0 });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const totalCount = countResult.count;
|
||||
logger.debug(`${FILE_NAME}: Total matching rules for FTS query "${ftsQuery}": ${totalCount}`);
|
||||
|
||||
// Now get the actual results with pagination
|
||||
const searchQuery = `
|
||||
SELECT rule_id, title
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
ORDER BY rank
|
||||
LIMIT ? OFFSET ?
|
||||
`;
|
||||
|
||||
const results = await new Promise((resolve, reject) => {
|
||||
db.all(searchQuery, [ftsQuery, limit, offset], (err, rows) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: FTS search query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
logger.debug(`${FILE_NAME}: FTS search query returned ${rows ? rows.length : 0} results`);
|
||||
resolve(rows || []);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
logger.debug(`${FILE_NAME}: FTS search results page for query "${ftsQuery}": ${results.length} matches (page ${Math.floor(offset / limit) + 1})`);
|
||||
|
||||
return {
|
||||
results: results.map(r => ({ id: r.rule_id, title: r.title || r.rule_id })),
|
||||
totalCount
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${FILE_NAME}: Error in FTS search operation: ${error.message}`);
|
||||
logger.debug(`${FILE_NAME}: FTS search error stack: ${error.stack}`);
|
||||
return { results: [], totalCount: 0 };
|
||||
} finally {
|
||||
if (db) {
|
||||
try {
|
||||
await db.close();
|
||||
logger.debug(`${FILE_NAME}: Database connection closed after FTS search operation`);
|
||||
} catch (closeError) {
|
||||
logger.error(`${FILE_NAME}: Error closing database connection after FTS search: ${closeError.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for Sigma rules using complex query conditions
|
||||
* Supports filtering by multiple attributes like title, logsource, tags, etc.
|
||||
*
|
||||
* @param {Object} parsedQuery - The parsed query object containing conditions and operator
|
||||
* @param {number} limit - Maximum number of results to return
|
||||
* @param {number} offset - Number of results to skip (for pagination)
|
||||
* @returns {Promise<Object>} Object with results array and total count
|
||||
*/
|
||||
async function searchRulesComplex(parsedQuery, limit = 10, offset = 0) {
|
||||
if (!parsedQuery || !parsedQuery.valid) {
|
||||
logger.warn(`${FILE_NAME}: Invalid query object provided`);
|
||||
return { results: [], totalCount: 0 };
|
||||
}
|
||||
|
||||
logger.info(`${FILE_NAME}: Performing complex search with ${parsedQuery.conditions.length} conditions (limit: ${limit}, offset: ${offset})`);
|
||||
|
||||
let db;
|
||||
// Declare this at function scope so it's available in the finally block
|
||||
let usingFts = false;
|
||||
|
||||
try {
|
||||
db = await getDbConnection();
|
||||
logger.debug(`${FILE_NAME}: Database connection established for complex search`);
|
||||
|
||||
// Check if FTS5 is available
|
||||
const ftsAvailable = await checkFtsAvailable(db);
|
||||
|
||||
if (ftsAvailable) {
|
||||
logger.debug(`${FILE_NAME}: Using FTS5 for complex search`);
|
||||
// Set flag that we're using FTS
|
||||
usingFts = true;
|
||||
// Pass db connection to searchRulesComplexFTS and let that function manage it
|
||||
const results = await searchRulesComplexFTS(parsedQuery, limit, offset, db);
|
||||
return results;
|
||||
}
|
||||
|
||||
logger.debug(`${FILE_NAME}: FTS5 not available, using legacy complex search method`);
|
||||
|
||||
// Build the SQL query based on the conditions
|
||||
const { sqlQuery, sqlCountQuery, params } = buildComplexSqlQuery(parsedQuery, limit, offset);
|
||||
|
||||
logger.debug(`${FILE_NAME}: Executing complex search SQL: ${sqlQuery}`);
|
||||
logger.debug(`${FILE_NAME}: Query parameters: ${JSON.stringify(params)}`);
|
||||
|
||||
// First get the total count of matching results
|
||||
const countResult = await new Promise((resolve, reject) => {
|
||||
db.get(sqlCountQuery, params.slice(0, params.length - 2), (err, row) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: Complex search count query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(row || { count: 0 });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const totalCount = countResult.count;
|
||||
logger.debug(`${FILE_NAME}: Total matching rules for complex query: ${totalCount}`);
|
||||
|
||||
// Now get the actual results with pagination
|
||||
const results = await new Promise((resolve, reject) => {
|
||||
db.all(sqlQuery, params, (err, rows) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: Complex search query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
logger.debug(`${FILE_NAME}: Complex search query returned ${rows ? rows.length : 0} results`);
|
||||
resolve(rows || []);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Format the results
|
||||
const formattedResults = results.map(r => ({
|
||||
id: r.rule_id,
|
||||
title: r.title || r.rule_id
|
||||
}));
|
||||
|
||||
logger.debug(`${FILE_NAME}: Returning ${formattedResults.length} results for complex search`);
|
||||
|
||||
return {
|
||||
results: formattedResults,
|
||||
totalCount
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${FILE_NAME}: Error in complex search operation: ${error.message}`);
|
||||
logger.debug(`${FILE_NAME}: Complex search error stack: ${error.stack}`);
|
||||
return { results: [], totalCount: 0 };
|
||||
} finally {
|
||||
// IMPORTANT: Only close the db connection if we're not using FTS
|
||||
// When using FTS, let searchRulesComplexFTS manage the connection
|
||||
if (db && !usingFts) {
|
||||
try {
|
||||
await new Promise((resolve) => db.close(() => resolve()));
|
||||
logger.debug(`${FILE_NAME}: Database connection closed after complex search operation`);
|
||||
} catch (closeError) {
|
||||
logger.error(`${FILE_NAME}: Error closing database after complex search: ${closeError.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for Sigma rules using complex query conditions with FTS5
|
||||
* Uses the FTS5 virtual table for faster text searching
|
||||
*
|
||||
* @param {Object} parsedQuery - The parsed query object
|
||||
* @param {number} limit - Maximum number of results to return
|
||||
* @param {number} offset - Number of results to skip (for pagination)
|
||||
* @param {Object} providedDb - Database connection (optional, will create one if not provided)
|
||||
* @returns {Promise<Object>} Object with results array and total count
|
||||
*/
|
||||
async function searchRulesComplexFTS(parsedQuery, limit = 10, offset = 0, providedDb = null) {
|
||||
if (!parsedQuery || !parsedQuery.valid) {
|
||||
logger.warn(`${FILE_NAME}: Invalid query object provided for FTS complex search`);
|
||||
return { results: [], totalCount: 0 };
|
||||
}
|
||||
|
||||
logger.info(`${FILE_NAME}: Performing complex FTS search with ${parsedQuery.conditions.length} conditions`);
|
||||
|
||||
let db;
|
||||
let shouldCloseDb = false;
|
||||
|
||||
try {
|
||||
// Use provided db connection or create a new one
|
||||
if (providedDb) {
|
||||
db = providedDb;
|
||||
} else {
|
||||
db = await getDbConnection();
|
||||
shouldCloseDb = true;
|
||||
logger.debug(`${FILE_NAME}: Created new database connection for complex FTS search`);
|
||||
}
|
||||
|
||||
// Build FTS query from conditions
|
||||
const { ftsQuery, whereClause, params } = buildComplexFtsQuery(parsedQuery);
|
||||
|
||||
logger.debug(`${FILE_NAME}: FTS query: "${ftsQuery}", additional where: ${whereClause ? whereClause : 'none'}`);
|
||||
logger.debug(`${FILE_NAME}: Query parameters: ${JSON.stringify(params)}`);
|
||||
|
||||
// Build count query
|
||||
let countQuery;
|
||||
let countParams;
|
||||
|
||||
if (whereClause) {
|
||||
countQuery = `
|
||||
SELECT COUNT(*) as count
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
AND ${whereClause}
|
||||
`;
|
||||
countParams = [ftsQuery, ...params];
|
||||
} else {
|
||||
countQuery = `
|
||||
SELECT COUNT(*) as count
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
`;
|
||||
countParams = [ftsQuery];
|
||||
}
|
||||
|
||||
// Get total count
|
||||
const countResult = await new Promise((resolve, reject) => {
|
||||
db.get(countQuery, countParams, (err, row) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: Complex FTS count query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(row || { count: 0 });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const totalCount = countResult.count;
|
||||
logger.debug(`${FILE_NAME}: Total matching rules for complex FTS query: ${totalCount}`);
|
||||
|
||||
// Build results query with pagination
|
||||
let searchQuery;
|
||||
let searchParams;
|
||||
|
||||
if (whereClause) {
|
||||
searchQuery = `
|
||||
SELECT rule_id, title
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
AND ${whereClause}
|
||||
ORDER BY rank
|
||||
LIMIT ? OFFSET ?
|
||||
`;
|
||||
searchParams = [ftsQuery, ...params, limit, offset];
|
||||
} else {
|
||||
searchQuery = `
|
||||
SELECT rule_id, title
|
||||
FROM rule_search
|
||||
WHERE rule_search MATCH ?
|
||||
ORDER BY rank
|
||||
LIMIT ? OFFSET ?
|
||||
`;
|
||||
searchParams = [ftsQuery, limit, offset];
|
||||
}
|
||||
|
||||
// Get paginated results
|
||||
const results = await new Promise((resolve, reject) => {
|
||||
db.all(searchQuery, searchParams, (err, rows) => {
|
||||
if (err) {
|
||||
logger.error(`${FILE_NAME}: Complex FTS search query error: ${err.message}`);
|
||||
reject(err);
|
||||
} else {
|
||||
logger.debug(`${FILE_NAME}: Complex FTS search query returned ${rows ? rows.length : 0} results`);
|
||||
resolve(rows || []);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Format the results
|
||||
const formattedResults = results.map(r => ({
|
||||
id: r.rule_id,
|
||||
title: r.title || r.rule_id
|
||||
}));
|
||||
|
||||
logger.debug(`${FILE_NAME}: Returning ${formattedResults.length} results for complex FTS search`);
|
||||
|
||||
return {
|
||||
results: formattedResults,
|
||||
totalCount
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${FILE_NAME}: Error in complex FTS search operation: ${error.message}`);
|
||||
logger.debug(`${FILE_NAME}: Complex FTS search error stack: ${error.stack}`);
|
||||
return { results: [], totalCount: 0 };
|
||||
} finally {
|
||||
// Only close the database if we created it AND we're not in the middle of a transaction
|
||||
if (db && shouldCloseDb) {
|
||||
try {
|
||||
await db.close();
|
||||
logger.debug(`${FILE_NAME}: Database connection closed after complex FTS search`);
|
||||
} catch (closeError) {
|
||||
logger.error(`${FILE_NAME}: Error closing database after complex FTS search: ${closeError.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build FTS query and WHERE clause from parsed query conditions
|
||||
*
|
||||
* @param {Object} parsedQuery - The parsed query object
|
||||
* @returns {Object} Object with FTS query, additional WHERE clause, and parameters
|
||||
*/
|
||||
function buildComplexFtsQuery(parsedQuery) {
|
||||
const { conditions, operator } = parsedQuery;
|
||||
|
||||
// Separate text search conditions from other conditions
|
||||
const textConditions = [];
|
||||
const nonTextConditions = [];
|
||||
|
||||
for (const condition of conditions) {
|
||||
switch (condition.field) {
|
||||
case 'title':
|
||||
case 'description':
|
||||
case 'author':
|
||||
case 'tags':
|
||||
case 'keyword':
|
||||
// These can be handled by FTS directly
|
||||
textConditions.push(condition);
|
||||
break;
|
||||
default:
|
||||
// These need additional WHERE clauses
|
||||
nonTextConditions.push(condition);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Build FTS MATCH query
|
||||
let ftsQueryParts = [];
|
||||
|
||||
for (const condition of textConditions) {
|
||||
let fieldPrefix = '';
|
||||
|
||||
// Add field-specific prefix if available
|
||||
if (condition.field !== 'keyword') {
|
||||
fieldPrefix = `${condition.field}:`;
|
||||
}
|
||||
|
||||
// Add wildcard for partial matching if not already present
|
||||
let value = condition.value.trim();
|
||||
if (!value.endsWith('*')) {
|
||||
value = `${value}*`;
|
||||
}
|
||||
|
||||
ftsQueryParts.push(`${fieldPrefix}${value}`);
|
||||
}
|
||||
|
||||
// If no text conditions, use a match-all query
|
||||
const ftsQuery = ftsQueryParts.length > 0
|
||||
? ftsQueryParts.join(operator === 'AND' ? ' AND ' : ' OR ')
|
||||
: '*';
|
||||
|
||||
// Build additional WHERE clauses for non-text conditions
|
||||
let whereClauseParts = [];
|
||||
const params = [];
|
||||
|
||||
for (const condition of nonTextConditions) {
|
||||
switch (condition.field) {
|
||||
case 'date':
|
||||
const dateOperator = condition.operator === 'after' ? '>' :
|
||||
condition.operator === 'before' ? '<' : '=';
|
||||
whereClauseParts.push(`date ${dateOperator} date(?)`);
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'level':
|
||||
whereClauseParts.push(`level = ?`);
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'logsource':
|
||||
whereClauseParts.push(`logsource LIKE ?`);
|
||||
params.push(`%${condition.subfield}%${condition.value}%`);
|
||||
break;
|
||||
|
||||
case 'id':
|
||||
whereClauseParts.push(`rule_id = ?`);
|
||||
params.push(condition.value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Combine WHERE clauses
|
||||
const whereClause = whereClauseParts.length > 0
|
||||
? whereClauseParts.join(operator === 'AND' ? ' AND ' : ' OR ')
|
||||
: '';
|
||||
|
||||
return { ftsQuery, whereClause, params };
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the SQL query for complex search based on parsed conditions
|
||||
*
|
||||
* @param {Object} parsedQuery - The parsed query object
|
||||
* @param {number} limit - Results limit
|
||||
* @param {number} offset - Results offset
|
||||
* @returns {Object} Object with SQL query, count query, and parameters
|
||||
*/
|
||||
function buildComplexSqlQuery(parsedQuery, limit, offset) {
|
||||
const { conditions, operator } = parsedQuery;
|
||||
const params = [];
|
||||
|
||||
// Start building the primary table selection
|
||||
let sqlSelectPart = `
|
||||
SELECT DISTINCT r.id as rule_id,
|
||||
(SELECT param_value FROM rule_parameters WHERE rule_id = r.id AND param_name = 'title' LIMIT 1) as title
|
||||
FROM sigma_rules r
|
||||
`;
|
||||
|
||||
// Build WHERE clause based on conditions
|
||||
let whereClauses = [];
|
||||
let joinIdx = 0;
|
||||
|
||||
for (const condition of conditions) {
|
||||
let whereClause = '';
|
||||
|
||||
switch (condition.field) {
|
||||
case 'title':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'title'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER(?)) > 0
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'description':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'description'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER(?)) > 0
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'logsource':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'logsource'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER('"${condition.subfield}":"${condition.value}"')) > 0
|
||||
)`;
|
||||
break;
|
||||
|
||||
case 'tags':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'tags'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER(?)) > 0
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'date':
|
||||
joinIdx++;
|
||||
const dateOperator = condition.operator === 'after' ? '>' :
|
||||
condition.operator === 'before' ? '<' : '=';
|
||||
whereClause = `r.date ${dateOperator} date(?)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'author':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'author'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER(?)) > 0
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'level':
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'level'
|
||||
AND LOWER(p${joinIdx}.param_value) = LOWER(?)
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'id':
|
||||
whereClause = `LOWER(r.id) = LOWER(?)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
|
||||
case 'keyword':
|
||||
default:
|
||||
// Default to searching in title
|
||||
joinIdx++;
|
||||
whereClause = `EXISTS (
|
||||
SELECT 1 FROM rule_parameters p${joinIdx}
|
||||
WHERE p${joinIdx}.rule_id = r.id
|
||||
AND p${joinIdx}.param_name = 'title'
|
||||
AND INSTR(LOWER(p${joinIdx}.param_value), LOWER(?)) > 0
|
||||
)`;
|
||||
params.push(condition.value);
|
||||
break;
|
||||
}
|
||||
|
||||
if (whereClause) {
|
||||
whereClauses.push(whereClause);
|
||||
}
|
||||
}
|
||||
|
||||
// Combine the WHERE clauses with the appropriate operator
|
||||
let whereStatement = '';
|
||||
if (whereClauses.length > 0) {
|
||||
const combiner = operator === 'AND' ? ' AND ' : ' OR ';
|
||||
whereStatement = `WHERE ${whereClauses.join(combiner)}`;
|
||||
}
|
||||
|
||||
// Complete queries
|
||||
const sqlQuery = `
|
||||
${sqlSelectPart}
|
||||
${whereStatement}
|
||||
ORDER BY rule_id
|
||||
LIMIT ? OFFSET ?
|
||||
`;
|
||||
|
||||
const sqlCountQuery = `
|
||||
SELECT COUNT(DISTINCT r.id) as count
|
||||
FROM sigma_rules r
|
||||
${whereStatement}
|
||||
`;
|
||||
|
||||
// Add pagination parameters
|
||||
params.push(limit);
|
||||
params.push(offset);
|
||||
|
||||
return { sqlQuery, sqlCountQuery, params };
|
||||
}
|
||||
|
||||
/**
|
||||
* Debug function to retrieve detailed information about a rule's content
|
||||
* Useful for diagnosing issues with rule retrieval and content parsing
|
||||
|
@ -579,7 +1197,11 @@ module.exports = {
|
|||
getAllRuleIds,
|
||||
findRuleById,
|
||||
searchRules,
|
||||
searchRulesFTS,
|
||||
searchRulesComplex,
|
||||
searchRulesComplexFTS,
|
||||
debugRuleContent,
|
||||
getRuleYamlContent,
|
||||
getStatsFromDatabase
|
||||
getStatsFromDatabase,
|
||||
checkFtsAvailable
|
||||
};
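Note: for the same two-condition query, the two query builders above produce different artifacts; a sketch of their expected outputs (derived by tracing the code, values are illustrative only):

// Input, as produced by parseComplexQuery('title contains "ransomware" AND level is "high"'):
const parsedQuery = {
    valid: true,
    conditions: [
        { field: 'title', operator: 'contains', value: 'ransomware' }, // text condition -> FTS MATCH
        { field: 'level', operator: 'equals', value: 'high' }          // non-text condition -> WHERE clause
    ],
    operator: 'AND'
};

// buildComplexFtsQuery(parsedQuery) is expected to yield:
//   ftsQuery    -> 'title:ransomware*'
//   whereClause -> 'level = ?'
//   params      -> ['high']

// buildComplexSqlQuery(parsedQuery, 10, 0) (legacy path, whitespace condensed) is expected to yield:
//   SELECT DISTINCT r.id as rule_id, (...title subquery...) as title
//   FROM sigma_rules r
//   WHERE EXISTS (SELECT 1 FROM rule_parameters p1
//                 WHERE p1.rule_id = r.id AND p1.param_name = 'title'
//                   AND INSTR(LOWER(p1.param_value), LOWER(?)) > 0)
//     AND EXISTS (SELECT 1 FROM rule_parameters p2
//                 WHERE p2.rule_id = r.id AND p2.param_name = 'level'
//                   AND LOWER(p2.param_value) = LOWER(?))
//   ORDER BY rule_id
//   LIMIT ? OFFSET ?
//   params -> ['ransomware', 'high', 10, 0]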