first commit
This commit is contained in:
commit
7988853b57
43 changed files with 8415 additions and 0 deletions
138
src/app.js
Normal file
138
src/app.js
Normal file
|
@ -0,0 +1,138 @@
|
|||
/**
 * app.js
 *
 * Main application file for Fylgja Slack bot.
 * Initializes the Slack Bolt app with a custom ExpressReceiver and
 * registers all sigma slash-command and button-action handlers.
 */
const { App, ExpressReceiver } = require('@slack/bolt');
const fs = require('fs');
const logger = require('./utils/logger');
const { SIGMA_CLI_PATH, SIGMA_CLI_CONFIG, SLACK_CONFIG } = require('./config/appConfig');

const { getFileName } = require('./utils/file_utils');
const FILE_NAME = getFileName(__filename);

// Import individual command handlers.
// sigma_stats_handler was previously require()d lazily inside its command
// callback; it is hoisted here so all handlers load consistently at startup
// and a missing module fails fast instead of on first use.
const sigmaDetailsHandler = require('./handlers/sigma/sigma_details_handler');
const sigmaSearchHandler = require('./handlers/sigma/sigma_search_handler');
const sigmaCreateHandler = require('./handlers/sigma/sigma_create_handler');
const sigmaStatsHandler = require('./handlers/sigma/sigma_stats_handler');
const sigmaActionHandlers = require('./handlers/sigma/sigma_action_handlers');
//const configCommand = require('./commands/config/index.js');
//const alertsCommand = require('./commands/alerts/index.js');
//const caseCommand = require('./commands/case/index.js');
//const statsCommand = require('./commands/stats/index.js');

// Verify sigma-cli is installed; the bot cannot function without it, so
// abort startup rather than fail later on every conversion.
if (!fs.existsSync(SIGMA_CLI_PATH)) {
  logger.error(`Error: Sigma CLI not found at specified path: ${SIGMA_CLI_PATH}`);
  process.exit(1);
}

// Log the loaded configuration
logger.info(`Loaded sigma-cli configuration: ${JSON.stringify(SIGMA_CLI_CONFIG)}`);

/**
 * Create a custom ExpressReceiver for more control over the HTTP server
 */
const expressReceiver = new ExpressReceiver({
  signingSecret: SLACK_CONFIG.signingSecret,
  processBeforeResponse: true
});

/**
 * Initialize the Slack app with the custom receiver
 */
const app = new App({
  token: SLACK_CONFIG.botToken,
  receiver: expressReceiver
});

// Register individual command handlers for all sigma commands
logger.info('Registering command handlers');

/**
 * Register a sigma slash command with the shared ack/log/error-reporting
 * boilerplate that every handler needs. Previously this block was
 * copy-pasted once per command.
 *
 * @param {string} commandName - Slash command including leading '/' (e.g. '/sigma-search')
 * @param {Object} handler - Handler module exposing handleCommand(command, respond)
 */
function registerSigmaCommand(commandName, handler) {
  const shortName = commandName.substring(1); // drop leading '/' for log messages
  app.command(commandName, async ({ command, ack, respond }) => {
    try {
      // Acknowledge immediately so Slack does not time out the command
      await ack();
      logger.info(`Received ${shortName} command: ${command.text}`);
      await handler.handleCommand(command, respond);
    } catch (error) {
      logger.error(`Error handling ${shortName} command: ${error.message}`);
      logger.debug(`Error stack: ${error.stack}`);
      // Surface the failure to the invoking user only (ephemeral)
      await respond({
        text: `An error occurred: ${error.message}`,
        response_type: 'ephemeral'
      });
    }
  });
}

registerSigmaCommand('/sigma-create', sigmaCreateHandler);
registerSigmaCommand('/sigma-details', sigmaDetailsHandler);
registerSigmaCommand('/sigma-search', sigmaSearchHandler);
registerSigmaCommand('/sigma-stats', sigmaStatsHandler);

// Register all button action handlers from centralized module
sigmaActionHandlers.registerActionHandlers(app);

/**
 * Listen for any message in DMs
 * This allows users to interact with the bot directly
 */
app.message(async ({ message, say }) => {
  // Only respond to DMs (message.channel starts with 'D')
  if (message.channel.startsWith('D')) {
    logger.info(`DM received from user ${message.user}`);
    logger.debug(`DM content: ${message.text}`);

    // For now, we're just logging DMs but not responding
    // Uncomment below to enable responses to DMs
    // await say(`I received your message: "${message.text}"`);
  }
});

/**
 * Export the configured app for use in the main server file
 */
module.exports = app;
|
124
src/blocks/sigma_conversion_block.js
Normal file
124
src/blocks/sigma_conversion_block.js
Normal file
|
@ -0,0 +1,124 @@
|
|||
/**
|
||||
* sigma_conversion_block.js
|
||||
*
|
||||
* Provides block templates for displaying Sigma rule conversion results in Slack
|
||||
*/
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Generate blocks for displaying a Sigma rule conversion result
|
||||
*
|
||||
* @param {Object} conversionResult - The result of the conversion operation
|
||||
* @returns {Array} Array of blocks for Slack message
|
||||
*/
|
||||
function getConversionResultBlocks(conversionResult) {
|
||||
logger.debug(`${FILE_NAME}: Generating blocks for conversion result`);
|
||||
|
||||
if (!conversionResult || !conversionResult.success) {
|
||||
logger.warn(`${FILE_NAME}: Invalid conversion result provided for block generation`);
|
||||
return [{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: 'Error: Failed to generate conversion result blocks'
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
const rule = conversionResult.rule || {
|
||||
id: 'unknown',
|
||||
title: 'Unknown Rule',
|
||||
description: 'No rule metadata available'
|
||||
};
|
||||
|
||||
const details = conversionResult.conversionDetails || {
|
||||
backend: 'lucene',
|
||||
target: 'ecs_windows',
|
||||
format: 'siem_rule_ndjson'
|
||||
};
|
||||
|
||||
// Truncate output if it's too long for Slack
|
||||
let output = conversionResult.output || '';
|
||||
const maxOutputLength = 2900; // Slack has a limit of ~3000 chars in a code block
|
||||
const isTruncated = output.length > maxOutputLength;
|
||||
|
||||
if (isTruncated) {
|
||||
output = output.substring(0, maxOutputLength) + '... [truncated]';
|
||||
}
|
||||
|
||||
// Create the blocks
|
||||
const blocks = [
|
||||
{
|
||||
type: 'header',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: `Converted Rule: ${rule.title}`,
|
||||
emoji: true
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Rule ID:* ${rule.id}\n*Description:* ${rule.description}`
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Conversion Settings:*\nBackend: \`${details.backend}\` | Target: \`${details.target}\` | Format: \`${details.format}\``
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'divider'
|
||||
},
|
||||
{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Converted Output:*${isTruncated ? ' (truncated for display)' : ''}\n\`\`\`\n${output}\n\`\`\``
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
// Action buttons
|
||||
blocks.push({
|
||||
type: 'actions',
|
||||
elements: [
|
||||
{
|
||||
type: 'button',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: 'send_sigma_rule_to_siem',
|
||||
emoji: true
|
||||
},
|
||||
value: `send_sigma_rule_to_siem_${rule.id}`,
|
||||
action_id: 'send_sigma_rule_to_siem'
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
// Warning if output was truncated
|
||||
if (isTruncated) {
|
||||
blocks.push({
|
||||
type: 'context',
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: ':warning: The output was truncated for display in Slack. Use the copy button to get the full content.'
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
logger.debug(`${FILE_NAME}: Generated ${blocks.length} blocks for conversion result`);
|
||||
return blocks;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getConversionResultBlocks
|
||||
};
|
298
src/blocks/sigma_details_block.js
Normal file
298
src/blocks/sigma_details_block.js
Normal file
|
@ -0,0 +1,298 @@
|
|||
/**
|
||||
* sigma_details_block.js
|
||||
*
|
||||
* Creates Slack Block Kit blocks for displaying Sigma rule explanations
|
||||
*
|
||||
*/
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Create Slack block kit blocks for rule explanation
|
||||
*
|
||||
* @param {Object} details - The rule details object containing all rule metadata
|
||||
* @returns {Array} Formatted Slack blocks ready for display
|
||||
*/
|
||||
function getRuleExplanationBlocks(details) {
|
||||
logger.debug(`${FILE_NAME}: Creating rule explanation blocks for rule: ${details?.id || 'unknown'}`);
|
||||
|
||||
if (!details) {
|
||||
logger.error('Failed to create explanation blocks: No details object provided');
|
||||
return [
|
||||
{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: 'Error: No explanation data provided'
|
||||
}
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
// Map severity levels to emojis for visual representation
|
||||
const severityConfig = {
|
||||
'critical': { emoji: '🔴', text: 'Critical' },
|
||||
'high': { emoji: '🟠', text: 'High' },
|
||||
'medium': { emoji: '🟡', text: 'Medium' },
|
||||
'low': { emoji: '🟢', text: 'Low' },
|
||||
'informational': { emoji: '🔵', text: 'Info' }
|
||||
};
|
||||
|
||||
// Normalize severity to lowercase for matching
|
||||
const normalizedSeverity = (details.severity || '').toLowerCase();
|
||||
const severityInfo = severityConfig[normalizedSeverity] || { emoji: '⚪', text: details.severity || 'Unknown' };
|
||||
|
||||
// Create a formatted severity indicator
|
||||
const severityDisplay = `${severityInfo.emoji} *${severityInfo.text}*`;
|
||||
|
||||
logger.debug(`Rule severity: ${normalizedSeverity} (${severityDisplay})`);
|
||||
|
||||
/**
|
||||
* Format tags with MITRE links where applicable
|
||||
*
|
||||
* @param {Array} tags - Array of tag strings to format
|
||||
* @returns {Array} Array of formatted tags with links where appropriate
|
||||
*/
|
||||
const formatTags = (tags = []) => {
|
||||
if (!tags || tags.length === 0 || (tags.length === 2 && tags.includes('error'))) {
|
||||
logger.debug('No valid tags to format');
|
||||
return [];
|
||||
}
|
||||
|
||||
logger.debug(`Formatting ${tags.length} tags`);
|
||||
|
||||
return tags.map(tag => {
|
||||
let formattedTag = tag.trim();
|
||||
let url = '';
|
||||
let displayText = formattedTag;
|
||||
|
||||
// Handle MITRE ATT&CK Technique IDs
|
||||
if (/^T\d{4}(\.\d{3})?$/.test(formattedTag)) {
|
||||
// Technique ID (e.g., T1234 or T1234.001)
|
||||
url = `https://attack.mitre.org/techniques/${formattedTag}/`;
|
||||
logger.debug(`Formatted MITRE technique: ${formattedTag}`);
|
||||
}
|
||||
// Handle MITRE ATT&CK Tactic IDs
|
||||
else if (/^TA\d{4}$/.test(formattedTag)) {
|
||||
// Tactic ID (e.g., TA0001)
|
||||
url = `https://attack.mitre.org/tactics/${formattedTag}/`;
|
||||
logger.debug(`Formatted MITRE tactic: ${formattedTag}`);
|
||||
}
|
||||
// Handle CWE IDs
|
||||
else if (/^S\d{4}$/.test(formattedTag)) {
|
||||
// CWE ID
|
||||
url = `https://cwe.mitre.org/data/definitions/${formattedTag.substring(1)}.html`;
|
||||
logger.debug(`Formatted CWE: ${formattedTag}`);
|
||||
}
|
||||
// Handle attack.* tactics
|
||||
else if (formattedTag.startsWith('attack.')) {
|
||||
const tacticName = formattedTag.substring(7); // Remove 'attack.' prefix
|
||||
|
||||
// Handle specific techniques with T#### format
|
||||
if (/^t\d{4}(\.\d{3})?$/.test(tacticName)) {
|
||||
const techniqueId = tacticName.toUpperCase();
|
||||
url = `https://attack.mitre.org/techniques/${techniqueId}/`;
|
||||
displayText = techniqueId;
|
||||
logger.debug(`Formatted MITRE technique from attack. format: ${techniqueId}`);
|
||||
}
|
||||
// Handle regular tactics
|
||||
else {
|
||||
// Map common tactics to their MITRE ATT&CK IDs
|
||||
const tacticMappings = {
|
||||
'reconnaissance': 'TA0043',
|
||||
'resourcedevelopment': 'TA0042',
|
||||
'initialaccess': 'TA0001',
|
||||
'execution': 'TA0002',
|
||||
'persistence': 'TA0003',
|
||||
'privilegeescalation': 'TA0004',
|
||||
'defenseevasion': 'TA0005',
|
||||
'credentialaccess': 'TA0006',
|
||||
'discovery': 'TA0007',
|
||||
'lateralmovement': 'TA0008',
|
||||
'collection': 'TA0009',
|
||||
'command-and-control': 'TA0011',
|
||||
'exfiltration': 'TA0010',
|
||||
'impact': 'TA0040'
|
||||
};
|
||||
|
||||
// Remove hyphens and convert to lowercase for matching
|
||||
const normalizedTactic = tacticName.toLowerCase().replace(/-/g, '');
|
||||
|
||||
if (tacticMappings[normalizedTactic]) {
|
||||
url = `https://attack.mitre.org/tactics/${tacticMappings[normalizedTactic]}/`;
|
||||
logger.debug(`Mapped tactic ${tacticName} to ${tacticMappings[normalizedTactic]}`);
|
||||
} else {
|
||||
// If we don't have a specific mapping, try a search
|
||||
url = `https://attack.mitre.org/search/?q=${encodeURIComponent(tacticName)}`;
|
||||
logger.debug(`Created search URL for unmapped tactic: ${tacticName}`);
|
||||
}
|
||||
|
||||
// Format the display text with proper capitalization
|
||||
displayText = tacticName.replace(/-/g, ' ')
|
||||
.split(' ')
|
||||
.map(word => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
}
|
||||
// Handle CVE IDs
|
||||
else if (/^CVE-\d{4}-\d{4,}$/i.test(formattedTag)) {
|
||||
url = `https://nvd.nist.gov/vuln/detail/${formattedTag.toUpperCase()}`;
|
||||
displayText = formattedTag.toUpperCase();
|
||||
logger.debug(`Formatted CVE: ${displayText}`);
|
||||
}
|
||||
|
||||
if (url) {
|
||||
return `<${url}|${displayText}>`;
|
||||
} else {
|
||||
return displayText;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Define header based on title - check if it contains error messages
|
||||
const isErrorMessage = details.title.toLowerCase().includes('error') ||
|
||||
details.title.toLowerCase().includes('missing');
|
||||
|
||||
if (isErrorMessage) {
|
||||
logger.warn(`Rule appears to have errors: ${details.title}`);
|
||||
}
|
||||
|
||||
// Start with header block
|
||||
const blocks = [
|
||||
{
|
||||
type: 'header',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: details.title || 'Rule Explanation',
|
||||
emoji: true
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'context',
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `ID: ${details.id || 'Unknown'}`
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'section',
|
||||
fields: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Severity:* ${severityDisplay}`
|
||||
},
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Author:* ${details.author || 'Unknown'}`
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
// Add divider for visual separation
|
||||
blocks.push({ type: 'divider' });
|
||||
|
||||
// Add description section
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Description:*\n${details.description || 'No description available'}`
|
||||
}
|
||||
});
|
||||
|
||||
// Detection explanation section - only add if not an error case or has useful detection info
|
||||
if (!isErrorMessage || details.detectionExplanation !== 'Content missing') {
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*What This Rule Detects:*\n${details.detectionExplanation || 'No detection information available'}`
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// False positives section - only add if not an error case with N/A values
|
||||
if (details.falsePositives && !details.falsePositives.includes('N/A - Content missing')) {
|
||||
const fpItems = Array.isArray(details.falsePositives)
|
||||
? details.falsePositives.map(item => `• ${item}`).join('\n')
|
||||
: `• ${details.falsePositives}`;
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Possible False Positives:*\n${fpItems || 'None specified'}`
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Add tags if they exist and are formatted
|
||||
const formattedTags = formatTags(details.tags);
|
||||
if (formattedTags.length > 0) {
|
||||
logger.debug(`Added ${formattedTags.length} formatted tags to the block`);
|
||||
blocks.push({
|
||||
type: 'context',
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Tags:* ${formattedTags.join(' | ')}`
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
// If this is an error message, add a troubleshooting section
|
||||
if (isErrorMessage) {
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: ':warning: *Troubleshooting:*\nThis rule appears to have issues in the database. You may want to check the rule import process or run a database maintenance task to fix this issue.'
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Add action buttons for better interactivity
|
||||
blocks.push({
|
||||
type: 'actions',
|
||||
elements: [
|
||||
{
|
||||
type: 'button',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: 'View YAML',
|
||||
emoji: true
|
||||
},
|
||||
action_id: 'view_yaml',
|
||||
value: `view_yaml_${details.id}`
|
||||
},
|
||||
{
|
||||
type: 'button',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: 'Convert to SIEM Rule',
|
||||
emoji: true
|
||||
},
|
||||
action_id: 'convert_rule_to_siem',
|
||||
value: `convert_rule_to_siem_${details.id}`
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
// Add a divider at the end
|
||||
blocks.push({
|
||||
type: 'divider'
|
||||
});
|
||||
|
||||
logger.debug(`Created ${blocks.length} blocks for rule explanation`);
|
||||
return blocks;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getRuleExplanationBlocks
|
||||
};
|
206
src/blocks/sigma_search_results_block.js
Normal file
206
src/blocks/sigma_search_results_block.js
Normal file
|
@ -0,0 +1,206 @@
|
|||
/**
|
||||
* sigma_search_results_block.js
|
||||
*
|
||||
* Generates Slack Block Kit blocks for displaying Sigma rule search results
|
||||
* Includes pagination controls for navigating large result sets
|
||||
*
|
||||
* @author Fylgja Development Team
|
||||
*/
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Generate blocks for Slack UI to display search results with pagination
|
||||
*
|
||||
* @param {string} keyword - The search keyword used for the query
|
||||
* @param {Array} results - Array of rule results from the search
|
||||
* @param {Object} pagination - Pagination information object
|
||||
* @returns {Array} - Slack blocks for displaying results
|
||||
*/
|
||||
const getSearchResultBlocks = (keyword, results, pagination = {}) => {
|
||||
|
||||
logger.debug(`${FILE_NAME}: Creating search result blocks for keyword: "${keyword}"`);
|
||||
|
||||
// Add debug for input validation
|
||||
logger.debug(`${FILE_NAME}: Results type: ${typeof results}, isArray: ${Array.isArray(results)}, length: ${Array.isArray(results) ? results.length : 'N/A'}`);
|
||||
logger.debug(`${FILE_NAME}: Pagination: ${JSON.stringify(pagination)}`);
|
||||
|
||||
// Ensure results is always an array
|
||||
const safeResults = Array.isArray(results) ? results : [];
|
||||
|
||||
// Default pagination values if not provided
|
||||
const pagingInfo = {
|
||||
currentPage: pagination.currentPage || 1,
|
||||
pageSize: pagination.pageSize || 10,
|
||||
totalPages: pagination.totalPages || 0,
|
||||
totalResults: pagination.totalResults || 0,
|
||||
hasMore: pagination.hasMore || false
|
||||
};
|
||||
|
||||
logger.debug(`${FILE_NAME}: Processing ${safeResults.length} search results (page ${pagingInfo.currentPage} of ${pagingInfo.totalPages}, total: ${pagingInfo.totalResults})`);
|
||||
|
||||
// Initialize with header block that includes pagination info
|
||||
const blocks = [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": `*Search Results for "${keyword}"*\n${
|
||||
pagingInfo.totalResults > 0
|
||||
? `Showing ${safeResults.length} of ${pagingInfo.totalResults} matching rules (page ${pagingInfo.currentPage} of ${pagingInfo.totalPages})`
|
||||
: `Found ${safeResults.length} matching rules:`
|
||||
}`
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
// Debug log as we build blocks
|
||||
logger.debug(`${FILE_NAME}: Added header block`);
|
||||
|
||||
// Add blocks for each result if we have any
|
||||
if (safeResults.length === 0) {
|
||||
logger.debug(`${FILE_NAME}: No search results to display`);
|
||||
blocks.push({
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": pagingInfo.totalResults > 0
|
||||
? "No rules on this page. Try a different page."
|
||||
: "No matching rules found."
|
||||
}
|
||||
});
|
||||
} else {
|
||||
logger.debug(`${FILE_NAME}: Creating blocks for ${safeResults.length} search results`);
|
||||
safeResults.forEach((rule, index) => {
|
||||
// Ensure rule is an object with expected properties
|
||||
const safeRule = rule || {};
|
||||
const ruleId = safeRule.id || 'unknown';
|
||||
logger.debug(`${FILE_NAME}: Adding result #${index + 1}: ${ruleId} - ${safeRule.title || 'Untitled'}`);
|
||||
|
||||
// Combine rule information and action button into a single line
|
||||
blocks.push({
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": `*${safeRule.title || 'Untitled Rule'}*\nID: \`${ruleId}\``
|
||||
},
|
||||
"accessory": {
|
||||
"type": "button",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Details",
|
||||
"emoji": true
|
||||
},
|
||||
"value": ruleId,
|
||||
"action_id": "view_rule_details"
|
||||
}
|
||||
});
|
||||
|
||||
// Add a divider between results (except after the last one)
|
||||
if (index < safeResults.length - 1) {
|
||||
blocks.push({
|
||||
"type": "divider"
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Debug log for pagination controls
|
||||
logger.debug(`${FILE_NAME}: Checking if pagination controls needed (totalPages: ${pagingInfo.totalPages})`);
|
||||
|
||||
// Add pagination navigation if there are multiple pages
|
||||
if (pagingInfo.totalPages > 1) {
|
||||
// Add a divider before pagination controls
|
||||
blocks.push({
|
||||
"type": "divider"
|
||||
});
|
||||
|
||||
// Create pagination navigation buttons
|
||||
const paginationButtons = [];
|
||||
|
||||
// Previous page button (if not on first page)
|
||||
if (pagingInfo.currentPage > 1) {
|
||||
paginationButtons.push({
|
||||
"type": "button",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Previous",
|
||||
"emoji": true
|
||||
},
|
||||
"value": JSON.stringify({
|
||||
keyword,
|
||||
page: pagingInfo.currentPage - 1,
|
||||
pageSize: pagingInfo.pageSize
|
||||
}),
|
||||
"action_id": "search_prev_page"
|
||||
});
|
||||
logger.debug(`${FILE_NAME}: Added Previous page button for page ${pagingInfo.currentPage - 1}`);
|
||||
}
|
||||
|
||||
// Next page button (if there are more pages)
|
||||
if (pagingInfo.hasMore) {
|
||||
paginationButtons.push({
|
||||
"type": "button",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Next",
|
||||
"emoji": true
|
||||
},
|
||||
"value": JSON.stringify({
|
||||
keyword,
|
||||
page: pagingInfo.currentPage + 1,
|
||||
pageSize: pagingInfo.pageSize
|
||||
}),
|
||||
"action_id": "search_next_page"
|
||||
});
|
||||
logger.debug(`${FILE_NAME}: Added Next page button for page ${pagingInfo.currentPage + 1}`);
|
||||
}
|
||||
|
||||
// Add the pagination buttons block if we have buttons to show
|
||||
if (paginationButtons.length > 0) {
|
||||
blocks.push({
|
||||
"type": "actions",
|
||||
"elements": paginationButtons
|
||||
});
|
||||
logger.debug(`${FILE_NAME}: Added ${paginationButtons.length} pagination buttons`);
|
||||
}
|
||||
|
||||
// Add page indicator text
|
||||
blocks.push({
|
||||
"type": "context",
|
||||
"elements": [
|
||||
{
|
||||
"type": "plain_text",
|
||||
"text": `Page ${pagingInfo.currentPage} of ${pagingInfo.totalPages}`,
|
||||
"emoji": true
|
||||
}
|
||||
]
|
||||
});
|
||||
logger.debug(`${FILE_NAME}: Added page indicator text`);
|
||||
}
|
||||
|
||||
logger.debug(`${FILE_NAME}: Created ${blocks.length} blocks for search results`);
|
||||
|
||||
// Final validation of blocks array
|
||||
if (!Array.isArray(blocks) || blocks.length === 0) {
|
||||
logger.error(`${FILE_NAME}: Generated blocks is not a valid array or is empty`);
|
||||
// Return a minimal valid blocks array
|
||||
return [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": `Search Results for "${keyword}": Unable to generate proper blocks. Please try again.`
|
||||
}
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
return blocks;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
getSearchResultBlocks
|
||||
};
|
269
src/blocks/sigma_stats_block.js
Normal file
269
src/blocks/sigma_stats_block.js
Normal file
|
@ -0,0 +1,269 @@
|
|||
/**
|
||||
* sigma_stats_block.js
|
||||
*
|
||||
* Creates Slack Block Kit blocks for displaying Sigma rule database statistics
|
||||
*/
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Create Slack block kit blocks for statistics display
|
||||
*
|
||||
* @param {Object} stats - The statistics object with all statistical data
|
||||
* @returns {Array} Formatted Slack blocks ready for display
|
||||
*/
|
||||
function getStatsBlocks(stats) {
|
||||
logger.debug(`${FILE_NAME}: Creating statistics display blocks`);
|
||||
|
||||
if (!stats) {
|
||||
logger.error(`${FILE_NAME}: Failed to create statistics blocks: No stats object provided`);
|
||||
return [
|
||||
{
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: 'Error: No statistics data provided'
|
||||
}
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
// Format the date for display
|
||||
const formatDate = (dateString) => {
|
||||
if (!dateString) return 'Unknown';
|
||||
|
||||
try {
|
||||
const date = new Date(dateString);
|
||||
return date.toLocaleString();
|
||||
} catch (error) {
|
||||
return dateString;
|
||||
}
|
||||
};
|
||||
|
||||
// Start with header block
|
||||
const blocks = [
|
||||
{
|
||||
type: 'header',
|
||||
text: {
|
||||
type: 'plain_text',
|
||||
text: 'Sigma Rule Database Statistics',
|
||||
emoji: true
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'context',
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `Last updated: ${formatDate(stats.lastUpdate)}`
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
// Add divider for visual separation
|
||||
blocks.push({ type: 'divider' });
|
||||
|
||||
// Overall statistics section
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: '*Overall Statistics*'
|
||||
}
|
||||
});
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
fields: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Total Rules:* ${stats.totalRules.toLocaleString()}`
|
||||
},
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Database Health:* ${stats.databaseHealth.contentPercentage}% Complete`
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
// Add divider for visual separation
|
||||
blocks.push({ type: 'divider' });
|
||||
|
||||
// Operating system breakdown
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: '*Rules by Operating System*'
|
||||
}
|
||||
});
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
fields: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Windows:* ${stats.operatingSystems.windows.toLocaleString()} rules`
|
||||
},
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Linux:* ${stats.operatingSystems.linux.toLocaleString()} rules`
|
||||
},
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*macOS:* ${stats.operatingSystems.macos.toLocaleString()} rules`
|
||||
},
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: `*Other/Unknown:* ${stats.operatingSystems.other.toLocaleString()} rules`
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
// Add divider for visual separation
|
||||
blocks.push({ type: 'divider' });
|
||||
|
||||
// Severity breakdown
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: '*Rules by Severity Level*'
|
||||
}
|
||||
});
|
||||
|
||||
// Create a colorful representation of severity levels
|
||||
const severityEmoji = {
|
||||
'critical': '🔴',
|
||||
'high': '🟠',
|
||||
'medium': '🟡',
|
||||
'low': '🟢',
|
||||
'informational': '🔵'
|
||||
};
|
||||
|
||||
let severityFields = [];
|
||||
stats.severityLevels.forEach(level => {
|
||||
const emoji = severityEmoji[level.level?.toLowerCase()] || '⚪';
|
||||
severityFields.push({
|
||||
type: 'mrkdwn',
|
||||
text: `*${emoji} ${level.level ? (level.level.charAt(0).toUpperCase() + level.level.slice(1)) : 'Unknown'}:* ${level.count.toLocaleString()} rules`
|
||||
});
|
||||
});
|
||||
|
||||
// Ensure we have an even number of fields for layout
|
||||
if (severityFields.length % 2 !== 0) {
|
||||
severityFields.push({
|
||||
type: 'mrkdwn',
|
||||
text: ' ' // Empty space to balance fields
|
||||
});
|
||||
}
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
fields: severityFields
|
||||
});
|
||||
|
||||
// Add divider for visual separation
|
||||
blocks.push({ type: 'divider' });
|
||||
|
||||
// Top MITRE ATT&CK tactics
|
||||
if (stats.mitreTactics && stats.mitreTactics.length > 0) {
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: '*Top MITRE ATT&CK Tactics*'
|
||||
}
|
||||
});
|
||||
|
||||
const mitreFields = stats.mitreTactics.map(tactic => {
|
||||
// Format tactic name for better readability
|
||||
const formattedTactic = tactic.tactic
|
||||
.replace(/-/g, ' ')
|
||||
.split(' ')
|
||||
.map(word => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
|
||||
return {
|
||||
type: 'mrkdwn',
|
||||
text: `*${formattedTactic}:* ${tactic.count.toLocaleString()} rules`
|
||||
};
|
||||
});
|
||||
|
||||
// Split into multiple sections if needed for layout
|
||||
for (let i = 0; i < mitreFields.length; i += 2) {
|
||||
const sectionFields = mitreFields.slice(i, Math.min(i + 2, mitreFields.length));
|
||||
|
||||
// If we have an odd number at the end, add an empty field
|
||||
if (sectionFields.length === 1) {
|
||||
sectionFields.push({
|
||||
type: 'mrkdwn',
|
||||
text: ' ' // Empty space to balance fields
|
||||
});
|
||||
}
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
fields: sectionFields
|
||||
});
|
||||
}
|
||||
|
||||
blocks.push({ type: 'divider' });
|
||||
}
|
||||
|
||||
// Top authors
|
||||
if (stats.topAuthors && stats.topAuthors.length > 0) {
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: '*Top Rule Authors*'
|
||||
}
|
||||
});
|
||||
|
||||
const authorFields = stats.topAuthors.map(author => ({
|
||||
type: 'mrkdwn',
|
||||
text: `*${author.name || 'Unknown'}:* ${author.count.toLocaleString()} rules`
|
||||
}));
|
||||
|
||||
// Split into multiple sections if needed for layout
|
||||
for (let i = 0; i < authorFields.length; i += 2) {
|
||||
const sectionFields = authorFields.slice(i, Math.min(i + 2, authorFields.length));
|
||||
|
||||
// If we have an odd number at the end, add an empty field
|
||||
if (sectionFields.length === 1) {
|
||||
sectionFields.push({
|
||||
type: 'mrkdwn',
|
||||
text: ' ' // Empty space to balance fields
|
||||
});
|
||||
}
|
||||
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
fields: sectionFields
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add a footer
|
||||
blocks.push({ type: 'divider' });
|
||||
blocks.push({
|
||||
type: 'context',
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: 'Use `/sigma-search [keyword]` to search for specific rules and `/sigma-details [id]` to get detailed information about a rule.'
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
logger.debug(`${FILE_NAME}: Created ${blocks.length} blocks for statistics display`);
|
||||
return blocks;
|
||||
}
|
||||
|
||||
// Public API: expose the statistics block builder for Slack command handlers.
module.exports = {
    getStatsBlocks
};
|
83
src/blocks/sigma_view_yaml_block.js
Normal file
83
src/blocks/sigma_view_yaml_block.js
Normal file
|
@ -0,0 +1,83 @@
|
|||
/**
|
||||
* sigma_view_yaml_block.js
|
||||
*
|
||||
* Creates Slack Block Kit blocks for displaying Sigma rule YAML content
|
||||
*
|
||||
*/
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
 * Create Slack block kit blocks for displaying YAML content
 *
 * @param {string} ruleId - The ID of the rule
 * @param {string} yamlContent - The YAML content to display
 * @returns {Array} Formatted Slack blocks ready for display
 */
function getYamlViewBlocks(ruleId, yamlContent) {
    logger.debug(`${FILE_NAME}: Creating YAML view blocks for rule: ${ruleId || 'unknown'}`);

    // Header block is identical for both the empty and populated cases.
    const headerBlock = {
        type: 'header',
        text: {
            type: 'plain_text',
            text: `YAML for Rule: ${ruleId}`,
            emoji: true
        }
    };

    if (!yamlContent) {
        logger.warn(`${FILE_NAME}: Empty YAML content for rule: ${ruleId}`);
        return [
            headerBlock,
            {
                type: 'section',
                text: {
                    type: 'mrkdwn',
                    text: '_No YAML content available_'
                }
            }
        ];
    }

    // Ensure the closing code fence sits on its own line. The previous
    // '```\n' + yamlContent + '```' form fused the closing backticks onto
    // the last YAML line whenever the content lacked a trailing newline,
    // which breaks Slack's mrkdwn code-block rendering.
    const fencedYaml = yamlContent.endsWith('\n')
        ? '```\n' + yamlContent + '```'
        : '```\n' + yamlContent + '\n```';

    const blocks = [
        headerBlock,
        {
            type: 'section',
            text: {
                type: 'mrkdwn',
                text: fencedYaml
            }
        }
    ];

    // Offer a one-click conversion of this rule to the configured SIEM format.
    blocks.push({
        type: 'actions',
        elements: [
            {
                type: 'button',
                text: {
                    type: 'plain_text',
                    text: 'Convert to SIEM Rule',
                    emoji: true
                },
                action_id: 'convert_rule_to_siem',
                value: `convert_rule_to_siem_${ruleId}`
            }
        ]
    });

    logger.debug(`${FILE_NAME}: Created ${blocks.length} blocks for YAML view`);
    return blocks;
}
|
||||
|
||||
// Public API: expose the YAML view block builder for Slack action handlers.
module.exports = {
    getYamlViewBlocks
};
|
83
src/config/appConfig.js
Normal file
83
src/config/appConfig.js
Normal file
|
@ -0,0 +1,83 @@
|
|||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const yaml = require('js-yaml');
|
||||
|
||||
// Load YAML configuration file
|
||||
let yamlConfig = {};
|
||||
try {
|
||||
const configPath = path.join(__dirname, '..', '..', 'fylgja.yml');
|
||||
const fileContents = fs.readFileSync(configPath, 'utf8');
|
||||
yamlConfig = yaml.load(fileContents);
|
||||
console.log('Successfully loaded fylgja.yml configuration');
|
||||
} catch (error) {
|
||||
console.error(`Error loading fylgja.yml: ${error.message}`);
|
||||
console.log('Using default configuration values');
|
||||
// Default values will be used if file cannot be loaded
|
||||
}
|
||||
|
||||
// Base directory for resolving relative paths from config
|
||||
const baseDir = path.join(__dirname, '..', '..');
|
||||
|
||||
// Configuration paths
|
||||
module.exports = {
|
||||
|
||||
SLACK_CONFIG: {
|
||||
botToken: yamlConfig?.slack?.bot_token || process.env.SLACK_BOT_TOKEN,
|
||||
signingSecret: yamlConfig?.slack?.signing_secret || process.env.SLACK_SIGNING_SECRET
|
||||
},
|
||||
|
||||
// Server configuration from YAML (with fallback to env vars)
|
||||
SERVER_CONFIG: {
|
||||
port: parseInt(yamlConfig?.server?.port || process.env.PORT || 3000)
|
||||
},
|
||||
|
||||
// Path configurations
|
||||
SIGMA_REPO_DIR: yamlConfig?.paths?.sigma_repo_dir
|
||||
? path.resolve(baseDir, yamlConfig.paths.sigma_repo_dir)
|
||||
: path.join(baseDir, 'sigma-repo'),
|
||||
|
||||
DB_PATH: yamlConfig?.paths?.db_path
|
||||
? path.resolve(baseDir, yamlConfig.paths.db_path)
|
||||
: path.resolve(baseDir, 'sigma.db'),
|
||||
|
||||
// Load SIGMA_CLI_PATH from YAML, env, or use default path
|
||||
SIGMA_CLI_PATH: yamlConfig?.sigma?.['sigma-cli']?.path
|
||||
? path.resolve(baseDir, yamlConfig.sigma['sigma-cli'].path)
|
||||
: path.join(process.env.VIRTUAL_ENV || './.venv', 'bin', 'sigma'),
|
||||
|
||||
// Sigma CLI configuration from YAML
|
||||
SIGMA_CLI_CONFIG: {
|
||||
backend: yamlConfig?.sigma?.['sigma-cli']?.backend || "lucene",
|
||||
target: yamlConfig?.sigma?.['sigma-cli']?.target || "ecs_windows",
|
||||
format: yamlConfig?.sigma?.['sigma-cli']?.format || "siem_rule_ndjson"
|
||||
},
|
||||
|
||||
// Sigma Repository configuration from YAML
|
||||
SIGMA_REPO_CONFIG: {
|
||||
url: yamlConfig?.sigma?.repo?.url || "https://github.com/SigmaHQ/sigma.git",
|
||||
branch: yamlConfig?.sigma?.repo?.branch || "main"
|
||||
},
|
||||
|
||||
// Elasticsearch configuration from YAML
|
||||
ELASTICSEARCH_CONFIG: {
|
||||
apiEndpoint: yamlConfig?.elastic?.['api-endpoint'] ||
|
||||
"http://localhost:5601/api/detection_engine/rules",
|
||||
credentials: yamlConfig?.elastic?.['elastic-authentication-credentials'] ||
|
||||
"elastic:changeme"
|
||||
},
|
||||
|
||||
// Logging configuration from YAML
|
||||
LOGGING_CONFIG: {
|
||||
level: yamlConfig?.logging?.level || "info",
|
||||
file: yamlConfig?.logging?.file || "./logs/fylgja.log"
|
||||
},
|
||||
|
||||
// Default configuration (fallback)
|
||||
DEFAULT_CONFIG: {
|
||||
siem: 'elasticsearch',
|
||||
lang: 'lucene',
|
||||
output: 'ndjson',
|
||||
repoUrl: yamlConfig?.sigma?.repo?.url || "https://github.com/SigmaHQ/sigma.git",
|
||||
repoBranch: yamlConfig?.sigma?.repo?.branch || "main"
|
||||
}
|
||||
};
|
0
src/handlers/alerts/alerts_handler.js
Normal file
0
src/handlers/alerts/alerts_handler.js
Normal file
0
src/handlers/case/case_handler.js
Normal file
0
src/handlers/case/case_handler.js
Normal file
88
src/handlers/config/config_handler.js
Normal file
88
src/handlers/config/config_handler.js
Normal file
|
@ -0,0 +1,88 @@
|
|||
//
|
||||
// config_handler.js
|
||||
// handle the /sigma-config command
|
||||
//
|
||||
const util = require('util');
|
||||
const { exec } = require('child_process');
|
||||
const { SIGMA_CLI_PATH } = require('../../config/constants');
|
||||
const { loadConfig, updateConfig } = require('../../config/config-manager');
|
||||
const { updateSigmaDatabase } = require('../../services/sigma/sigma_repository_service');
|
||||
const logger = require('../../utils/logger');
|
||||
|
||||
// Promisify exec for async/await usage
|
||||
const execPromise = util.promisify(exec);
|
||||
|
||||
module.exports = (app) => {
|
||||
app.command('/sigma-config', async ({ command, ack, respond }) => {
|
||||
await ack();
|
||||
logger.info(`Sigma config command received: ${command.text}`);
|
||||
|
||||
const args = command.text.split(' ');
|
||||
|
||||
if (args.length === 0 || args[0] === '') {
|
||||
// Display current configuration
|
||||
const config = loadConfig();
|
||||
logger.info('Displaying current configuration');
|
||||
await respond(`Current configuration:\nSIEM: ${config.siem}\nLanguage: ${config.lang}\nOutput: ${config.output}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const configType = args[0];
|
||||
|
||||
if (configType === 'update') {
|
||||
logger.info('Starting database update from command');
|
||||
try {
|
||||
await respond('Updating Sigma database... This may take a moment.');
|
||||
await updateSigmaDatabase();
|
||||
logger.info('Database update completed from command');
|
||||
await respond('Sigma database updated successfully');
|
||||
} catch (error) {
|
||||
logger.error(`Database update failed: ${error.message}`);
|
||||
await respond(`Error updating Sigma database: ${error.message}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (args.length < 2) {
|
||||
logger.warn(`Invalid config command format: ${command.text}`);
|
||||
await respond(`Invalid command format. Usage: /sigma-config ${configType} [value]`);
|
||||
return;
|
||||
}
|
||||
|
||||
const configValue = args[1];
|
||||
const config = loadConfig();
|
||||
|
||||
if (configType === 'siem') {
|
||||
// Verify the SIEM backend is installed
|
||||
logger.info(`Attempting to change SIEM to: ${configValue}`);
|
||||
try {
|
||||
await execPromise(`${SIGMA_CLI_PATH} list targets | grep ${configValue}`);
|
||||
updateConfig('siem', configValue);
|
||||
logger.info(`SIEM configuration updated to: ${configValue}`);
|
||||
await respond(`SIEM configuration updated to: ${configValue}`);
|
||||
} catch (error) {
|
||||
logger.error(`SIEM backend '${configValue}' not found or not installed`);
|
||||
await respond(`Error: SIEM backend '${configValue}' not found or not installed. Please install it with: sigma plugin install ${configValue}`);
|
||||
}
|
||||
} else if (configType === 'lang') {
|
||||
logger.info(`Changing language to: ${configValue}`);
|
||||
updateConfig('lang', configValue);
|
||||
await respond(`Language configuration updated to: ${configValue}`);
|
||||
} else if (configType === 'output') {
|
||||
// Check if output format is supported by the current backend
|
||||
logger.info(`Attempting to change output format to: ${configValue}`);
|
||||
try {
|
||||
await execPromise(`${SIGMA_CLI_PATH} list formats ${config.siem} | grep ${configValue}`);
|
||||
updateConfig('output', configValue);
|
||||
logger.info(`Output configuration updated to: ${configValue}`);
|
||||
await respond(`Output configuration updated to: ${configValue}`);
|
||||
} catch (error) {
|
||||
logger.error(`Output format '${configValue}' not supported by SIEM backend '${config.siem}'`);
|
||||
await respond(`Error: Output format '${configValue}' not supported by SIEM backend '${config.siem}'. Run 'sigma list formats ${config.siem}' to see available formats.`);
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Unknown configuration type: ${configType}`);
|
||||
await respond(`Unknown configuration type: ${configType}. Available types: siem, lang, output, update`);
|
||||
}
|
||||
});
|
||||
};
|
552
src/handlers/sigma/sigma_action_handlers.js
Normal file
552
src/handlers/sigma/sigma_action_handlers.js
Normal file
|
@ -0,0 +1,552 @@
|
|||
/**
|
||||
* sigma_action_handlers.js
|
||||
*
|
||||
* Centralized action handlers for Sigma-related Slack interactions
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { explainSigmaRule, getSigmaRuleYaml } = require('../../services/sigma/sigma_details_service');
|
||||
const { convertRuleToBackend } = require('../../services/sigma/sigma_backend_converter');
|
||||
const { searchSigmaRules } = require('../../services/sigma/sigma_search_service');
|
||||
const { getYamlViewBlocks } = require('../../blocks/sigma_view_yaml_block');
|
||||
const { getSearchResultBlocks } = require('../../blocks/sigma_search_results_block');
|
||||
const { getConversionResultBlocks } = require('../../blocks/sigma_conversion_block');
|
||||
const { getRuleExplanationBlocks } = require('../../blocks/sigma_details_block');
|
||||
const { sendRuleToSiem } = require('../../services/elastic/elastic_api_service');
|
||||
|
||||
const { SIGMA_CLI_CONFIG, ELASTICSEARCH_CONFIG } = require('../../config/appConfig');
|
||||
|
||||
const FILE_NAME = 'sigma_action_handlers.js';
|
||||
|
||||
/**
 * Process and display details for a Sigma rule
 *
 * @param {string} ruleId - The ID of the rule to get details for
 * @param {Function} respond - Function to send response back to Slack
 * @param {boolean} replaceOriginal - Whether to replace the original message
 * @param {string} responseType - Response type (ephemeral or in_channel)
 * @returns {Promise<void>}
 */
const processRuleDetails = async (ruleId, respond, replaceOriginal = false, responseType = 'in_channel') => {
    // Every reply in this flow shares the same replace/response options,
    // so centralize them in one small wrapper.
    const reply = (payload) => respond({
        ...payload,
        replace_original: replaceOriginal,
        response_type: responseType
    });

    try {
        if (!ruleId) {
            logger.warn(`${FILE_NAME}: Missing rule ID in processRuleDetails`);
            await reply({ text: 'Error: Missing rule ID for details' });
            return;
        }

        logger.info(`${FILE_NAME}: Processing details for sigma rule: ${ruleId}`);

        // Look up the rule details via the details service
        logger.info(`${FILE_NAME}: Calling explainSigmaRule with ID: '${ruleId}'`);
        const result = await explainSigmaRule(ruleId);

        if (!result.success) {
            logger.error(`${FILE_NAME}: Rule details retrieval failed: ${result.message}`);
            await reply({ text: `Error: ${result.message}` });
            return;
        }

        const explanation = result.explanation;
        if (!explanation) {
            logger.error(`${FILE_NAME}: Rule details succeeded but no explanation object was returned`);
            await reply({ text: 'Error: Generated details were empty' });
            return;
        }

        logger.info(`${FILE_NAME}: Rule ${ruleId} details retrieved successfully`);

        // Build Block Kit blocks; on failure fall back to a plain-text summary
        let blocks;
        try {
            blocks = getRuleExplanationBlocks(explanation);
        } catch (blockError) {
            await handleError(blockError, `${FILE_NAME}: Block generation`, respond, {
                replaceOriginal: replaceOriginal,
                responseType: responseType,
                customMessage: `Rule ${explanation.id}: ${explanation.title}\n${explanation.description}`
            });
            return;
        }

        await reply({ blocks: blocks });
    } catch (error) {
        await handleError(error, `${FILE_NAME}: Process rule details`, respond, {
            replaceOriginal: replaceOriginal,
            responseType: responseType
        });
    }
};
|
||||
|
||||
/**
 * Process and convert a Sigma rule to the target backend format
 *
 * @param {string} ruleId - The ID of the rule to convert
 * @param {Object} config - Configuration for the conversion (backend, target, format)
 * @param {Function} respond - Function to send response back to Slack
 * @param {boolean} replaceOriginal - Whether to replace the original message
 * @param {string} responseType - Response type (ephemeral or in_channel)
 * @returns {Promise<void>}
 */
const processRuleConversion = async (ruleId, config, respond, replaceOriginal = false, responseType = 'in_channel') => {
    // Common reply wrapper; the progress notice overrides the response type.
    const reply = (payload, type = responseType) => respond({
        ...payload,
        replace_original: replaceOriginal,
        response_type: type
    });

    try {
        if (!ruleId) {
            logger.warn(`${FILE_NAME}: Missing rule ID in processRuleConversion`);
            await reply({ text: 'Error: Missing rule ID for conversion' });
            return;
        }

        logger.info(`${FILE_NAME}: Processing conversion for sigma rule: ${ruleId}`);

        // Fall back to the YAML-derived sigma-cli defaults when no explicit
        // conversion config was supplied.
        const conversionConfig = config || {
            backend: SIGMA_CLI_CONFIG.backend,
            target: SIGMA_CLI_CONFIG.target,
            format: SIGMA_CLI_CONFIG.format
        };

        // Progress notice is always ephemeral regardless of responseType.
        await reply(
            { text: `Converting rule ${ruleId} using ${conversionConfig.backend}/${conversionConfig.target} to ${conversionConfig.format}...` },
            'ephemeral'
        );

        // Run the actual conversion through the backend converter service
        const conversionResult = await convertRuleToBackend(ruleId, conversionConfig);

        if (!conversionResult.success) {
            logger.error(`${FILE_NAME}: Rule conversion failed: ${conversionResult.message}`);
            await reply({ text: `Error: ${conversionResult.message}` });
            return;
        }

        // Build display blocks; fall back to raw output text if that fails
        let blocks;
        try {
            blocks = getConversionResultBlocks(conversionResult);
        } catch (blockError) {
            await handleError(blockError, `${FILE_NAME}: Block generation`, respond, {
                replaceOriginal: replaceOriginal,
                responseType: responseType,
                customMessage: `Rule ${ruleId} converted successfully. Use the following output with your SIEM:\n\`\`\`\n${conversionResult.output}\n\`\`\``
            });
            return;
        }

        await reply({ blocks: blocks });
    } catch (error) {
        await handleError(error, `${FILE_NAME}: Process rule conversion`, respond, {
            replaceOriginal: replaceOriginal,
            responseType: responseType
        });
    }
};
|
||||
|
||||
/**
 * Handle pagination actions (Previous, Next)
 *
 * @param {Object} body - The action payload body
 * @param {Function} ack - Function to acknowledge the action
 * @param {Function} respond - Function to send response
 */
const handlePaginationAction = async (body, ack, respond) => {
    try {
        await ack();
        logger.debug(`${FILE_NAME}: Pagination action received: ${JSON.stringify(body.actions)}`);

        // The button value carries the serialized pagination parameters.
        const actionValue = body?.actions?.[0]?.value;
        if (!actionValue) {
            logger.error(`${FILE_NAME}: Invalid pagination action payload: missing parameters`);
            await respond({
                text: 'Error: Could not process pagination request',
                replace_original: false
            });
            return;
        }

        let valueData;
        try {
            valueData = JSON.parse(actionValue);
        } catch (parseError) {
            await handleError(parseError, `${FILE_NAME}: Pagination value parsing`, respond, {
                replaceOriginal: false,
                customMessage: 'Error: Invalid pagination parameters'
            });
            return;
        }

        const { keyword, page, pageSize } = valueData;

        if (!keyword) {
            logger.warn(`${FILE_NAME}: Missing keyword in pagination action`);
            await respond({
                text: 'Error: Missing search keyword in pagination request',
                replace_original: false
            });
            return;
        }

        logger.info(`${FILE_NAME}: Processing pagination request for "${keyword}" (page ${page}, size ${pageSize})`);

        // Re-run the search at the requested page
        const searchResult = await searchSigmaRules(keyword, page, pageSize);

        if (!searchResult.success) {
            logger.error(`${FILE_NAME}: Search failed during pagination: ${searchResult.message}`);
            await respond({
                text: `Error: ${searchResult.message}`,
                replace_original: false
            });
            return;
        }

        // Render the refreshed result set
        let blocks;
        try {
            blocks = getSearchResultBlocks(keyword, searchResult.results, searchResult.pagination);
        } catch (blockError) {
            await handleError(blockError, `${FILE_NAME}: Pagination block generation`, respond, {
                replaceOriginal: false,
                customMessage: `Error generating results view: ${blockError.message}`
            });
            return;
        }

        // Replace the original message in place so the list appears to page
        await respond({
            blocks: blocks,
            replace_original: true
        });
    } catch (error) {
        await handleError(error, `${FILE_NAME}: Pagination action handler`, respond, {
            replaceOriginal: false
        });
    }
};
|
||||
|
||||
/**
 * Register all Sigma-related action handlers
 *
 * @param {Object} app - The Slack app instance
 */
const registerActionHandlers = (app) => {
    logger.info(`${FILE_NAME}: Registering consolidated sigma action handlers`);

    // Handle "Send to SIEM" button clicks
    app.action('send_sigma_rule_to_siem', async ({ body, ack, respond }) => {
        try {
            await ack();
            logger.debug(`${FILE_NAME}: send_sigma_rule_to_siem action received: ${JSON.stringify(body.actions)}`);

            if (!body || !body.actions || !body.actions[0] || !body.actions[0].value) {
                logger.error(`${FILE_NAME}: Invalid action payload: missing rule ID`);
                await respond({
                    text: 'Error: Could not determine which rule to send',
                    replace_original: false,
                    response_type: 'ephemeral'
                });
                return;
            }

            // Extract rule ID from action value
            // Value format is "send_sigma_rule_to_siem_[ruleID]"
            const actionValue = body.actions[0].value;
            const ruleId = actionValue.replace('send_sigma_rule_to_siem_', '');

            if (!ruleId) {
                logger.error(`${FILE_NAME}: Missing rule ID in action value: ${actionValue}`);
                await respond({
                    text: 'Error: Missing rule ID in button data',
                    replace_original: false,
                    response_type: 'ephemeral'
                });
                return;
            }

            logger.info(`${FILE_NAME}: Sending rule ${ruleId} to SIEM`);

            // Inform user that processing is happening
            await respond({
                text: `Sending rule ${ruleId} to Elasticsearch SIEM...`,
                replace_original: false,
                response_type: 'ephemeral'
            });

            // Convert the rule using the sigma-cli settings from fylgja.yml
            const config = {
                backend: SIGMA_CLI_CONFIG.backend,
                target: SIGMA_CLI_CONFIG.target,
                format: SIGMA_CLI_CONFIG.format
            };

            logger.info(`${FILE_NAME}: Converting rule ${ruleId} for SIEM export`);
            const conversionResult = await convertRuleToBackend(ruleId, config);

            if (!conversionResult.success) {
                logger.error(`${FILE_NAME}: Rule conversion failed: ${conversionResult.message}`);
                await respond({
                    text: `Error: Failed to convert rule for SIEM: ${conversionResult.message}`,
                    replace_original: false,
                    response_type: 'ephemeral'
                });
                return;
            }

            // Parse the converted rule JSON and fill in any fields the
            // Elasticsearch detection-engine API requires but the converter
            // may not emit.
            let rulePayload;
            try {
                rulePayload = JSON.parse(conversionResult.output);

                rulePayload.rule_id = rulePayload.rule_id || ruleId;
                rulePayload.from = rulePayload.from || "now-360s";
                rulePayload.to = rulePayload.to || "now";
                rulePayload.interval = rulePayload.interval || "5m";

                if (!rulePayload.name) {
                    rulePayload.name = conversionResult.rule?.title || `Sigma Rule ${ruleId}`;
                }

                if (!rulePayload.description) {
                    rulePayload.description = conversionResult.rule?.description ||
                        `Converted from Sigma rule: ${ruleId}`;
                }

                if (!rulePayload.risk_score) {
                    // Map Sigma level to risk score
                    const levelMap = {
                        'critical': 90,
                        'high': 73,
                        'medium': 50,
                        'low': 25,
                        'informational': 10
                    };

                    rulePayload.risk_score = levelMap[conversionResult.rule?.level] || 50;
                }

                if (!rulePayload.severity) {
                    rulePayload.severity = conversionResult.rule?.level || 'medium';
                }

                // Only default `enabled` when the field is absent. The
                // previous `if (!rulePayload.enabled)` check silently flipped
                // an explicit `enabled: false` back to true.
                if (rulePayload.enabled === undefined) {
                    rulePayload.enabled = true;
                }
            } catch (parseError) {
                logger.error(`${FILE_NAME}: Failed to parse converted rule JSON: ${parseError.message}`);
                await respond({
                    text: `Error: The converted rule is not valid JSON: ${parseError.message}`,
                    replace_original: false,
                    response_type: 'ephemeral'
                });
                return;
            }

            // Send the rule to Elasticsearch via the API service. Any thrown
            // error falls through to the outer catch, which reports it with
            // the same handleError call the old inner catch duplicated.
            const result = await sendRuleToSiem(rulePayload);

            if (result.success) {
                logger.info(`${FILE_NAME}: Successfully sent rule ${ruleId} to SIEM`);
                await respond({
                    text: `✅ Success! Rule "${rulePayload.name}" has been added to your Elasticsearch SIEM.`,
                    replace_original: false,
                    response_type: 'in_channel'
                });
            } else {
                logger.error(`${FILE_NAME}: Error sending rule to SIEM: ${result.message}`);
                await respond({
                    text: `Error: Failed to add rule to SIEM: ${result.message}`,
                    replace_original: false,
                    response_type: 'ephemeral'
                });
            }
        } catch (error) {
            await handleError(error, `${FILE_NAME}: send_sigma_rule_to_siem action`, respond, {
                replaceOriginal: false
            });
        }
    });

    // Handle View YAML button clicks
    app.action('view_yaml', async ({ body, ack, respond }) => {
        logger.info(`${FILE_NAME}: VIEW_YAML ACTION TRIGGERED`);
        try {
            await ack();
            logger.debug(`${FILE_NAME}: View YAML action received: ${JSON.stringify(body.actions)}`);

            if (!body || !body.actions || !body.actions[0] || !body.actions[0].value) {
                logger.error(`${FILE_NAME}: Invalid action payload: missing rule ID`);
                await respond({
                    text: 'Error: Could not determine which rule to get YAML for',
                    replace_original: false
                });
                return;
            }

            // Extract rule ID from button value
            // Handle both formats: direct ID from search results or view_yaml_{ruleId} from details view
            let ruleId = body.actions[0].value;
            if (ruleId.startsWith('view_yaml_')) {
                ruleId = ruleId.replace('view_yaml_', '');
            }

            logger.info(`${FILE_NAME}: View YAML button clicked for rule: ${ruleId}`);

            // Get Sigma rule YAML
            const result = await getSigmaRuleYaml(ruleId);
            logger.debug(`${FILE_NAME}: YAML retrieval result: ${JSON.stringify(result, null, 2)}`);

            if (!result.success) {
                logger.error(`${FILE_NAME}: Rule YAML retrieval failed: ${result.message}`);
                await respond({
                    text: `Error: ${result.message}`,
                    replace_original: false
                });
                return;
            }

            logger.info(`${FILE_NAME}: Rule ${ruleId} YAML retrieved successfully via button click`);

            // Use the block module to render the YAML for Slack
            const blocks = getYamlViewBlocks(ruleId, result.yaml || '');

            await respond({
                blocks: blocks,
                replace_original: false
            });
        } catch (error) {
            await handleError(error, `${FILE_NAME}: View YAML action`, respond, {
                replaceOriginal: false
            });
        }
    });

    // Handle convert_rule_to_siem button clicks
    app.action('convert_rule_to_siem', async ({ body, ack, respond }) => {
        try {
            await ack();
            logger.debug(`${FILE_NAME}: convert_rule_to_siem action received: ${JSON.stringify(body.actions)}`);

            if (!body || !body.actions || !body.actions[0] || !body.actions[0].value) {
                logger.error(`${FILE_NAME}: Invalid action payload: missing rule ID`);
                await respond({
                    text: 'Error: Could not determine which rule to convert',
                    replace_original: false
                });
                return;
            }

            // Extract rule ID from button value
            const ruleId = body.actions[0].value.replace('convert_rule_to_siem_', '');
            logger.info(`${FILE_NAME}: convert_rule_to_siem button clicked for rule: ${ruleId}`);

            // Use the shared sigma-cli settings instead of hard-coded values
            // so this handler stays consistent with every other conversion
            // path in this module.
            const config = {
                backend: SIGMA_CLI_CONFIG.backend,
                target: SIGMA_CLI_CONFIG.target,
                format: SIGMA_CLI_CONFIG.format
            };

            await processRuleConversion(ruleId, config, respond, false, 'in_channel');
        } catch (error) {
            await handleError(error, `${FILE_NAME}: convert_rule_to_siem action`, respond, {
                replaceOriginal: false
            });
        }
    });

    // Handle "View Rule Details" button clicks from search results
    app.action('view_rule_details', async ({ body, ack, respond }) => {
        logger.info(`${FILE_NAME}: VIEW_RULE_DETAILS ACTION TRIGGERED`);
        try {
            await ack();
            logger.debug(`${FILE_NAME}: View Rule Details action received: ${JSON.stringify(body.actions)}`);

            if (!body || !body.actions || !body.actions[0] || !body.actions[0].value) {
                logger.error(`${FILE_NAME}: Invalid action payload: missing rule ID`);
                await respond({
                    text: 'Error: Could not determine which rule to explain',
                    replace_original: false
                });
                return;
            }

            const ruleId = body.actions[0].value;
            logger.info(`${FILE_NAME}: Rule details button clicked for rule ID: ${ruleId}`);

            // Inform user we're processing
            await respond({
                text: `Processing details for rule ${ruleId}...`,
                replace_original: false,
                response_type: 'ephemeral'
            });

            await processRuleDetails(ruleId, respond, false, 'in_channel');
        } catch (error) {
            await handleError(error, `${FILE_NAME}: View rule details action`, respond, {
                replaceOriginal: false
            });
        }
    });

    // Handle pagination button clicks
    app.action('search_prev_page', async ({ body, ack, respond }) => {
        await handlePaginationAction(body, ack, respond);
    });

    app.action('search_next_page', async ({ body, ack, respond }) => {
        await handlePaginationAction(body, ack, respond);
    });

    logger.info(`${FILE_NAME}: All sigma action handlers registered successfully`);
};
|
||||
|
||||
// Export the registration entry point plus the shared processors so command
// handlers can reuse the same conversion/details flows.
module.exports = {
    registerActionHandlers,
    processRuleDetails,
    processRuleConversion
};
|
62
src/handlers/sigma/sigma_create_handler.js
Normal file
62
src/handlers/sigma/sigma_create_handler.js
Normal file
|
@ -0,0 +1,62 @@
|
|||
/**
|
||||
* sigma_create_handler.js
|
||||
*
|
||||
* Handles Sigma rule conversion requests from Slack commands
|
||||
* Action handlers moved to sigma_action_handlers.js
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { processRuleConversion } = require('./sigma_action_handlers');
|
||||
const { SIGMA_CLI_CONFIG } = require('../../config/appConfig');
|
||||
|
||||
const FILE_NAME = 'sigma_create_handler.js';
|
||||
|
||||
/**
|
||||
* Handle the sigma-create command for converting Sigma rules
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
/**
 * Handle the /sigma-create command for converting Sigma rules.
 *
 * Extracts the rule ID from the command text, acknowledges the user with an
 * ephemeral "working on it" message, then delegates the conversion to
 * processRuleConversion using backend/target/format from the YAML config.
 *
 * @param {Object} command - The Slack command object
 * @param {Function} respond - Function to send response back to Slack
 */
const handleCommand = async (command, respond) => {
  try {
    logger.debug(`${FILE_NAME}: Processing sigma-create command: ${JSON.stringify(command.text)}`);

    // Nothing to do without a command body.
    if (!command || !command.text) {
      logger.warn(`${FILE_NAME}: Empty command received for sigma-create`);
      await respond('Invalid command. Usage: /sigma-create [id]');
      return;
    }

    // The first whitespace-separated token is the rule ID.
    const [ruleId] = command.text.trim().split(/\s+/);

    if (!ruleId) {
      logger.warn(`${FILE_NAME}: Missing rule ID in sigma-create command`);
      await respond('Invalid command: missing rule ID. Usage: /sigma-create [id]');
      return;
    }

    // Conversion can be slow; tell the requester we are on it.
    await respond({
      text: 'Processing your request... This may take a moment.',
      response_type: 'ephemeral'
    });

    // Conversion settings come from the YAML-backed application config.
    const { backend, target, format } = SIGMA_CLI_CONFIG;
    await processRuleConversion(ruleId, { backend, target, format }, respond, false, 'in_channel');
  } catch (error) {
    await handleError(error, `${FILE_NAME}: Create command handler`, respond, {
      responseType: 'ephemeral'
    });
  }
};
|
||||
|
||||
// Public API: the /sigma-create slash-command handler.
module.exports = {
  handleCommand
};
|
56
src/handlers/sigma/sigma_details_handler.js
Normal file
56
src/handlers/sigma/sigma_details_handler.js
Normal file
|
@ -0,0 +1,56 @@
|
|||
/**
|
||||
* sigma_details_handler.js
|
||||
*
|
||||
* Handles Sigma rule details requests from Slack commands
|
||||
* Processes requests for rule explanations
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { explainSigmaRule } = require('../../services/sigma/sigma_details_service');
|
||||
const { processRuleDetails } = require('./sigma_action_handlers');
|
||||
|
||||
const FILE_NAME = 'sigma_details_handler.js';
|
||||
|
||||
/**
|
||||
* Handle the sigma-details command for Sigma rules
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
/**
 * Handle the /sigma-details command for Sigma rules.
 *
 * Validates the command text, treats the whole trimmed text as the rule ID,
 * sends an ephemeral acknowledgement, then delegates to the shared
 * processRuleDetails helper from sigma_action_handlers.
 *
 * @param {Object} command - The Slack command object
 * @param {Function} respond - Function to send response back to Slack
 */
const handleCommand = async (command, respond) => {
  try {
    logger.debug(`${FILE_NAME}: Processing sigma-details command: ${JSON.stringify(command.text)}`);

    // Guard: no command body means nothing to look up.
    if (!command || !command.text) {
      logger.warn(`${FILE_NAME}: Empty command received for sigma-details`);
      await respond('Invalid command. Usage: /sigma-details [id]');
      return;
    }

    // The entire trimmed command text is interpreted as the rule ID.
    const requestedId = command.text.trim();

    if (!requestedId) {
      logger.warn(`${FILE_NAME}: Missing rule ID in sigma-details command`);
      await respond('Invalid command: missing rule ID. Usage: /sigma-details [id]');
      return;
    }

    // Acknowledge before the (potentially slow) lookup.
    await respond({
      text: 'Processing your request... This may take a moment.',
      response_type: 'ephemeral'
    });

    // Shared implementation with the "View Details" button action.
    await processRuleDetails(requestedId, respond, false, 'in_channel');
  } catch (error) {
    await handleError(error, `${FILE_NAME}: Details command handler`, respond, {
      responseType: 'ephemeral'
    });
  }
};
|
||||
|
||||
// Public API: the /sigma-details slash-command handler.
module.exports = {
  handleCommand
};
|
171
src/handlers/sigma/sigma_search_handler.js
Normal file
171
src/handlers/sigma/sigma_search_handler.js
Normal file
|
@ -0,0 +1,171 @@
|
|||
/**
|
||||
* sigma_search_handler.js
|
||||
*
|
||||
* Handles Sigma rule search requests from Slack commands
|
||||
*/
|
||||
const { searchSigmaRules } = require('../../services/sigma/sigma_search_service');
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { getSearchResultBlocks } = require('../../blocks/sigma_search_results_block');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
const MAX_RESULTS_PER_PAGE = 10;
|
||||
const MAX_RESULTS_THRESHOLD = 99;
|
||||
|
||||
/**
|
||||
* Handle the sigma-search command for Sigma rules
|
||||
* Searches for rules based on keywords and displays results with pagination
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
/**
 * Handle the /sigma-search command for Sigma rules.
 *
 * Parses an optional trailing `page=N` and `limit=N` out of the command text,
 * runs the search via searchSigmaRules, and renders the results as Block Kit
 * blocks with pagination. Oversized result sets (> MAX_RESULTS_THRESHOLD)
 * still render, with a warning block spliced in after the header.
 *
 * NOTE(review): the `page=` suffix is stripped before the `limit=` suffix, so
 * "kw limit=5 page=2" parses both, but "kw page=2 limit=5" leaves "page=2"
 * inside the keyword — confirm whether both orders should be supported.
 *
 * @param {Object} command - The Slack command object
 * @param {Function} respond - Function to send response back to Slack
 */
const handleCommand = async (command, respond) => {
  try {
    logger.debug(`${FILE_NAME}: Processing sigma-search command: ${JSON.stringify(command.text)}`);

    if (!command || !command.text) {
      logger.warn(`${FILE_NAME}: Empty command received for sigma-search`);
      await respond('Invalid command. Usage: /sigma-search [keyword]');
      return;
    }

    // Extract search keyword and check for pagination parameters.
    let keyword = command.text.trim();
    let page = 1;
    let pageSize = MAX_RESULTS_PER_PAGE;

    // Check for pagination format: "keyword page=X" (anchored at end of text).
    const pagingMatch = keyword.match(/(.+)\s+page=(\d+)$/i);
    if (pagingMatch) {
      keyword = pagingMatch[1].trim();
      // parseInt can only yield NaN here on overflow-ish input; fall back to 1.
      page = parseInt(pagingMatch[2], 10) || 1;
      logger.debug(`${FILE_NAME}: Detected pagination request: "${keyword}" page ${page}`);
    }

    // Check for page size format: "keyword limit=X" (applied after page=).
    const limitMatch = keyword.match(/(.+)\s+limit=(\d+)$/i);
    if (limitMatch) {
      keyword = limitMatch[1].trim();
      pageSize = parseInt(limitMatch[2], 10) || MAX_RESULTS_PER_PAGE;
      // Clamp the page size to a sane 1..100 range.
      pageSize = Math.min(Math.max(pageSize, 1), 100);
      logger.debug(`${FILE_NAME}: Detected page size request: "${keyword}" limit ${pageSize}`);
    }

    // After stripping suffixes the keyword may have become empty.
    if (!keyword) {
      logger.warn(`${FILE_NAME}: Missing keyword in sigma-search command`);
      await respond('Invalid command: missing keyword. Usage: /sigma-search [keyword]');
      return;
    }

    logger.info(`${FILE_NAME}: Searching for rules with keyword: ${keyword} (page ${page}, size ${pageSize})`);
    logger.debug(`${FILE_NAME}: Search keyword length: ${keyword.length}`);

    // Acknowledge before the (potentially slow) database search.
    await respond({
      text: 'Searching for rules... This may take a moment.',
      response_type: 'ephemeral'
    });

    // Search for rules using the service function with pagination.
    const searchResult = await searchSigmaRules(keyword, page, pageSize);

    logger.debug(`${FILE_NAME}: Search result status: ${searchResult.success}`);
    logger.debug(`${FILE_NAME}: Found ${searchResult.results?.length || 0} results out of ${searchResult.pagination?.totalResults || 0} total matches`);

    logger.debug(`${FILE_NAME}: About to generate blocks for search results`);

    if (!searchResult.success) {
      logger.error(`${FILE_NAME}: Search failed: ${searchResult.message}`);
      await respond({
        text: `Search failed: ${searchResult.message}`,
        response_type: 'ephemeral'
      });
      return;
    }

    // Total match count across all pages (not just this page's slice).
    const totalCount = searchResult.pagination?.totalResults || 0;

    // Check if search returned too many results.
    if (totalCount > MAX_RESULTS_THRESHOLD) {
      logger.warn(`${FILE_NAME}: Search for "${keyword}" returned too many results (${totalCount}), displaying first page with warning`);

      // Continue processing but flag it so a warning block is added below.
      searchResult.tooManyResults = true;
    }

    if (!searchResult.results || searchResult.results.length === 0) {
      // Distinguish "past the last page" from "no matches at all".
      if (totalCount > 0) {
        logger.warn(`${FILE_NAME}: No rules found on page ${page} for "${keyword}", but ${totalCount} total matches exist`);
        await respond({
          text: `No rules found on page ${page} for "${keyword}". Try a different page or refine your search.`,
          response_type: 'ephemeral'
        });
      } else {
        logger.warn(`${FILE_NAME}: No rules found matching "${keyword}"`);
        await respond({
          text: `No rules found matching "${keyword}"`,
          response_type: 'ephemeral'
        });
      }
      return;
    }

    // Generate blocks with pagination support.
    let blocks;
    try {
      logger.debug(`${FILE_NAME}: Calling getSearchResultBlocks with ${searchResult.results.length} results`);

      // If we have too many results, add a warning block at the beginning.
      if (searchResult.tooManyResults) {
        blocks = getSearchResultBlocks(keyword, searchResult.results, searchResult.pagination);

        // Insert warning at index 1, i.e. immediately after the header block.
        blocks.splice(1, 0, {
          "type": "section",
          "text": {
            "type": "mrkdwn",
            "text": `:warning: Your search for "${keyword}" returned ${totalCount} results, which is a lot. Displaying the first page. Consider using a more specific keyword for narrower results.`
          }
        });
      } else {
        blocks = getSearchResultBlocks(keyword, searchResult.results, searchResult.pagination);
      }

      logger.debug(`${FILE_NAME}: Successfully generated ${blocks?.length || 0} blocks`);
    } catch (blockError) {
      // Block generation failed: fall back to a plain-text summary so the
      // user still gets the counts and a hint toward /sigma-details.
      await handleError(blockError, `${FILE_NAME}: Block generation`, respond, {
        responseType: 'in_channel',
        customMessage: `Found ${searchResult.results.length} of ${totalCount} rules matching "${keyword}" (page ${page} of ${searchResult.pagination?.totalPages || 1}). Use /sigma-details [id] to view details.`
      });
      return;
    }

    logger.debug(`${FILE_NAME}: About to send response with ${blocks?.length || 0} blocks`);

    // Large result sets stay private to the requester; small ones go to the
    // channel. NOTE(review): the name reads inverted — isEphemeral is true
    // when there are MORE than 20 matches; confirm this is intentional.
    const isEphemeral = totalCount > 20;

    // Respond with the search results.
    await respond({
      blocks: blocks,
      response_type: isEphemeral ? 'ephemeral' : 'in_channel'
    });

    logger.debug(`${FILE_NAME}: Response sent successfully`);
  } catch (error) {
    // Use error handler for unexpected errors.
    await handleError(error, `${FILE_NAME}: Search command handler`, respond, {
      responseType: 'ephemeral'
    });
  }
};
|
||||
|
||||
// Public API: the /sigma-search slash-command handler.
module.exports = {
  handleCommand
};
|
68
src/handlers/sigma/sigma_stats_handler.js
Normal file
68
src/handlers/sigma/sigma_stats_handler.js
Normal file
|
@ -0,0 +1,68 @@
|
|||
/**
|
||||
* sigma_stats_handler.js
|
||||
*
|
||||
* Handles Sigma rule statistics requests from Slack commands
|
||||
* Processes requests for database statistics
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { handleError } = require('../../utils/error_handler');
|
||||
const { getSigmaStats } = require('../../services/sigma/sigma_stats_service');
|
||||
const { getStatsBlocks } = require('../../blocks/sigma_stats_block');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Handle the sigma-stats command for Sigma rules
|
||||
*
|
||||
* @param {Object} command - The Slack command object
|
||||
* @param {Function} respond - Function to send response back to Slack
|
||||
*/
|
||||
/**
 * Handle the /sigma-stats command for Sigma rules.
 *
 * Acknowledges the user, fetches database statistics via getSigmaStats, and
 * renders them as Block Kit blocks posted to the channel. Failures at any
 * stage are reported back to the requester as ephemeral messages.
 *
 * @param {Object} command - The Slack command object
 * @param {Function} respond - Function to send response back to Slack
 */
const handleCommand = async (command, respond) => {
  try {
    logger.info(`${FILE_NAME}: Processing sigma-stats command`);

    // Gathering stats can take a while; acknowledge first.
    await respond({
      text: 'Gathering Sigma rule statistics... This may take a moment.',
      response_type: 'ephemeral'
    });

    const result = await getSigmaStats();

    if (!result.success) {
      logger.error(`${FILE_NAME}: Failed to retrieve statistics: ${result.message}`);
      await respond({
        text: `Error retrieving statistics: ${result.message}`,
        response_type: 'ephemeral'
      });
      return;
    }

    // Render the statistics view; block generation failures are reported
    // separately so the user gets a meaningful message.
    let blocks;
    try {
      blocks = getStatsBlocks(result.stats);
    } catch (blockError) {
      await handleError(blockError, `${FILE_NAME}: Block generation`, respond, {
        responseType: 'ephemeral',
        customMessage: 'Error generating statistics view'
      });
      return;
    }

    // Stats are posted publicly to the channel.
    await respond({
      blocks: blocks,
      response_type: 'in_channel'
    });
  } catch (error) {
    await handleError(error, `${FILE_NAME}: Stats command handler`, respond, {
      responseType: 'ephemeral'
    });
  }
};
|
||||
|
||||
// Public API: the /sigma-stats slash-command handler.
module.exports = {
  handleCommand
};
|
3
src/handlers/stats/stats_handler.js
Normal file
3
src/handlers/stats/stats_handler.js
Normal file
|
@ -0,0 +1,3 @@
|
|||
//
|
||||
// stats_handler.js
|
||||
//
|
63
src/index.js
Normal file
63
src/index.js
Normal file
|
@ -0,0 +1,63 @@
|
|||
/**
|
||||
* index.js
|
||||
*
|
||||
* Entry point for the Fylgja application.
|
||||
* This module initializes the Slack bot server using configuration from fylgja.yml.
|
||||
* It handles startup errors and provides logging.
|
||||
*/
|
||||
const app = require('./app');
|
||||
const logger = require('./utils/logger');
|
||||
const { version } = require('../package.json');
|
||||
const { SERVER_CONFIG } = require('./config/appConfig');
|
||||
|
||||
// Start the app
|
||||
const PORT = SERVER_CONFIG.port;
|
||||
const ENV = process.env.NODE_ENV || 'development';
|
||||
const { getFileName } = require('./utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Gracefully handles server shutdown
|
||||
* Logs shutdown information and exits the process
|
||||
*
|
||||
* @param {string} reason - The reason for the shutdown
|
||||
*/
|
||||
/**
 * Gracefully handles server shutdown.
 * Records which trigger caused the shutdown, then exits with status 0.
 *
 * @param {string} reason - The reason for the shutdown (e.g. a signal name)
 */
function handleShutdown(reason) {
  const shutdownMessage = `${FILE_NAME}: Shutting down server (${reason})`;
  logger.info(shutdownMessage);
  process.exit(0);
}
|
||||
|
||||
/**
|
||||
* Immediately-invoked async function to start the server
|
||||
* Allows for proper async/await error handling
|
||||
*/
|
||||
/**
 * Immediately-invoked async function to start the server.
 * Wires up process-level signal and error handlers, then starts the Bolt app.
 * Using an async IIFE allows proper async/await error handling at startup.
 */
(async () => {
  try {
    logger.info(`${FILE_NAME}: Starting Fylgja Slack bot v${version} on port ${PORT} (${ENV} environment)`);

    // Graceful shutdown on the standard termination signals.
    process.on('SIGTERM', () => handleShutdown('SIGTERM'));
    process.on('SIGINT', () => handleShutdown('SIGINT'));

    // Uncaught synchronous exceptions are fatal: log and exit non-zero.
    process.on('uncaughtException', (error) => {
      logger.error(`${FILE_NAME}: Uncaught exception: ${error.message}`);
      logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
      process.exit(1);
    });

    // Unhandled promise rejections are logged but deliberately non-fatal —
    // the process keeps serving.
    process.on('unhandledRejection', (reason, promise) => {
      logger.error(`${FILE_NAME}: Unhandled rejection at: ${promise}, reason: ${reason}`);
      // Continue running despite unhandled rejection
    });

    await app.start(PORT);
    logger.info(`${FILE_NAME}: ✅ Fylgja Slack bot is running on port ${PORT}`);
    logger.info(`${FILE_NAME}: Server URL: http://localhost:${PORT}`);
  } catch (error) {
    // Startup failed (port in use, bad config, etc.) — log and exit non-zero.
    logger.error(`${FILE_NAME}: 💥 Fatal error starting app: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    process.exit(1);
  }
})();
|
0
src/services/elastic/cases.js
Normal file
0
src/services/elastic/cases.js
Normal file
0
src/services/elastic/clients.js
Normal file
0
src/services/elastic/clients.js
Normal file
154
src/services/elastic/elastic_api_service.js
Normal file
154
src/services/elastic/elastic_api_service.js
Normal file
|
@ -0,0 +1,154 @@
|
|||
/**
|
||||
* elastic_api_service.js
|
||||
*
|
||||
* Service for interacting with Elasticsearch API endpoints
|
||||
*/
|
||||
const axios = require('axios');
|
||||
const logger = require('../../utils/logger');
|
||||
const { ELASTICSEARCH_CONFIG } = require('../../config/appConfig');
|
||||
|
||||
const FILE_NAME = 'elastic_api_service.js';
|
||||
|
||||
/**
|
||||
* Get Elasticsearch configuration with credentials
|
||||
*
|
||||
* @returns {Object} Configuration object with URL and credentials
|
||||
*/
|
||||
/**
 * Get Elasticsearch configuration with credentials.
 *
 * Credentials are stored as a single "user:password" string; we split on the
 * FIRST colon only, so passwords that themselves contain ':' are preserved
 * (the previous split(':')[1] silently truncated such passwords). Environment
 * variables act as fallbacks for any piece that is missing.
 *
 * @returns {Object} Configuration object with url, username, password and apiEndpoint
 */
const getElasticConfig = () => {
  // Default to '' so a missing credentials entry falls through to the env
  // vars instead of throwing on .indexOf.
  const credentials = ELASTICSEARCH_CONFIG.credentials || '';
  const sep = credentials.indexOf(':');
  const username = sep >= 0 ? credentials.slice(0, sep) : credentials;
  const password = sep >= 0 ? credentials.slice(sep + 1) : '';

  return {
    // Base URL is everything before the '/api/' segment of the endpoint.
    url: ELASTICSEARCH_CONFIG.apiEndpoint.split('/api/')[0] || process.env.ELASTIC_URL,
    username: username || process.env.ELASTIC_USERNAME,
    password: password || process.env.ELASTIC_PASSWORD,
    apiEndpoint: ELASTICSEARCH_CONFIG.apiEndpoint
  };
};
|
||||
|
||||
/**
|
||||
* Send a rule to Elasticsearch SIEM
|
||||
*
|
||||
* @param {Object} rulePayload - The rule payload to send to Elasticsearch
|
||||
* @returns {Promise<Object>} - Object containing success status and response/error information
|
||||
*/
|
||||
/**
 * Send a rule to Elasticsearch SIEM.
 *
 * POSTs the payload to the configured detection-engine endpoint using basic
 * auth and the kbn-xsrf header Kibana requires, and normalizes the outcome
 * into a { success, status?, data?, message?, error? } result object.
 *
 * @param {Object} rulePayload - The rule payload to send to Elasticsearch
 * @returns {Promise<Object>} - Object containing success status and response/error information
 */
const sendRuleToSiem = async (rulePayload) => {
  logger.info(`${FILE_NAME}: Sending rule to Elasticsearch SIEM`);

  try {
    const { apiEndpoint, username, password } = getElasticConfig();

    logger.debug(`${FILE_NAME}: Using Elasticsearch API URL: ${apiEndpoint}`);

    // POST the rule with basic auth; 'kbn-xsrf' is mandatory for Kibana APIs.
    const response = await axios({
      method: 'post',
      url: apiEndpoint,
      headers: {
        'Content-Type': 'application/json',
        'kbn-xsrf': 'true'
      },
      auth: { username, password },
      data: rulePayload
    });

    // Normalize the HTTP status into a success flag.
    const isSuccess = response.status >= 200 && response.status < 300;

    if (!isSuccess) {
      logger.error(`${FILE_NAME}: Error sending rule to SIEM. Status: ${response.status}, Response: ${JSON.stringify(response.data)}`);
      return {
        success: false,
        status: response.status,
        message: `Failed to add rule to SIEM. Status: ${response.status}`,
        data: response.data
      };
    }

    logger.info(`${FILE_NAME}: Successfully sent rule to SIEM`);
    return {
      success: true,
      status: response.status,
      data: response.data
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: API error sending rule to SIEM: ${error.message}`);
    logger.debug(`${FILE_NAME}: API error details: ${error.response ? JSON.stringify(error.response.data) : 'No response data'}`);

    // Prefer the server-provided message when the response carries one.
    const errorMessage = error.response?.data?.message || error.message;

    return {
      success: false,
      message: errorMessage,
      error: error
    };
  }
};
|
||||
|
||||
/**
|
||||
* Make a generic request to an Elasticsearch API endpoint
|
||||
*
|
||||
* @param {Object} options - Request options
|
||||
* @param {string} options.method - HTTP method (get, post, put, delete)
|
||||
* @param {string} options.endpoint - API endpoint (appended to base URL)
|
||||
* @param {Object} options.data - Request payload
|
||||
* @param {Object} options.params - URL parameters
|
||||
* @param {Object} options.headers - Additional headers
|
||||
* @returns {Promise<Object>} - Response object
|
||||
*/
|
||||
/**
 * Make a generic request to an Elasticsearch API endpoint.
 *
 * @param {Object} options - Request options
 * @param {string} options.method - HTTP method (get, post, put, delete); defaults to 'get'
 * @param {string} options.endpoint - API endpoint (appended to base URL); defaults to the configured API endpoint
 * @param {Object} options.data - Request payload
 * @param {Object} options.params - URL parameters
 * @param {Object} options.headers - Additional headers (merged over the defaults)
 * @returns {Promise<Object>} - Normalized { success, status, data } or { success: false, message, status, error }
 */
const makeElasticRequest = async (options) => {
  try {
    const elasticConfig = getElasticConfig();
    const baseUrl = elasticConfig.url;

    // Build the full URL - use provided endpoint or default API endpoint.
    const url = options.endpoint ?
      `${baseUrl}${options.endpoint.startsWith('/') ? '' : '/'}${options.endpoint}` :
      elasticConfig.apiEndpoint;

    logger.debug(`${FILE_NAME}: Making ${options.method} request to: ${url}`);

    // Default headers; caller-supplied headers win on conflict.
    const headers = {
      'Content-Type': 'application/json',
      'kbn-xsrf': 'true',
      ...(options.headers || {})
    };

    // Make the request.
    const response = await axios({
      method: options.method || 'get',
      url: url,
      headers: headers,
      auth: {
        username: elasticConfig.username,
        password: elasticConfig.password
      },
      // `??` (not `||`) so legitimate falsy payloads such as 0, '' or false
      // are forwarded instead of being silently replaced with null.
      data: options.data ?? null,
      params: options.params ?? null
    });

    // Return a standardized response.
    return {
      success: response.status >= 200 && response.status < 300,
      status: response.status,
      data: response.data
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error in Elasticsearch API request: ${error.message}`);

    return {
      success: false,
      message: error.response?.data?.message || error.message,
      status: error.response?.status,
      error: error
    };
  }
};
|
||||
|
||||
// Public API: rule submission, the generic request helper, and config access.
module.exports = {
  sendRuleToSiem,
  makeElasticRequest,
  getElasticConfig
};
|
0
src/services/elastic/rules.js
Normal file
0
src/services/elastic/rules.js
Normal file
0
src/services/elastic/spaces.js
Normal file
0
src/services/elastic/spaces.js
Normal file
153
src/services/sigma/sigma_backend_converter.js
Normal file
153
src/services/sigma/sigma_backend_converter.js
Normal file
|
@ -0,0 +1,153 @@
|
|||
/**
|
||||
* sigma_backend_converter.js
|
||||
*
|
||||
* Service for converting Sigma rules to various backend SIEM formats
|
||||
* Uses the sigma-cli tool for conversion operations
|
||||
*/
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { execSync } = require('child_process');
|
||||
const logger = require('../../utils/logger');
|
||||
const { SIGMA_CLI_PATH, SIGMA_CLI_CONFIG } = require('../../config/appConfig');
|
||||
const { convertSigmaRule } = require('./sigma_converter_service');
|
||||
const { getRuleYamlContent } = require('../../sigma_db/sigma_db_queries');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Convert a Sigma rule to a specific backend format using the sigma-cli
|
||||
*
|
||||
* @param {string} ruleId - The ID of the rule to convert
|
||||
* @param {Object} config - Configuration for the conversion
|
||||
* @param {string} config.backend - Target backend (default from YAML config)
|
||||
* @param {string} config.target - Query target (default from YAML config)
|
||||
* @param {string} config.format - Output format (default from YAML config)
|
||||
* @returns {Promise<Object>} Conversion result with output or error
|
||||
*/
|
||||
/**
 * Convert a Sigma rule to a specific backend format using the sigma-cli.
 *
 * Writes the rule YAML to a temp file, invokes sigma-cli on it, then attaches
 * rule metadata to the conversion output. The temp file is always removed via
 * a finally block, whether the CLI succeeds or fails.
 *
 * Security: ruleId and the backend/target/format tokens originate from Slack
 * user input / caller-supplied config, so the CLI is invoked via execFileSync
 * with an argument array (no shell interpolation), and ruleId is sanitized
 * before being used in a filesystem path.
 *
 * @param {string} ruleId - The ID of the rule to convert
 * @param {Object} config - Configuration for the conversion
 * @param {string} config.backend - Target backend (default from YAML config)
 * @param {string} config.target - Query target (default from YAML config)
 * @param {string} config.format - Output format (default from YAML config)
 * @returns {Promise<Object>} Conversion result with output or error
 */
async function convertRuleToBackend(ruleId, config = {}) {
  try {
    // Validate configuration and set defaults from YAML config.
    const backend = config.backend || SIGMA_CLI_CONFIG.backend;
    const target = config.target || SIGMA_CLI_CONFIG.target;
    const format = config.format || SIGMA_CLI_CONFIG.format;

    logger.info(`${FILE_NAME}: Converting rule ${ruleId} using backend: ${backend}, target: ${target}, format: ${format}`);

    // Verify sigma-cli path.
    if (!fs.existsSync(SIGMA_CLI_PATH)) {
      logger.error(`${FILE_NAME}: Sigma CLI not found at path: ${SIGMA_CLI_PATH}`);
      return {
        success: false,
        message: 'Sigma CLI tool not found'
      };
    }

    // Get the rule YAML content.
    const yamlResult = await getRuleYamlContent(ruleId);
    if (!yamlResult.success || !yamlResult.content) {
      logger.warn(`${FILE_NAME}: Failed to retrieve YAML for rule ${ruleId}: ${yamlResult.message || 'No content'}`);
      return {
        success: false,
        message: yamlResult.message || 'Failed to retrieve rule content'
      };
    }

    // ruleId comes from a Slack command: strip anything that is not
    // alphanumeric/underscore/dash before embedding it in a file name, so it
    // cannot inject path separators or shell-significant characters.
    const safeRuleId = String(ruleId).replace(/[^\w-]/g, '_');

    // Save the YAML to a temporary file.
    const tempDir = os.tmpdir();
    const tempFilePath = path.join(tempDir, `sigma_rule_${safeRuleId}_${Date.now()}.yml`);

    logger.debug(`${FILE_NAME}: Writing rule YAML to temp file: ${tempFilePath}`);

    try {
      fs.writeFileSync(tempFilePath, yamlResult.content);
    } catch (fileError) {
      logger.error(`${FILE_NAME}: Error writing temporary file: ${fileError.message}`);
      return {
        success: false,
        message: `Error preparing rule for conversion: ${fileError.message}`
      };
    }

    // Run sigma-cli; the finally block guarantees temp-file cleanup on every
    // path (success, CLI failure, or unexpected throw).
    let result;
    try {
      // Equivalent of: sigma convert -t "$backend" -p "$target" -f "$format"
      // — but passed as an argv array so no shell ever parses these values.
      const { execFileSync } = require('child_process');
      const cliArgs = ['convert', '-t', backend, '-p', target, '-f', format, tempFilePath];

      logger.debug(`${FILE_NAME}: Executing sigma-cli command: ${SIGMA_CLI_PATH} ${cliArgs.join(' ')}`);

      try {
        result = execFileSync(SIGMA_CLI_PATH, cliArgs, { encoding: 'utf8' });
      } catch (execError) {
        logger.error(`${FILE_NAME}: Sigma-cli execution error: ${execError.message}`);
        return {
          success: false,
          message: `Error during rule conversion: ${execError.message}`
        };
      }
    } finally {
      // Clean up temporary file (best-effort).
      try {
        fs.unlinkSync(tempFilePath);
      } catch (cleanupError) {
        logger.warn(`${FILE_NAME}: Error removing temporary file: ${cleanupError.message}`);
      }
    }

    const conversionDetails = { backend, target, format };

    // Get rule metadata for context.
    const ruleData = await convertSigmaRule(ruleId);

    if (!ruleData.success || !ruleData.rule) {
      logger.warn(`${FILE_NAME}: Failed to get metadata for rule ${ruleId}`);

      // Still return the conversion output, with placeholder metadata.
      return {
        success: true,
        output: result.trim(),
        rule: {
          id: ruleId,
          title: 'Unknown Rule',
          description: 'Rule metadata could not be retrieved'
        },
        conversionDetails
      };
    }

    // Return the output with rule metadata.
    return {
      success: true,
      output: result.trim(),
      rule: ruleData.rule,
      conversionDetails
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error converting rule ${ruleId} to backend: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);

    return {
      success: false,
      message: `Error converting rule: ${error.message}`
    };
  }
}
|
||||
|
||||
// Public API: backend conversion via sigma-cli.
module.exports = {
  convertRuleToBackend
};
|
422
src/services/sigma/sigma_converter_service.js
Normal file
422
src/services/sigma/sigma_converter_service.js
Normal file
|
@ -0,0 +1,422 @@
|
|||
//
|
||||
// sigma_converter_service.js
|
||||
// converts Sigma rules to a structured object
|
||||
//
|
||||
const logger = require('../../utils/logger');
|
||||
const yaml = require('js-yaml');
|
||||
const { findRuleById } = require('../../sigma_db/sigma_db_queries');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
|
||||
* Convert a Sigma rule to a structured object
|
||||
* Can be called with either a rule object or a rule ID
|
||||
*
|
||||
* @param {Object|String} input - Either a raw rule object or a rule ID
|
||||
* @param {Object} [config] - Optional configuration
|
||||
* @returns {Promise<Object>} Converted rule or result object
|
||||
*/
|
||||
/**
 * Convert a Sigma rule to a structured object.
 * Can be called with either a rule object or a rule ID.
 *
 * @param {Object|String} input - Either a raw rule object or a rule ID
 * @param {Object} [config] - Optional configuration (currently unused; kept for interface compatibility)
 * @returns {Promise<Object>} Converted rule or result object
 */
async function convertSigmaRule(input, config = null) {
  // A string input is a rule ID that must be fetched from the database;
  // anything else is treated as an in-memory raw rule object.
  if (typeof input === 'string') {
    return convertRuleById(input);
  }
  return convertRuleObject(input);
}

/**
 * Build the standard success envelope returned when a rule exists but its
 * content field is empty and it could not be rebuilt from parameters.
 *
 * @param {string} id - Rule ID to embed in the placeholder
 * @param {string} description - Human-readable explanation of the missing content
 * @param {string} [filePath] - Original file path if known
 * @returns {Object} Success result wrapping a placeholder rule
 */
function buildMissingContentResult(id, description, filePath) {
  return {
    success: true,
    rule: {
      id,
      title: 'Rule Found But Content Missing',
      description,
      author: 'Unknown',
      level: 'unknown',
      status: 'unknown',
      logsource: {},
      detection: { condition: 'Content missing' },
      falsepositives: ['N/A - Content missing'],
      tags: ['error', 'missing-content'],
      references: [],
      file_path: filePath || 'unknown'
    }
  };
}

/**
 * Try to reconstruct a rule from its stored parameters.
 *
 * @param {Object} rawRule - Raw rule record with an optional parameters map
 * @returns {Object|null} Success result with built_from_parameters set, or null if not possible
 */
function tryBuildFromParameters(rawRule) {
  if (rawRule.parameters && Object.keys(rawRule.parameters).length > 0) {
    const builtRule = buildRuleFromParameters(rawRule);
    if (builtRule) {
      return {
        success: true,
        rule: builtRule,
        built_from_parameters: true
      };
    }
  }
  return null;
}

/**
 * Convert a rule identified by its database ID.
 *
 * @param {string} ruleId - The rule ID to look up and convert
 * @returns {Promise<Object>} Conversion result object
 */
async function convertRuleById(ruleId) {
  try {
    logger.info(`Converting rule by ID: ${ruleId}`);

    // Find the rule in database.
    const rawRule = await findRuleById(ruleId);
    if (!rawRule) {
      logger.warn(`Rule with ID ${ruleId} not found`);
      return {
        success: false,
        message: `Rule with ID ${ruleId} not found`
      };
    }

    logger.debug(`Retrieved rule ${ruleId} from database: content ${rawRule.content ? 'present' : 'missing'}, parameters ${rawRule.parameters ? Object.keys(rawRule.parameters).length : 0}`);

    // Content can be missing outright or flagged by findRuleById.
    if (rawRule.content_missing || !rawRule.content) {
      logger.warn(`Rule with ID ${ruleId} has missing content, attempting to build from parameters`);

      const built = tryBuildFromParameters(rawRule);
      if (built) {
        logger.info(`Successfully built rule ${ruleId} from parameters`);
        return built;
      }

      logger.warn(`Could not build rule ${ruleId} from parameters, returning placeholder`);
      return buildMissingContentResult(
        ruleId,
        `The rule with ID ${ruleId} exists in the database, but its content field is empty. This may indicate a problem with the rule import process.`,
        rawRule.file_path
      );
    }

    // Process the raw rule.
    const processedRule = processRuleContent(rawRule);
    if (!processedRule) {
      return {
        success: false,
        message: `Failed to process rule with ID ${ruleId}`
      };
    }

    logger.debug(`Processing rule content for ${rawRule.id}:`);

    return {
      success: true,
      rule: processedRule
    };
  } catch (error) {
    logger.error(`Error converting rule by ID: ${error.message}`);
    return {
      success: false,
      message: `Error converting rule: ${error.message}`
    };
  }
}

/**
 * Convert an in-memory raw rule object.
 *
 * @param {Object} input - Raw rule object (may be null/undefined)
 * @returns {Promise<Object>} Conversion result object
 */
async function convertRuleObject(input) {
  try {
    if (!input) {
      return {
        success: false,
        message: 'No rule data provided'
      };
    }

    // Check for missing content.
    if (!input.content) {
      logger.warn('Rule object has missing content, attempting to build from parameters');

      const built = tryBuildFromParameters(input);
      if (built) {
        logger.info(`Successfully built rule ${input.id} from parameters`);
        return built;
      }

      logger.warn(`Could not build rule from parameters, returning placeholder`);
      return buildMissingContentResult(
        input.id || 'unknown',
        'The rule exists in the database, but its content field is empty. This may indicate a problem with the rule import process.',
        input.file_path
      );
    }

    const processedRule = processRuleContent(input);
    if (!processedRule) {
      return {
        success: false,
        message: 'Failed to process rule object'
      };
    }

    return {
      success: true,
      rule: processedRule
    };
  } catch (error) {
    logger.error(`Error processing rule object: ${error.message}`);
    return {
      success: false,
      message: `Error processing rule: ${error.message}`
    };
  }
}
|
||||
|
||||
/**
 * Process rule content into a structured object.
 *
 * Parses the rule's YAML `content` with a three-stage fallback chain:
 *   1. standard single-document parse (yaml.load)
 *   2. multi-document parse (yaml.loadAll), keeping the first document
 *   3. manual regex extraction of key fields (extractFieldsManually)
 * When `content` is absent, attempts to rebuild the rule from its stored
 * `parameters`; otherwise returns a placeholder object tagged
 * 'missing-content' so callers still receive a well-formed rule.
 *
 * @param {Object} rawRule - The raw rule object (expects `id`, `content`,
 *   and optionally `parameters` and `file_path`)
 * @returns {Object|null} Processed rule object, or null when rawRule is null
 */
function processRuleContent(rawRule) {
  if (!rawRule) {
    logger.warn('Cannot convert rule: rule object is null');
    return null;
  }

  if (!rawRule.content) {
    logger.warn('Cannot convert rule: missing content in rule data');

    // Check if we have parameters and try to build from them
    if (rawRule.parameters && Object.keys(rawRule.parameters).length > 0) {
      logger.info(`Attempting to build rule ${rawRule.id} from parameters`);
      return buildRuleFromParameters(rawRule);
    }

    // No content and no parameters: return a clearly-labelled placeholder
    return {
      id: rawRule.id || 'unknown',
      title: 'Error: Missing Rule Content',
      description: 'The rule content could not be found in the database. This may indicate a problem with the rule import process or a corruption in the database.',
      level: 'unknown',
      file_path: rawRule.file_path || 'unknown',
      falsepositives: ['N/A - Content missing'],
      tags: ['error', 'missing-content'],
      references: [],
      detection: { condition: 'Content missing' }
    };
  }

  try {
    // Parse the YAML content
    let parsedRule;
    try {
      // Log the content for debugging
      logger.debug(`Parsing YAML content for rule ${rawRule.id}, content length: ${rawRule.content.length}`);

      // Try different YAML parsing approaches, strictest first
      try {
        parsedRule = yaml.load(rawRule.content);
      } catch (yamlError) {
        logger.warn(`Standard YAML parsing failed for ${rawRule.id}: ${yamlError.message}`);

        // Try with more tolerant parsing
        try {
          // Try multi-document loading
          const docs = [];
          yaml.loadAll(rawRule.content, (doc) => {
            if (doc) docs.push(doc);
          });

          if (docs.length > 0) {
            parsedRule = docs[0]; // Take the first document
            logger.debug(`Multi-document YAML parsing succeeded for ${rawRule.id}, found ${docs.length} documents`);
          } else {
            throw new Error('No documents found in multi-document parse');
          }
        } catch (multiError) {
          logger.warn(`Multi-document YAML parsing failed for ${rawRule.id}: ${multiError.message}`);

          // Last resort: manual extraction of key fields
          parsedRule = extractFieldsManually(rawRule.content, rawRule.id);
        }
      }

      if (!parsedRule) {
        logger.warn(`Rule parsing resulted in null object for ID: ${rawRule.id}`);
        parsedRule = {};
      }
    } catch (yamlError) {
      // Unexpected failure inside the fallback chain itself; continue with
      // an empty parse so the defaults below still produce a usable object
      logger.error(`YAML parsing error: ${yamlError.message}`);
      logger.debug(`Problematic content (first 200 chars): ${rawRule.content.substring(0, 200)}`);
      parsedRule = {};
    }

    // Create a new object combining database fields and parsed content;
    // the database's id and file_path take precedence over parsed values
    const convertedRule = {
      id: rawRule.id || parsedRule.id || 'unknown',
      title: parsedRule.title || 'Untitled Rule',
      description: parsedRule.description || 'No description provided',
      author: parsedRule.author || 'Unknown',
      level: parsedRule.level || 'unknown',
      status: parsedRule.status || 'unknown',
      logsource: parsedRule.logsource || {},
      detection: parsedRule.detection || {},
      falsepositives: parsedRule.falsepositives || [],
      tags: parsedRule.tags || [],
      references: parsedRule.references || [],
      file_path: rawRule.file_path || 'unknown'
    };

    logger.info(`Successfully converted rule ${convertedRule.id}`);
    return convertedRule;
  } catch (error) {
    // Absolute fallback: never throw to the caller, return an error-shaped rule
    logger.error(`Error parsing rule: ${error.message}`);
    return {
      id: rawRule.id || 'unknown',
      title: 'Error: Could not parse rule',
      description: `Error parsing rule: ${error.message}`,
      level: 'unknown',
      file_path: rawRule.file_path || 'unknown',
      falsepositives: [],
      tags: ['error', 'parse-error'],
      references: [],
      detection: { condition: 'Parse error' }
    };
  }
}
|
||||
|
||||
/**
 * Fallback extraction of common Sigma fields via line-oriented regexes,
 * used when both YAML parsing attempts on the rule content have failed.
 *
 * @param {string} content - The raw YAML content
 * @param {string} ruleId - The rule ID
 * @returns {Object} Object containing the rule id plus any fields found
 */
function extractFieldsManually(content, ruleId) {
  logger.debug(`Attempting manual field extraction for rule ${ruleId}`);

  const extracted = { id: ruleId };

  // Each entry maps a field name to a regex capturing its "key: value" line
  const fieldPatterns = [
    ['title', /title:\s*(.+)$/m],
    ['description', /description:\s*(.+)$/m],
    ['author', /author:\s*(.+)$/m],
    ['level', /level:\s*(.+)$/m],
    ['status', /status:\s*(.+)$/m],
  ];

  for (const [field, pattern] of fieldPatterns) {
    const match = content.match(pattern);
    if (match && match[1]) {
      extracted[field] = match[1].trim();
    }
  }

  // Subtract one for the always-present id key
  logger.debug(`Manual extraction found ${Object.keys(extracted).length - 1} fields for rule ${ruleId}`);

  return extracted;
}
|
||||
|
||||
/**
 * Build a rule object from its stored key/value parameters when the raw
 * YAML content is missing.
 *
 * Reconstructs top-level fields, array fields (parsed from JSON-encoded
 * strings when needed), and nested `logsource.*` / `detection.*` dotted keys.
 *
 * @param {Object} rawRule - The raw rule object with a `parameters` map
 * @returns {Object|null} Reconstructed rule object, or null when rawRule
 *   or its parameters are missing
 */
function buildRuleFromParameters(rawRule) {
  // Guard BEFORE any property access: the previous version logged
  // rawRule.id first, so a null rawRule threw a TypeError instead of
  // returning null as this guard intends.
  if (!rawRule || !rawRule.parameters) {
    logger.warn(`Cannot build rule: missing parameters for rule ${rawRule ? rawRule.id : 'unknown'}`);
    return null;
  }

  logger.info(`Building rule ${rawRule.id} from parameters`);
  logger.debug(`Found ${Object.keys(rawRule.parameters).length} parameters for rule ${rawRule.id}`);

  // Initialize a new rule object with essential properties
  const reconstructedRule = {
    id: rawRule.id,
    title: rawRule.parameters.title || 'Unknown Title',
    description: rawRule.parameters.description || 'No description available',
    author: rawRule.parameters.author || 'Unknown',
    file_path: rawRule.file_path || 'unknown',
    level: rawRule.parameters.level || 'unknown',
    status: rawRule.parameters.status || 'unknown',
    logsource: {},
    detection: { condition: rawRule.parameters['detection.condition'] || 'unknown' },
    falsepositives: [],
    tags: [],
    references: []
  };

  // Process parameters to rebuild nested objects
  Object.entries(rawRule.parameters).forEach(([key, value]) => {
    if (key === 'falsepositives' || key === 'tags' || key === 'references') {
      // Array fields may be stored as real arrays or JSON-encoded strings
      if (Array.isArray(value)) {
        reconstructedRule[key] = value;
      } else if (typeof value === 'string') {
        try {
          const parsed = JSON.parse(value);
          reconstructedRule[key] = Array.isArray(parsed) ? parsed : [value];
        } catch (e) {
          // Not JSON — treat the raw string as a single-element list
          reconstructedRule[key] = [value];
        }
      }
    } else if (key.startsWith('logsource.')) {
      // Flattened logsource property, e.g. "logsource.category"
      const prop = key.substring('logsource.'.length);
      reconstructedRule.logsource[prop] = value;
    } else if (key.startsWith('detection.') && key !== 'detection.condition') {
      // Flattened detection property; may nest further, e.g. "detection.sel.Image"
      const prop = key.substring('detection.'.length);
      const parts = prop.split('.');

      // Walk/create intermediate objects down to the leaf
      let current = reconstructedRule.detection;
      for (let i = 0; i < parts.length - 1; i++) {
        if (!current[parts[i]]) {
          current[parts[i]] = {};
        }
        current = current[parts[i]];
      }

      current[parts[parts.length - 1]] = value;
    }
  });

  logger.debug(`Reconstructed rule structure for ${rawRule.id}: ${JSON.stringify({
    id: reconstructedRule.id,
    title: reconstructedRule.title,
    fields: Object.keys(reconstructedRule)
  })}`);

  return reconstructedRule;
}
|
||||
|
||||
/**
 * Extract a readable detection condition string from a converted rule,
 * substituting an explanatory message for each level of missing data.
 *
 * @param {Object} rule - The converted rule object
 * @returns {String} Human-readable condition
 */
function extractDetectionCondition(rule) {
  if (!rule) return 'No rule data available';

  const detection = rule.detection;
  if (!detection) return 'No detection information available';

  // A falsy condition (absent or empty) gets the placeholder text
  return detection.condition ? detection.condition : 'No condition specified';
}
|
||||
|
||||
// Public API of the converter service; processRuleContent and
// extractFieldsManually are internal helpers and intentionally not exported.
module.exports = {
  convertSigmaRule,
  extractDetectionCondition,
  buildRuleFromParameters
};
|
150
src/services/sigma/sigma_details_service.js
Normal file
150
src/services/sigma/sigma_details_service.js
Normal file
|
@ -0,0 +1,150 @@
|
|||
/**
|
||||
* sigma_details_service.js
|
||||
*
|
||||
* This service provides functionality for retrieving and explaining Sigma rules.
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { convertSigmaRule, extractDetectionCondition } = require('./sigma_converter_service');
|
||||
const { debugRuleContent, getRuleYamlContent } = require('../../sigma_db/sigma_db_queries');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
 * Explains a Sigma rule by producing a simplified, human-readable format.
 * Runs content diagnostics first (logged only), then converts the rule and
 * maps it onto a flat explanation object with safe property access.
 *
 * @param {string} ruleId - The ID of the rule to explain
 * @returns {Promise<Object>} Result object with success flag and explanation or error message
 */
async function explainSigmaRule(ruleId) {
  if (!ruleId) {
    logger.warn(`${FILE_NAME}: Cannot explain rule: Missing rule ID`);
    return { success: false, message: 'Missing rule ID' };
  }

  logger.info(`${FILE_NAME}: Running diagnostics for rule: ${ruleId}`);
  logger.info(`${FILE_NAME}: Explaining rule ${ruleId}`);

  try {
    // Diagnostics are informational only; the result is just logged
    const diagnostics = await debugRuleContent(ruleId);
    logger.debug(`${FILE_NAME}: Diagnostic result: ${JSON.stringify(diagnostics || {})}`);

    // Convert the rule ID to a structured object
    const converted = await convertSigmaRule(ruleId);
    if (!converted.success) {
      logger.warn(`${FILE_NAME}: Failed to convert rule ${ruleId}: ${converted.message}`);
      return {
        success: false,
        message: converted.message || `Failed to parse rule with ID ${ruleId}`
      };
    }

    const rule = converted.rule;

    // Extra safety check
    if (!rule) {
      logger.error(`${FILE_NAME}: Converted rule is null for ID ${ruleId}`);
      return { success: false, message: `Failed to process rule with ID ${ruleId}` };
    }

    // Normalize false positives to an array regardless of stored shape
    let falsePositives;
    if (Array.isArray(rule.falsepositives)) {
      falsePositives = rule.falsepositives;
    } else if (typeof rule.falsepositives === 'string') {
      falsePositives = [rule.falsepositives];
    } else {
      falsePositives = ['None specified'];
    }

    // Simplified explanation with safe access to every property
    const explanation = {
      id: rule.id || ruleId,
      title: rule.title || 'Untitled Rule',
      description: rule.description || 'No description provided',
      author: rule.author || 'Unknown author',
      severity: rule.level || 'Unknown',
      detectionExplanation: extractDetectionCondition(rule),
      falsePositives,
      tags: Array.isArray(rule.tags) ? rule.tags : [],
      references: Array.isArray(rule.references) ? rule.references : []
    };

    logger.info(`${FILE_NAME}: Successfully explained rule ${ruleId}`);
    logger.debug(`${FILE_NAME}: Explanation properties: ${Object.keys(explanation).join(', ')}`);

    return { success: true, explanation };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error explaining rule: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    return { success: false, message: `Error explaining rule: ${error.message}` };
  }
}
|
||||
|
||||
/**
 * Gets the raw YAML content of a Sigma rule from the database.
 * Empty content is reported as success with a warning, not as an error.
 *
 * @param {string} ruleId - The ID of the rule to get YAML for
 * @returns {Promise<Object>} Result object with success flag and YAML content or error message
 */
async function getSigmaRuleYaml(ruleId) {
  if (!ruleId) {
    logger.warn(`${FILE_NAME}: Cannot get YAML: Missing rule ID`);
    return { success: false, message: 'Missing rule ID' };
  }

  logger.info(`${FILE_NAME}: Getting YAML content for rule: ${ruleId}`);

  try {
    const result = await getRuleYamlContent(ruleId);

    if (!result.success) {
      logger.warn(`${FILE_NAME}: Failed to retrieve YAML for rule ${ruleId}: ${result.message}`);
      return {
        success: false,
        message: result.message || `Failed to retrieve YAML for rule with ID ${ruleId}`
      };
    }

    if (!result.content) {
      // Rule exists but has no content: success with an explicit warning
      logger.warn(`${FILE_NAME}: YAML content is empty for rule ${ruleId}`);
      return {
        success: true,
        yaml: '',
        warning: 'YAML content is empty for this rule'
      };
    }

    logger.debug(`${FILE_NAME}: Successfully retrieved YAML content with length: ${result.content.length}`);

    return { success: true, yaml: result.content };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error retrieving YAML: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    return { success: false, message: `Error retrieving YAML: ${error.message}` };
  }
}
|
||||
|
||||
// Public API consumed by the Sigma Slack command handlers.
module.exports = {
  explainSigmaRule,
  getSigmaRuleYaml
};
|
188
src/services/sigma/sigma_repository_service.js
Normal file
188
src/services/sigma/sigma_repository_service.js
Normal file
|
@ -0,0 +1,188 @@
|
|||
/**
|
||||
* sigma_repository_service.js
|
||||
*
|
||||
* This service manages the Sigma rule repository and database updates.
|
||||
* It provides functions to clone/update the repository and run the database
|
||||
* initialization script.
|
||||
*/
|
||||
const { spawn } = require('child_process');
|
||||
const util = require('util');
|
||||
const { exec } = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { SIGMA_REPO_DIR } = require('../../config/appConfig');
|
||||
const appConfig = require('../../config/appConfig');
|
||||
const logger = require('../../utils/logger');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
// Promisify exec for async/await usage
|
||||
const execPromise = util.promisify(exec);
|
||||
|
||||
/**
 * Clones or updates the Sigma repository.
 * Creates the repository's parent directory if it doesn't exist; clones on
 * first run, pulls on subsequent runs.
 *
 * @returns {Promise<boolean>} true when the repository is up-to-date, false on failure
 */
async function updateSigmaRepo() {
  logger.debug(`${FILE_NAME}: Starting Sigma repository update process`);

  try {
    // Ensure the parent directory exists
    const parentDir = path.dirname(SIGMA_REPO_DIR);
    if (!fs.existsSync(parentDir)) {
      logger.debug(`${FILE_NAME}: Creating parent directory: ${parentDir}`);
      fs.mkdirSync(parentDir, { recursive: true });
    }

    if (!fs.existsSync(SIGMA_REPO_DIR)) {
      logger.info(`${FILE_NAME}: Cloning Sigma repository...`);

      // Read config to get repo URL
      const repoUrl = appConfig.SIGMA_REPO_CONFIG.url;
      if (!repoUrl) {
        throw new Error('Repository URL not found in configuration');
      }

      logger.debug(`${FILE_NAME}: Using repository URL: ${repoUrl}`);
      // Quote both arguments so a URL or path containing spaces or shell
      // metacharacters cannot break (or alter) the command.
      const cloneResult = await execPromise(`git clone "${repoUrl}" "${SIGMA_REPO_DIR}"`);

      logger.debug(`${FILE_NAME}: Clone output: ${cloneResult.stdout}`);
    } else {
      logger.info(`${FILE_NAME}: Updating existing Sigma repository...`);

      // Check if it's actually a git repository
      if (!fs.existsSync(path.join(SIGMA_REPO_DIR, '.git'))) {
        logger.warn(`${FILE_NAME}: Directory exists but is not a git repository: ${SIGMA_REPO_DIR}`);
        throw new Error('Directory exists but is not a git repository');
      }

      // `git -C <dir>` replaces the `cd <dir> && ...` pattern, which breaks
      // on paths containing spaces and relies on shell state.
      const pullResult = await execPromise(`git -C "${SIGMA_REPO_DIR}" pull`);
      logger.debug(`${FILE_NAME}: Pull output: ${pullResult.stdout}`);
    }

    logger.info(`${FILE_NAME}: Sigma repository is up-to-date`);
    return true;
  } catch (error) {
    logger.error(`${FILE_NAME}: Error updating Sigma repository: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    return false;
  }
}
|
||||
|
||||
/**
 * Updates the Sigma database by running the initialization script
 * (db/init-sigma-db.js) in a spawned child Node process, streaming its
 * output into the logger.
 *
 * @returns {Promise<boolean>} Resolves true on exit code 0; rejects when the
 *   script is missing, cannot be spawned, or exits non-zero
 */
async function updateSigmaDatabase() {
  logger.info(`${FILE_NAME}: Starting database update process`);

  return new Promise((resolve, reject) => {
    const scriptPath = path.join(__dirname, '..', '..', 'db', 'init-sigma-db.js');

    // Verify the script exists before trying to run it
    if (!fs.existsSync(scriptPath)) {
      logger.error(`${FILE_NAME}: Database initialization script not found at: ${scriptPath}`);
      reject(new Error(`Database initialization script not found at: ${scriptPath}`));
      return;
    }

    logger.info(`${FILE_NAME}: Running database update script: ${scriptPath}`);

    const updateProcess = spawn('node', [scriptPath], {
      stdio: 'pipe' // Capture output instead of inheriting
    });

    // Capture and log stdout at debug level
    updateProcess.stdout.on('data', (data) => {
      logger.debug(`${FILE_NAME}: DB Update stdout: ${data.toString().trim()}`);
    });

    // Capture and log stderr at warn level
    updateProcess.stderr.on('data', (data) => {
      logger.warn(`${FILE_NAME}: DB Update stderr: ${data.toString().trim()}`);
    });

    // Settle the promise from the child's exit code
    updateProcess.on('close', (code) => {
      if (code === 0) {
        logger.info(`${FILE_NAME}: Database update completed successfully`);
        resolve(true);
      } else {
        logger.error(`${FILE_NAME}: Database update failed with exit code ${code}`);
        reject(new Error(`Update failed with exit code ${code}`));
      }
    });

    // 'error' fires when the process could not be spawned at all
    updateProcess.on('error', (err) => {
      logger.error(`${FILE_NAME}: Failed to start database update process: ${err.message}`);
      reject(err);
    });
  });
}
|
||||
|
||||
/**
 * Checks the status of the Sigma repository.
 *
 * @returns {Promise<Object>} `{ exists: false, message }` when not cloned;
 *   `{ exists, isRepo: false, message }` when the directory is not a git
 *   repository; otherwise `{ exists, isRepo, lastCommit, branch, path }`.
 *   On git command failure returns `{ exists: true, error }`.
 */
async function getSigmaRepoStatus() {
  logger.debug(`${FILE_NAME}: Checking Sigma repository status`);

  try {
    if (!fs.existsSync(SIGMA_REPO_DIR)) {
      logger.warn(`${FILE_NAME}: Sigma repository directory does not exist: ${SIGMA_REPO_DIR}`);
      return {
        exists: false,
        message: 'Repository has not been cloned yet'
      };
    }

    // Check if it's a git repository
    if (!fs.existsSync(path.join(SIGMA_REPO_DIR, '.git'))) {
      logger.warn(`${FILE_NAME}: Directory exists but is not a git repository: ${SIGMA_REPO_DIR}`);
      return {
        exists: true,
        isRepo: false,
        message: 'Directory exists but is not a git repository'
      };
    }

    // Get last commit info. `git -C` with a quoted path replaces the
    // `cd dir && ...` pattern, which breaks on paths containing spaces.
    const lastCommitInfo = await execPromise(`git -C "${SIGMA_REPO_DIR}" log -1 --format="%h|%an|%ad|%s"`);

    // The subject is the last field and may itself contain '|', so rejoin
    // the remainder instead of letting destructuring truncate it.
    const parts = lastCommitInfo.stdout.trim().split('|');
    const [hash, author, date] = parts;
    const subject = parts.slice(3).join('|');

    // Get branch info
    const branchInfo = await execPromise(`git -C "${SIGMA_REPO_DIR}" branch --show-current`);
    const currentBranch = branchInfo.stdout.trim();

    return {
      exists: true,
      isRepo: true,
      lastCommit: {
        hash,
        author,
        date,
        subject
      },
      branch: currentBranch,
      path: SIGMA_REPO_DIR
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error getting repository status: ${error.message}`);
    return {
      exists: true,
      error: error.message
    };
  }
}
|
||||
|
||||
// Public API: repository sync, database rebuild, and status reporting.
module.exports = {
  updateSigmaRepo,
  updateSigmaDatabase,
  getSigmaRepoStatus
};
|
214
src/services/sigma/sigma_search_service.js
Normal file
214
src/services/sigma/sigma_search_service.js
Normal file
|
@ -0,0 +1,214 @@
|
|||
/**
|
||||
* sigma_search_service.js
|
||||
*
|
||||
* This service provides functionality for searching Sigma rules by keywords.
|
||||
* It processes search results and returns them in a structured format.
|
||||
* Supports pagination for large result sets.
|
||||
*/
|
||||
const { searchRules } = require('../../sigma_db/sigma_db_queries');
|
||||
const logger = require('../../utils/logger');
|
||||
const { convertSigmaRule } = require('./sigma_converter_service');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
 * Searches for Sigma rules by keyword and processes the results.
 * Invalid pagination arguments are coerced to safe defaults rather than
 * rejected; past-the-end pages return success with an empty result set.
 *
 * @param {string} keyword - The keyword to search for
 * @param {number} page - Page number (1-based index, default: 1)
 * @param {number} pageSize - Number of results per page (default: 10, max: 100)
 * @returns {Promise<Object>} Result object with success flag and processed results with pagination info
 */
async function searchSigmaRules(keyword, page = 1, pageSize = 10) {
  if (!keyword || typeof keyword !== 'string') {
    logger.warn(`${FILE_NAME}: Cannot search rules: Missing or invalid keyword`);
    return {
      success: false,
      message: 'Missing or invalid search keyword'
    };
  }

  // Validate pagination parameters (coerce, don't fail)
  if (typeof page !== 'number' || page < 1) {
    logger.warn(`${FILE_NAME}: Invalid page number: ${page}, defaulting to 1`);
    page = 1;
  }

  if (typeof pageSize !== 'number' || pageSize < 1 || pageSize > 100) {
    logger.warn(`${FILE_NAME}: Invalid page size: ${pageSize}, defaulting to 10`);
    pageSize = 10;
  }

  // Trim the keyword to prevent accidental whitespace issues
  const trimmedKeyword = keyword.trim();
  if (trimmedKeyword.length === 0) {
    logger.warn(`${FILE_NAME}: Cannot search rules: Empty keyword after trimming`);
    return {
      success: false,
      message: 'Search keyword cannot be empty'
    };
  }

  // Calculate the offset based on page number
  const offset = (page - 1) * pageSize;

  logger.info(`${FILE_NAME}: Searching for Sigma rules with keyword: "${trimmedKeyword}" (page ${page}, size ${pageSize}, offset ${offset})`);

  try {
    // Pass pageSize and offset to the database query
    const searchResult = await searchRules(trimmedKeyword, pageSize, offset);

    // Defensive handling of possible return formats from searchRules
    let allResults = [];
    let totalCount = 0;

    // Log what we actually received for debugging
    logger.debug(`${FILE_NAME}: Search result type: ${typeof searchResult}, isArray: ${Array.isArray(searchResult)}`);

    // Handle different possible return formats
    if (searchResult) {
      if (Array.isArray(searchResult)) {
        // Direct array of results.
        // NOTE(review): in this branch totalCount stays 0, so the pagination
        // block below reports totalPages/totalResults of 0 even though
        // results are present — confirm whether searchRules can still return
        // a bare array, and if so what the intended pagination is.
        allResults = searchResult;
        logger.debug(`${FILE_NAME}: Received array of ${allResults.length} results`);
      } else if (typeof searchResult === 'object') {
        // Object with results property
        if (Array.isArray(searchResult.results)) {
          allResults = searchResult.results;
          totalCount = searchResult.totalCount || 0;
          logger.debug(`${FILE_NAME}: Received object with ${allResults.length} results of ${totalCount} total matches`);
        } else if (searchResult.totalCount !== undefined) {
          // Object might have a different structure
          totalCount = searchResult.totalCount;
          logger.debug(`${FILE_NAME}: Received object with totalCount ${totalCount}`);
        }
      }
    }

    // Log what we extracted
    logger.debug(`${FILE_NAME}: Extracted ${allResults.length} results for page ${page} of total ${totalCount}`);

    if (allResults.length === 0 && totalCount === 0) {
      logger.info(`${FILE_NAME}: No rules found matching "${trimmedKeyword}"`);
      return {
        success: true,
        results: [],
        message: `No rules found matching "${trimmedKeyword}"`,
        pagination: {
          currentPage: 1,
          pageSize: pageSize,
          totalPages: 0,
          totalResults: 0,
          hasMore: false
        }
      };
    }

    // Calculate total pages and pagination info based on total count from database
    const totalPages = Math.ceil(totalCount / pageSize);
    const hasMore = (offset + pageSize) < totalCount;

    // Past-the-end page: return empty results but keep pagination metadata
    if (offset >= totalCount && totalCount > 0) {
      logger.warn(`${FILE_NAME}: Page ${page} exceeds available results (total: ${totalCount})`);
      return {
        success: true,
        results: [],
        message: `No results on page ${page}. Try a previous page.`,
        pagination: {
          currentPage: page,
          pageSize: pageSize,
          totalPages: totalPages,
          totalResults: totalCount,
          hasMore: false
        }
      };
    }

    // If we have results, include them with pagination info
    logger.debug(`${FILE_NAME}: Returning ${allResults.length} results with pagination info (page ${page}/${totalPages}, total: ${totalCount})`);

    return {
      success: true,
      results: allResults,
      count: allResults.length,
      pagination: {
        currentPage: page,
        pageSize: pageSize,
        totalPages: totalPages,
        totalResults: totalCount,
        hasMore: hasMore
      }
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error searching for rules: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    return {
      success: false,
      message: `Error searching for rules: ${error.message}`
    };
  }
}
|
||||
|
||||
/**
 * Enhanced search that returns fully converted rule objects with pagination
 * support. More expensive than the basic search: each hit is individually
 * converted, and hits that fail conversion are logged and dropped.
 *
 * @param {string} keyword - The keyword to search for
 * @param {number} page - Page number (1-based index, default: 1)
 * @param {number} pageSize - Number of results per page (default: 10)
 * @returns {Promise<Object>} Result object with success flag and fully converted rule objects with pagination info
 */
async function searchAndConvertRules(keyword, page = 1, pageSize = 10) {
  try {
    // Basic keyword search first (already paginated)
    const baseResult = await searchSigmaRules(keyword, page, pageSize);

    // Pass failures and empty pages straight through
    if (!baseResult.success || !baseResult.results || baseResult.results.length === 0) {
      return baseResult;
    }

    logger.debug(`${FILE_NAME}: Converting ${baseResult.results.length} search results to full rule objects`);

    // Convert each hit; failures are logged and skipped, never fatal
    const fullRules = [];
    for (const hit of baseResult.results) {
      try {
        const conversion = await convertSigmaRule(hit.id);
        if (conversion.success && conversion.rule) {
          fullRules.push(conversion.rule);
        } else {
          logger.warn(`${FILE_NAME}: Failed to convert rule ${hit.id}: ${conversion.message || 'Unknown error'}`);
        }
      } catch (conversionError) {
        logger.error(`${FILE_NAME}: Error converting rule ${hit.id}: ${conversionError.message}`);
      }
    }

    logger.info(`${FILE_NAME}: Successfully converted ${fullRules.length} of ${baseResult.results.length} search results`);

    // Carry the pagination information over from the basic search
    return {
      success: true,
      results: fullRules,
      count: fullRules.length,
      originalCount: baseResult.results.length,
      pagination: baseResult.pagination
    };
  } catch (error) {
    logger.error(`${FILE_NAME}: Error in searchAndConvertRules: ${error.message}`);
    logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
    return {
      success: false,
      message: `Error searching and converting rules: ${error.message}`
    };
  }
}
|
||||
|
||||
// Public API: basic keyword search and the heavier search-plus-convert variant.
module.exports = {
  searchSigmaRules,
  searchAndConvertRules
};
|
53
src/services/sigma/sigma_stats_service.js
Normal file
53
src/services/sigma/sigma_stats_service.js
Normal file
|
@ -0,0 +1,53 @@
|
|||
/**
|
||||
* sigma_stats_service.js
|
||||
*
|
||||
* Service for retrieving and processing Sigma rule database statistics
|
||||
* Provides aggregated statistical information about the rule database
|
||||
*/
|
||||
const logger = require('../../utils/logger');
|
||||
const { getStatsFromDatabase } = require('../../sigma_db/sigma_db_queries');
|
||||
|
||||
const { getFileName } = require('../../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
 * Collect aggregate statistics about the Sigma rule database.
 *
 * Delegates the actual querying to getStatsFromDatabase() and normalizes
 * the outcome into a { success, stats | message } result object so callers
 * never have to catch.
 *
 * @returns {Promise<Object>} { success: true, stats } on success,
 *                            { success: false, message } on failure
 */
async function getSigmaStats() {
    logger.info(`${FILE_NAME}: Getting Sigma rule database statistics`);

    try {
        const { success, stats, message } = await getStatsFromDatabase();

        // Propagate a query-level failure without throwing
        if (!success) {
            logger.error(`${FILE_NAME}: Failed to retrieve statistics: ${message}`);
            return { success: false, message };
        }

        logger.info(`${FILE_NAME}: Successfully collected database statistics`);
        return { success: true, stats };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error processing statistics: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);

        return {
            success: false,
            message: `Error processing statistics: ${error.message}`
        };
    }
}
|
||||
|
||||
// Public API of the stats service
module.exports = {
    getSigmaStats
};
|
93
src/sigma_db/sigma_db_connection.js
Normal file
93
src/sigma_db/sigma_db_connection.js
Normal file
|
@ -0,0 +1,93 @@
|
|||
/**
|
||||
* sigma_db_connection.js
|
||||
*
|
||||
* This module manages connections to the SQLite database for Sigma rules.
|
||||
* It provides functions for creating and closing database connections with Promise support.
|
||||
*/
|
||||
const sqlite3 = require('sqlite3').verbose();
|
||||
const { DB_PATH } = require('../config/appConfig');
|
||||
const path = require('path');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
/**
 * Open a Promise-based connection to the Sigma SQLite database.
 *
 * The resolved db handle is augmented with getAsync/allAsync helpers that
 * wrap sqlite3's callback-style get/all calls in Promises for async/await
 * consumers.
 *
 * @returns {Promise<Object>} resolves with the connected sqlite3 Database
 */
function getDbConnection() {
    const absolutePath = path.resolve(DB_PATH);
    logger.debug(`${FILE_NAME}: Attempting to connect to database at path: ${absolutePath}`);

    return new Promise((resolve, reject) => {
        const db = new sqlite3.Database(DB_PATH, (connectErr) => {
            if (connectErr) {
                logger.error(`${FILE_NAME}: Database connection error: ${connectErr.message}`);
                reject(connectErr);
                return;
            }

            logger.debug(`${FILE_NAME}: Successfully connected to database at path: ${DB_PATH}`);

            // Promisified single-row fetch
            db.getAsync = (sql, params) =>
                new Promise((res, rej) => {
                    db.get(sql, params, (err, row) => {
                        if (err) {
                            logger.error(`${FILE_NAME}: Error in getAsync: ${err.message}`);
                            rej(err);
                        } else {
                            res(row);
                        }
                    });
                });

            // Promisified multi-row fetch
            db.allAsync = (sql, params) =>
                new Promise((res, rej) => {
                    db.all(sql, params, (err, rows) => {
                        if (err) {
                            logger.error(`${FILE_NAME}: Error in allAsync: ${err.message}`);
                            rej(err);
                        } else {
                            res(rows);
                        }
                    });
                });

            resolve(db);
        });
    });
}
|
||||
|
||||
/**
 * Close a database connection, resolving once the handle is released.
 * A null/undefined handle is treated as already closed.
 *
 * @param {Object} db - sqlite3 Database handle (may be null)
 * @returns {Promise<void>} resolves on close, rejects on close failure
 */
function closeDbConnection(db) {
    // Nothing to do for a missing handle
    if (!db) {
        logger.debug(`${FILE_NAME}: No database connection to close`);
        return Promise.resolve();
    }

    return new Promise((resolve, reject) => {
        db.close((err) => {
            if (err) {
                logger.error(`${FILE_NAME}: Error closing database: ${err.message}`);
                reject(err);
                return;
            }
            logger.debug(`${FILE_NAME}: Database connection closed successfully`);
            resolve();
        });
    });
}
|
||||
|
||||
// Public API: connection lifecycle helpers
module.exports = {
    getDbConnection,
    closeDbConnection
};
|
560
src/sigma_db/sigma_db_initialize.js
Normal file
560
src/sigma_db/sigma_db_initialize.js
Normal file
|
@ -0,0 +1,560 @@
|
|||
//
|
||||
// sigma_db_initialize.js
|
||||
// This script initializes the Sigma database by importing rules from the Sigma repository.
|
||||
// and creating the SQLite database.
|
||||
//
|
||||
const fs = require('fs');
|
||||
const util = require('util');
|
||||
const sqlite3 = require('sqlite3').verbose();
|
||||
const yaml = require('js-yaml');
|
||||
const glob = util.promisify(require('glob'));
|
||||
|
||||
const logger = require('../utils/logger');
|
||||
const { SIGMA_REPO_DIR, DB_PATH } = require('../config/appConfig');
|
||||
const { updateSigmaRepo } = require('../services/sigma/sigma_repository_service');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
|
||||
// Open the SQLite database and enable foreign key enforcement before
// handing the connection back to the caller
function createDbConnection() {
    return new Promise((resolve, reject) => {
        const db = new sqlite3.Database(DB_PATH, (connectErr) => {
            if (connectErr) {
                reject(connectErr);
                return;
            }

            logger.info(`${FILE_NAME}: Connected to SQLite database at ${DB_PATH}`);

            // Foreign keys are off by default in SQLite; turn them on so
            // rule_parameters rows cannot outlive their parent rule
            db.run('PRAGMA foreign_keys = ON;', (pragmaErr) => {
                if (pragmaErr) {
                    logger.error(`${FILE_NAME}: Failed to enable foreign key constraints: ${pragmaErr.message}`);
                    reject(pragmaErr);
                    return;
                }
                logger.info(`${FILE_NAME}: Foreign key constraints enabled`);
                resolve(db);
            });
        });
    });
}
|
||||
|
||||
// Rebuild the database schema from scratch: drop any existing tables and
// recreate them in dependency order
async function initializeDatabase(db) {
    // Tiny promisified wrapper around db.run for sequential DDL statements
    const run = (sql) =>
        new Promise((resolve, reject) => {
            db.run(sql, (err) => (err ? reject(err) : resolve()));
        });

    // Drop the child table first so the parent can be dropped cleanly
    await run('DROP TABLE IF EXISTS rule_parameters');
    await run('DROP TABLE IF EXISTS sigma_rules');

    // Base table: one row per rule, raw YAML kept in `content`
    await run(`
        CREATE TABLE sigma_rules (
            id TEXT PRIMARY KEY,
            file_path TEXT,
            content TEXT,
            date DATETIME DEFAULT CURRENT_TIMESTAMP
        )
    `);

    // Flattened key/value view of each rule; cascades away with its rule
    await run(`
        CREATE TABLE rule_parameters (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            rule_id TEXT,
            param_name TEXT,
            param_value TEXT,
            param_type TEXT,
            FOREIGN KEY (rule_id) REFERENCES sigma_rules(id) ON DELETE CASCADE
        )
    `);

    logger.info(`${FILE_NAME}: Database schema initialized`);
}
|
||||
|
||||
// A document counts as a Sigma rule when it has an id plus at least one of
// the characteristic top-level keys (detection / logsource / title)
function isSigmaRule(doc) {
    if (!doc) return doc;
    if (!doc.id) return doc.id;
    return doc.detection || doc.logsource || doc.title;
}
|
||||
|
||||
// Parse one YAML file into zero or more rule records.
// Sigma files may hold several documents; multi-document parsing is tried
// first, with a single-document fallback if that fails.
function parseRuleFile(filePath) {
    try {
        const content = fs.readFileSync(filePath, 'utf8');

        // Collect every document present in the file
        const documents = [];
        try {
            yaml.loadAll(content, (doc) => {
                if (doc) documents.push(doc);
            });
        } catch (e) {
            logger.warn(`${FILE_NAME}: Multi-document parsing failed for ${filePath}: ${e.message}`);
            try {
                const single = yaml.load(content);
                if (single) documents.push(single);
            } catch (singleError) {
                logger.error(`Failed to parse ${filePath} as YAML: ${singleError.message}`);
            }
        }

        // Keep only documents that look like Sigma rules, then build records
        const records = [];
        for (const rule of documents.filter(isSigmaRule)) {
            // Defensive: isSigmaRule should already guarantee an id
            if (!rule.id) {
                logger.warn(`${FILE_NAME}: Rule in ${filePath} has no ID, skipping`);
                continue;
            }
            records.push({
                id: rule.id,
                file_path: filePath,
                content,
                parameters: extractParameters(rule)
            });
        }
        return records;
    } catch (error) {
        logger.error(`${FILE_NAME}: Error parsing ${filePath}: ${error.message}`);
        return [];
    }
}
|
||||
|
||||
// Flatten a rule object into a list of {param_name, param_value, param_type}
// rows, recursing into nested objects (dotted keys) and arrays ([i] suffixes).
function extractParameters(rule) {
    const collected = [];

    const walk = (key, value, prefix = '') => {
        const fullKey = prefix ? `${prefix}.${key}` : key;

        if (value === null || value === undefined) {
            collected.push({ param_name: fullKey, param_value: '', param_type: 'null' });
            return;
        }

        if (Array.isArray(value)) {
            // Record the array as a whole, then each element individually
            collected.push({
                param_name: fullKey,
                param_value: JSON.stringify(value),
                param_type: 'array'
            });
            value.forEach((item, index) => {
                if (typeof item === 'object' && item !== null) {
                    walk(index.toString(), item, fullKey);
                } else {
                    collected.push({
                        param_name: `${fullKey}[${index}]`,
                        param_value: String(item),
                        param_type: typeof item
                    });
                }
            });
            return;
        }

        if (typeof value === 'object') {
            // Record the object itself, then descend into its properties
            collected.push({
                param_name: fullKey,
                param_value: JSON.stringify(value),
                param_type: 'object'
            });
            for (const [childKey, childValue] of Object.entries(value)) {
                walk(childKey, childValue, fullKey);
            }
            return;
        }

        // Primitive leaf value
        collected.push({
            param_name: fullKey,
            param_value: String(value),
            param_type: typeof value
        });
    };

    for (const [key, value] of Object.entries(rule)) {
        walk(key, value);
    }

    return collected;
}
|
||||
|
||||
/**
 * Import all Sigma rules from the repository checkout into the database.
 *
 * Walks every .yml/.yaml file under SIGMA_REPO_DIR, parses each into rule
 * records, and inserts each rule plus its flattened parameters inside a
 * per-rule transaction (rolled back on any failure so a rule and its
 * parameters are always stored together or not at all). Logs a summary and
 * runs a content diagnosis at the end.
 *
 * @param {Object} db - open sqlite3 database connection (foreign keys on)
 * @returns {Promise<void>}
 */
async function importRules(db) {
    // BUG FIX: this log line used single quotes, so "${FILE_NAME}" was
    // printed literally instead of being interpolated
    logger.info(`${FILE_NAME}: Looking for rule files...`);

    // Get all YAML files - include both .yml and .yaml extensions
    const files = await glob(`${SIGMA_REPO_DIR}/**/*.{yml,yaml}`);
    logger.info(`${FILE_NAME}: Found ${files.length} total YAML files`);

    // Prepared statements reused across every insert
    const insertRuleStmt = db.prepare(`
        INSERT OR REPLACE INTO sigma_rules (id, file_path, content)
        VALUES (?, ?, ?)
    `);

    const insertParamStmt = db.prepare(`
        INSERT INTO rule_parameters (rule_id, param_name, param_value, param_type)
        VALUES (?, ?, ?, ?)
    `);

    // Progress / outcome counters
    let importedRuleCount = 0;
    let importedParamCount = 0;
    let skippedFileCount = 0;
    let errorCount = 0;

    // Track how many rules carried YAML content into the database
    let rulesWithContent = 0;
    let rulesWithoutContent = 0;

    for (const file of files) {
        try {
            const rules = parseRuleFile(file);

            // Files with no recognizable Sigma documents are skipped
            if (rules.length === 0) {
                skippedFileCount++;
                continue;
            }

            for (const rule of rules) {
                // One transaction per rule: the rule row and all its
                // parameter rows commit or roll back together
                await new Promise((resolve, reject) => {
                    db.run('BEGIN TRANSACTION', (err) => {
                        if (err) reject(err);
                        else resolve();
                    });
                });

                let transactionSuccessful = true;

                try {
                    const hasContent = !!(rule.content && rule.content.length > 0);

                    if (hasContent) {
                        rulesWithContent++;
                    } else {
                        rulesWithoutContent++;
                        logger.warn(`Rule ${rule.id} has no content!`);
                        // Store empty string rather than NULL for missing content
                        rule.content = '';
                    }

                    // Insert the rule row itself
                    await new Promise((resolve, reject) => {
                        insertRuleStmt.run(
                            rule.id,              // id
                            rule.file_path,       // file_path
                            rule.content || '',   // content (never null)
                            (err) => {
                                if (err) {
                                    logger.error(`Error inserting rule ${rule.id}: ${err.message}`);
                                    transactionSuccessful = false;
                                    reject(err);
                                } else {
                                    resolve();
                                }
                            }
                        );
                    });

                    // Only insert parameters if the rule insertion succeeded
                    if (transactionSuccessful) {
                        for (const param of rule.parameters) {
                            try {
                                await new Promise((resolve, reject) => {
                                    insertParamStmt.run(
                                        rule.id,
                                        param.param_name,
                                        param.param_value,
                                        param.param_type,
                                        (err) => {
                                            if (err) {
                                                logger.error(`${FILE_NAME}: Error inserting parameter ${param.param_name} for rule ${rule.id}: ${err.message}`);
                                                transactionSuccessful = false;
                                                reject(err);
                                            } else {
                                                resolve();
                                            }
                                        }
                                    );
                                });

                                // Stop processing parameters if any insertion fails
                                if (!transactionSuccessful) {
                                    break;
                                }

                                importedParamCount++;
                            } catch (paramError) {
                                logger.error(`${FILE_NAME}: Error processing parameter: ${paramError.message}`);
                                transactionSuccessful = false;
                                break;
                            }
                        }
                    }

                    if (transactionSuccessful) {
                        // Everything inserted cleanly: commit
                        await new Promise((resolve, reject) => {
                            db.run('COMMIT', (err) => {
                                if (err) {
                                    transactionSuccessful = false;
                                    reject(err);
                                } else {
                                    resolve();
                                }
                            });
                        });

                        importedRuleCount++;

                        // Log progress every 100 rules
                        if (importedRuleCount % 100 === 0) {
                            logger.info(`${FILE_NAME}: Imported ${importedRuleCount} rules with ${importedParamCount} parameters, ${rulesWithContent} have content, ${rulesWithoutContent} missing content`);
                        }
                    } else {
                        // Partial failure: undo everything for this rule
                        await new Promise((resolve) => {
                            db.run('ROLLBACK', () => resolve());
                        });
                        errorCount++;
                    }
                } catch (error) {
                    // Unexpected failure mid-transaction: roll back and continue
                    await new Promise((resolve) => {
                        db.run('ROLLBACK', () => resolve());
                    });

                    logger.error(`${FILE_NAME}: Error importing rule ${rule.id} from ${file}: ${error.message}`);
                    errorCount++;
                }
            }
        } catch (error) {
            logger.error(`${FILE_NAME}: Error processing file ${file}: ${error.message}`);
            errorCount++;
        }
    }

    insertRuleStmt.finalize();
    insertParamStmt.finalize();

    logger.info(`${FILE_NAME}: Import summary: ${importedRuleCount} rules imported with ${importedParamCount} parameters, ${skippedFileCount} files skipped, ${errorCount} errors`);
    logger.info(`${FILE_NAME}: Content status: ${rulesWithContent} rules with content, ${rulesWithoutContent} rules without content`);

    // Run diagnostics after import
    await diagnoseContentImport(db);
}
|
||||
|
||||
// Run a set of post-import sanity checks (row counts, sample content
// lengths, schema info, orphaned parameters) and log what was found
async function diagnoseContentImport(db) {
    // Promisified single-row / multi-row query helpers local to this check
    const fetchRow = (sql, params = []) =>
        new Promise((resolve, reject) => {
            db.get(sql, params, (err, row) => (err ? reject(err) : resolve(row)));
        });
    const fetchRows = (sql) =>
        new Promise((resolve, reject) => {
            db.all(sql, (err, rows) => (err ? reject(err) : resolve(rows || [])));
        });

    logger.info(`${FILE_NAME}: Starting content import diagnosis...`);

    // Total rule count
    const countRow = await fetchRow('SELECT COUNT(*) as count FROM sigma_rules');
    const ruleCount = countRow ? countRow.count : 0;
    logger.info(`${FILE_NAME}: Total rules in database: ${ruleCount}`);

    // A small sample of rules and their stored content lengths
    const sampleRules = await fetchRows('SELECT id, file_path, length(content) as content_length FROM sigma_rules LIMIT 5');
    logger.info(`${FILE_NAME}: Sample rule content lengths: ${JSON.stringify(sampleRules.map(r => ({ id: r.id, content_length: r.content_length })))}`);

    // Confirm the content column exists and inspect its declared type
    const tableInfo = await fetchRows('PRAGMA table_info(sigma_rules)');
    const contentColumn = tableInfo.find(col => col.name === 'content');
    logger.info(`${FILE_NAME}: Content column info: ${JSON.stringify(contentColumn)}`);

    // Spot-check the first sampled rule's actual content
    if (sampleRules.length > 0) {
        const firstRuleId = sampleRules[0].id;
        const contentRow = await fetchRow('SELECT content FROM sigma_rules WHERE id = ?', [firstRuleId]);
        const ruleContent = contentRow ? (contentRow.content || null) : null;

        logger.info(`${FILE_NAME}: First rule content check: id=${firstRuleId}, has_content=${ruleContent !== null}, type=${typeof ruleContent}, length=${ruleContent ? ruleContent.length : 0}`);

        if (ruleContent) {
            logger.info(`Content sample: ${ruleContent.substring(0, 100)}...`);
        }
    }

    // Total parameter count
    const paramRow = await fetchRow('SELECT COUNT(*) as count FROM rule_parameters');
    const paramCount = paramRow ? paramRow.count : 0;
    logger.info(`${FILE_NAME}: Total parameters: ${paramCount}`);

    // Parameters whose rule no longer exists indicate a broken FK constraint
    const orphanRow = await fetchRow(`
        SELECT COUNT(*) as count
        FROM rule_parameters p
        LEFT JOIN sigma_rules r ON p.rule_id = r.id
        WHERE r.id IS NULL
    `);
    const orphanedParamCount = orphanRow ? orphanRow.count : 0;
    logger.info(`${FILE_NAME}: Orphaned parameters (should be 0): ${orphanedParamCount}`);

    if (orphanedParamCount > 0) {
        logger.error(`Found ${orphanedParamCount} orphaned parameters! This indicates a foreign key constraint violation.`);
    }

    return {
        rule_count: ruleCount,
        sample_rules: sampleRules,
        content_column: contentColumn,
        param_count: paramCount,
        orphaned_param_count: orphanedParamCount
    };
}
|
||||
|
||||
// Create lookup indexes on rule_parameters; resolves once every index
// statement has completed, rejects on the first failure
async function createIndexes(db) {
    return new Promise((resolve, reject) => {
        const indexes = [
            'CREATE INDEX IF NOT EXISTS idx_param_rule_id ON rule_parameters(rule_id)',
            'CREATE INDEX IF NOT EXISTS idx_param_name ON rule_parameters(param_name)',
            'CREATE INDEX IF NOT EXISTS idx_param_value ON rule_parameters(param_value)',
            'CREATE INDEX IF NOT EXISTS idx_param_type ON rule_parameters(param_type)'
        ];

        let remaining = indexes.length;

        // Statements are dispatched together; sqlite3 serializes them internally
        indexes.forEach((indexSql) => {
            db.run(indexSql, (err) => {
                if (err) {
                    reject(err);
                    return;
                }
                remaining -= 1;
                if (remaining === 0) {
                    logger.info('Database indexes created');
                    resolve();
                }
            });
        });
    });
}
|
||||
|
||||
/**
 * Orchestrate a full database (re)build: update the Sigma repository,
 * (re)create the schema, import all rules, build indexes, then close the
 * connection and exit. Exits with code 0 on success, 1 on failure.
 *
 * @returns {Promise<void>} never resolves normally (process.exit is called)
 */
async function main() {
    try {
        // Update Sigma repository; continue with a warning if it fails
        const repoUpdated = await updateSigmaRepo();
        if (!repoUpdated) {
            logger.error(`${FILE_NAME}: Failed to update repository. Database may not be up-to-date.`);
        }

        // Connect to database
        const db = await createDbConnection();

        // Initialize database schema
        await initializeDatabase(db);

        // Import rules
        await importRules(db);

        // Create indexes
        await createIndexes(db);

        // BUG FIX: db.close() was previously fire-and-forget, so
        // process.exit(0) below could terminate the process before the
        // handle was released; wait for the close callback before exiting
        await new Promise((resolve) => {
            db.close((err) => {
                if (err) {
                    logger.error(`${FILE_NAME}: Error closing database: ${err.message}`);
                } else {
                    logger.info(`${FILE_NAME}: Database connection closed`);
                }
                resolve();
            });
        });

        logger.info(`${FILE_NAME}: Database initialization completed successfully`);
        process.exit(0);
    } catch (error) {
        logger.error(`${FILE_NAME}: Database initialization failed: ${error.message}`);
        process.exit(1);
    }
}
|
||||
|
||||
// Run the full initialization only when this file is executed directly
// (node sigma_db_initialize.js), not when it is imported as a module
if (require.main === module) {
    main();
}
|
||||
|
||||
// Exported for reuse and testing (main itself is intentionally not exported)
module.exports = {
    initializeDatabase,
    importRules,
    createIndexes
};
|
585
src/sigma_db/sigma_db_queries.js
Normal file
585
src/sigma_db/sigma_db_queries.js
Normal file
|
@ -0,0 +1,585 @@
|
|||
/**
|
||||
*
|
||||
* sigma_db_queries.js
|
||||
* this script contains functions to interact with the Sigma database
|
||||
*
|
||||
* IMPORTANT:
|
||||
* SQLite queries need explicit Promise handling when using db.all()
|
||||
*
|
||||
* We had an issue in that the Promise returned by db.all() wasn't being
|
||||
* properly resolved in the async context. By wrapping the db.all() call in
|
||||
* a new Promise and explicitly handling the callback, we ensure the query
|
||||
* completes before continuing. This is important with SQLite where the
|
||||
* connection state management can sometimes be tricky with async/await.
|
||||
*
|
||||
*/
|
||||
const { getDbConnection } = require('./sigma_db_connection');
|
||||
const logger = require('../utils/logger');
|
||||
const { DB_PATH } = require('../config/appConfig');
|
||||
const path = require('path');
|
||||
|
||||
const { getFileName } = require('../utils/file_utils');
|
||||
const FILE_NAME = getFileName(__filename);
|
||||
|
||||
|
||||
/**
 * Fetch every rule ID stored in the database, sorted ascending.
 * Never throws: any failure is logged and an empty array is returned.
 *
 * @returns {Promise<Array<string>>} list of rule IDs (possibly empty)
 */
async function getAllRuleIds() {
    let db;
    try {
        logger.info(`${FILE_NAME}: Retrieving all rule IDs from database`);

        db = await getDbConnection();
        logger.debug(`${FILE_NAME}: Connected to database for retrieving all rule IDs`);

        const rows = await new Promise((resolve, reject) => {
            db.all('SELECT id FROM sigma_rules ORDER BY id', [], (err, fetched) => {
                if (err) {
                    logger.error(`${FILE_NAME}: Error fetching all rule IDs: ${err.message}`);
                    reject(err);
                    return;
                }
                resolve(fetched || []);
            });
        });

        logger.debug(`${FILE_NAME}: Retrieved ${rows.length} rule IDs from database`);
        return rows.map((row) => row.id);
    } catch (error) {
        logger.error(`${FILE_NAME}: Error retrieving all rule IDs: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return [];
    } finally {
        // Always release the connection, even after a failure
        if (db) {
            try {
                await db.close();
                logger.debug(`${FILE_NAME}: Database connection closed after retrieving all rule IDs`);
            } catch (closeError) {
                logger.warn(`${FILE_NAME}: Error closing database: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
|
||||
/**
 * Find a Sigma rule by its ID.
 * Retrieves the base rule row and attaches its stored parameters (with
 * type-aware value conversion) as rule.parameters.
 *
 * @param {string} ruleId - The ID of the rule to find
 * @returns {Promise<Object|null>} The rule object (with .parameters map,
 *          and .content_missing=true when the YAML body is absent), or
 *          null when the rule is not found or an error occurs
 */
async function findRuleById(ruleId) {
    // Guard: a falsy ID can never match a row
    if (!ruleId) {
        logger.warn(`${FILE_NAME}: Cannot find rule: Missing rule ID`);
        return null;
    }

    let db;
    try {
        db = await getDbConnection();
        logger.debug(`${FILE_NAME}: Connected to database for rule lookup: ${ruleId}`);

        // Get the base rule using promisified method
        const rule = await db.getAsync('SELECT * FROM sigma_rules WHERE id = ?', [ruleId]);
        if (!rule) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} not found in database`);
            return null;
        }

        logger.debug(`${FILE_NAME}: Found base rule with ID ${ruleId}, content length: ${rule.content ? rule.content.length : 0}`);

        // NOTE(review): this exact-case parameter query feeds only the debug
        // log below; the case-insensitive query further down re-fetches the
        // same rows and is the one actually attached to the rule — the two
        // queries look redundant, confirm whether both are intended.
        const paramsAsync = await db.allAsync('SELECT param_name, param_value, param_type FROM rule_parameters WHERE rule_id = ?', [ruleId]);
        logger.debug(`${FILE_NAME}: Params query returned ${paramsAsync ? paramsAsync.length : 0} results via allAsync`);

        // Flag rules whose YAML body was never stored so callers can react
        if (!rule.content) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} exists but has no content`);
            rule.content_missing = true;
        }

        // Get all parameters for this rule with case-insensitive matching
        try {
            const params = await new Promise((resolve, reject) => {
                db.all(
                    'SELECT param_name, param_value, param_type FROM rule_parameters WHERE LOWER(rule_id) = LOWER(?)',
                    [ruleId],
                    (err, rows) => {
                        if (err) reject(err);
                        else resolve(rows);
                    }
                );
            });

            logger.debug(`${FILE_NAME}: Retrieved ${params ? params.length : 0} parameters for rule ${ruleId}`);

            // Validate params is an array before iterating
            if (params && Array.isArray(params)) {
                // Attach parameters to the rule object, converting stored
                // string values back to their original types
                rule.parameters = {};

                for (const param of params) {
                    if (param && param.param_name) {
                        // Convert value based on the recorded param_type
                        let value = param.param_value;

                        if (param.param_type === 'object' || param.param_type === 'array') {
                            try {
                                value = JSON.parse(param.param_value);
                            } catch (parseError) {
                                // Keep the raw string when the stored JSON is invalid
                                logger.warn(`${FILE_NAME}: Failed to parse JSON for parameter ${param.param_name}: ${parseError.message}`);
                            }
                        } else if (param.param_type === 'boolean') {
                            value = param.param_value === 'true';
                        } else if (param.param_type === 'number') {
                            value = Number(param.param_value);
                        }

                        rule.parameters[param.param_name] = value;
                    }
                }

                logger.debug(`${FILE_NAME}: Successfully processed ${Object.keys(rule.parameters).length} parameters for rule ${ruleId}`);
            } else {
                logger.warn(`${FILE_NAME}: Parameters for rule ${ruleId} not available or not iterable`);
                rule.parameters = {};
            }
        } catch (paramError) {
            // Parameter failures degrade gracefully: return the rule with an
            // empty parameters map instead of failing the whole lookup
            logger.error(`${FILE_NAME}: Error fetching parameters for rule ${ruleId}: ${paramError.message}`);
            logger.debug(`${FILE_NAME}: Parameter error stack: ${paramError.stack}`);
            rule.parameters = {};
        }

        return rule;
    } catch (error) {
        logger.error(`${FILE_NAME}: Error finding rule ${ruleId}: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return null;
    } finally {
        // Close the database connection if it was opened
        // NOTE(review): sqlite3's db.close() takes a callback and does not
        // return a Promise, so this await does not actually wait for the
        // close to complete — confirm whether a promisified close is intended
        if (db && typeof db.close === 'function') {
            try {
                await db.close();
                logger.debug(`${FILE_NAME}: Database connection closed after rule lookup`);
            } catch (closeError) {
                logger.warn(`${FILE_NAME}: Error closing database connection: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
/**
 * Search for Sigma rules by keyword in rule titles.
 * Performs a case-insensitive substring match over stored title parameters
 * and returns one page of matches plus the total match count.
 *
 * @param {string} keyword - The keyword to search for
 * @param {number} limit - Maximum number of results to return (default: 10)
 * @param {number} offset - Number of results to skip (for pagination, default: 0)
 * @returns {Promise<Object>} { results: [{id, title}, ...], totalCount }
 */
async function searchRules(keyword, limit = 10, offset = 0) {
    if (!keyword) {
        logger.warn(`${FILE_NAME}: Empty search keyword provided`);
        return { results: [], totalCount: 0 };
    }

    // BUG FIX: the keyword was previously run through a quote-doubling
    // "sanitizer" (' -> '') even though it is bound as a query parameter.
    // Parameter binding already prevents SQL injection, and the doubled
    // quotes corrupted searches containing apostrophes (e.g. "O'Brien"
    // searched for "O''Brien" and matched nothing). The raw keyword is now
    // bound directly.
    logger.info(`${FILE_NAME}: Searching for rules with keyword in title: ${keyword} (limit: ${limit}, offset: ${offset})`);

    let db;
    try {
        // Make sure we properly await the DB connection
        db = await getDbConnection();
        logger.debug(`${FILE_NAME}: Database connection established for search`);

        // First get the total count of matching rules (for pagination info)
        const countQuery = `
            SELECT COUNT(*) as count
            FROM rule_parameters
            WHERE param_name = 'title'
            AND INSTR(LOWER(param_value), LOWER(?)) > 0
        `;

        const countResult = await new Promise((resolve, reject) => {
            db.get(countQuery, [keyword], (err, row) => {
                if (err) {
                    logger.error(`${FILE_NAME}: Count query error: ${err.message}`);
                    reject(err);
                } else {
                    resolve(row || { count: 0 });
                }
            });
        });

        const totalCount = countResult.count;
        logger.debug(`${FILE_NAME}: Total matching rules for "${keyword}": ${totalCount}`);

        // Parameterized page query — the keyword, limit and offset are all bound
        const instrQuery = `
            SELECT rule_id, param_value AS title
            FROM rule_parameters
            WHERE param_name = 'title'
            AND INSTR(LOWER(param_value), LOWER(?)) > 0
            LIMIT ? OFFSET ?
        `;

        const results = await new Promise((resolve, reject) => {
            db.all(instrQuery, [keyword, limit, offset], (err, rows) => {
                if (err) {
                    logger.error(`${FILE_NAME}: Search query error: ${err.message}`);
                    reject(err);
                } else {
                    logger.debug(`${FILE_NAME}: Search query returned ${rows ? rows.length : 0} results`);
                    resolve(rows || []);
                }
            });
        });

        logger.debug(`${FILE_NAME}: Search results page for keyword "${keyword}": ${results.length} matches (page ${Math.floor(offset / limit) + 1})`);

        return {
            results: results.map(r => ({ id: r.rule_id, title: r.title })),
            totalCount
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error in search operation: ${error.message}`);
        logger.debug(`${FILE_NAME}: Search error stack: ${error.stack}`);
        return { results: [], totalCount: 0 };
    } finally {
        // Make sure we properly close the connection
        if (db) {
            try {
                await new Promise((resolve) => db.close(() => resolve()));
                logger.debug(`${FILE_NAME}: Database connection closed after search operation`);
            } catch (closeError) {
                logger.error(`${FILE_NAME}: Error closing database connection after search: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
/**
|
||||
* Debug function to retrieve detailed information about a rule's content
|
||||
* Useful for diagnosing issues with rule retrieval and content parsing
|
||||
*
|
||||
* @param {string} ruleId - The ID of the rule to debug
|
||||
* @returns {Promise<Object|null>} Object containing debug information or null on error
|
||||
*/
|
||||
/**
 * Debug function to retrieve detailed information about a rule's content.
 * Useful for diagnosing issues with rule retrieval and content parsing.
 *
 * @param {string} ruleId - The ID of the rule to debug
 * @returns {Promise<Object|null>} Object containing debug information, or null
 *   when no rule ID was provided
 */
async function debugRuleContent(ruleId) {
    if (!ruleId) {
        logger.warn(`${FILE_NAME}: Cannot debug rule: Missing rule ID`);
        return null;
    }

    let db;
    try {
        db = await getDbConnection();

        const absolutePath = path.resolve(DB_PATH);
        logger.debug(`${FILE_NAME}: Debug function connecting to DB at path: ${absolutePath}`);

        // sqlite3's db.get() is callback-based; `await db.get(...)` resolves
        // to the Database object (always truthy) instead of the row, so the
        // not-found branch below could never fire. Wrap it in a Promise, as
        // the other query functions in this module do.
        const rule = await new Promise((resolve, reject) => {
            db.get(
                'SELECT id, file_path, length(content) as content_length, typeof(content) as content_type FROM sigma_rules WHERE id = ?',
                [ruleId],
                (err, row) => (err ? reject(err) : resolve(row || null))
            );
        });

        if (!rule) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} not found during debug operation`);
            return { error: 'Rule not found', ruleId };
        }

        // Return just the rule information without the undefined variables
        return {
            rule,
            ruleId
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Debug error for rule ${ruleId}: ${error.message}`);
        logger.debug(`${FILE_NAME}: Debug error stack: ${error.stack}`);
        return {
            error: error.message,
            stack: error.stack,
            ruleId
        };
    } finally {
        if (db) {
            try {
                // db.close() is callback-based as well; awaiting it directly
                // would not wait for the close to complete
                await new Promise((resolve) => db.close(() => resolve()));
                logger.debug(`${FILE_NAME}: Database connection closed after debug operation`);
            } catch (closeError) {
                logger.warn(`${FILE_NAME}: Error closing database after debug: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
/**
|
||||
* Get the raw YAML content of a Sigma rule
|
||||
* Retrieves the content field from the database which should contain YAML
|
||||
*
|
||||
* @param {string} ruleId - The ID of the rule
|
||||
* @returns {Promise<Object>} Object with success flag and content or error message
|
||||
*/
|
||||
async function getRuleYamlContent(ruleId) {
    if (!ruleId) {
        logger.warn(`${FILE_NAME}: Cannot get YAML content: Missing rule ID`);
        return { success: false, message: 'Missing rule ID' };
    }

    let db;
    try {
        logger.info(`${FILE_NAME}: Fetching YAML content for rule: ${ruleId}`);
        logger.debug(`${FILE_NAME}: Rule ID type: ${typeof ruleId}, length: ${ruleId.length}`);

        db = await getDbConnection();
        logger.debug(`${FILE_NAME}: Connected to database for YAML retrieval`);

        // Debug query before execution. sqlite3's db.get() is callback-based,
        // so it must be wrapped in a Promise (as the content query below
        // already does); `await db.get(...)` would yield the Database object
        // rather than the row.
        const debugResult = await new Promise((resolve, reject) => {
            db.get(
                'SELECT id, typeof(content) as content_type, length(content) as content_length FROM sigma_rules WHERE id = ?',
                [ruleId],
                (err, row) => (err ? reject(err) : resolve(row || null))
            );
        });
        logger.debug(`${FILE_NAME}: Debug query result: ${JSON.stringify(debugResult || 'not found')}`);

        if (!debugResult) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} not found in debug query`);
            return { success: false, message: 'Rule not found' };
        }

        // Get actual content
        const rule = await new Promise((resolve, reject) => {
            db.get('SELECT content FROM sigma_rules WHERE id = ?', [ruleId], (err, row) => {
                if (err) {
                    logger.error(`${FILE_NAME}: Content query error: ${err.message}`);
                    reject(err);
                } else {
                    resolve(row || null);
                }
            });
        });
        logger.debug(`${FILE_NAME}: Content query result for ${ruleId}: ${rule ? 'Found' : 'Not found'}`);

        if (!rule) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} not found in content query`);
            return { success: false, message: 'Rule not found' };
        }

        if (!rule.content) {
            logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} found but has no content`);
            return { success: false, message: 'Rule found but content is empty' };
        }

        logger.debug(`${FILE_NAME}: Content retrieved successfully for ${ruleId}, type: ${typeof rule.content}, length: ${rule.content.length}`);

        return { success: true, content: rule.content };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error retrieving YAML content for ${ruleId}: ${error.message}`);
        logger.debug(`${FILE_NAME}: YAML retrieval error stack: ${error.stack}`);
        return { success: false, message: `Error retrieving YAML: ${error.message}` };
    } finally {
        if (db) {
            try {
                // db.close() is callback-based as well
                await new Promise((resolve) => db.close(() => resolve()));
                logger.debug(`${FILE_NAME}: Database connection closed after YAML retrieval`);
            } catch (closeError) {
                logger.warn(`${FILE_NAME}: Error closing database after YAML retrieval: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
/**
|
||||
* Get statistics about Sigma rules in the database
|
||||
* Collects counts, categories, and other aggregate information
|
||||
*
|
||||
* @returns {Promise<Object>} Object with various statistics about the rules
|
||||
*/
|
||||
async function getStatsFromDatabase() {
    let db;

    // Promisified wrappers around sqlite3's callback-based API; both close
    // over `db`, which is assigned before any query runs.
    const dbGet = (sql, params = []) => new Promise((resolve, reject) => {
        db.get(sql, params, (err, row) => (err ? reject(err) : resolve(row)));
    });
    const dbAll = (sql, params = []) => new Promise((resolve, reject) => {
        db.all(sql, params, (err, rows) => (err ? reject(err) : resolve(rows)));
    });

    // Count rules whose 'logsource' parameter mentions the given OS product.
    // The pattern is bound as a parameter rather than spliced into the SQL.
    const countRulesByProduct = async (product) => {
        const row = await dbGet(
            `SELECT COUNT(DISTINCT rule_id) as count
            FROM rule_parameters
            WHERE param_name = 'logsource' AND
            param_value LIKE ?`,
            [`%"product":"${product}"%`]
        );
        return (row && row.count) || 0;
    };

    try {
        db = await getDbConnection();
        logger.debug(`${FILE_NAME}: Connected to database for statistics`);

        // Total rule count and last update time
        const totalRules = (await dbGet('SELECT COUNT(*) as count FROM sigma_rules')).count;
        const lastUpdate = (await dbGet('SELECT MAX(date) as last_update FROM sigma_rules')).last_update;

        // Rules by log source product (Windows, Linux, macOS)
        const windowsRules = await countRulesByProduct('windows');
        const linuxRules = await countRulesByProduct('linux');
        const macosRules = await countRulesByProduct('macos');

        // Rules by severity level, ordered from critical to informational
        const severityStats = await dbAll(`
            SELECT param_value AS level, COUNT(DISTINCT rule_id) as count
            FROM rule_parameters
            WHERE param_name = 'level'
            GROUP BY param_value
            ORDER BY
            CASE
                WHEN param_value = 'critical' THEN 1
                WHEN param_value = 'high' THEN 2
                WHEN param_value = 'medium' THEN 3
                WHEN param_value = 'low' THEN 4
                WHEN param_value = 'informational' THEN 5
                ELSE 6
            END`);

        // Top 5 rule authors by number of rules
        const topAuthors = await dbAll(`
            SELECT param_value AS author, COUNT(*) as count
            FROM rule_parameters
            WHERE param_name = 'author'
            GROUP BY param_value
            ORDER BY count DESC
            LIMIT 5`);

        // Rules with missing YAML content (database health indicator)
        const emptyContentCount = (await dbGet(`
            SELECT COUNT(*) as count
            FROM sigma_rules
            WHERE content IS NULL OR content = ''`)).count;

        // Top 10 MITRE ATT&CK tactic tags
        const mitreStats = await dbAll(`
            SELECT param_value AS tag, COUNT(DISTINCT rule_id) as count
            FROM rule_parameters
            WHERE param_name = 'tags' AND param_value LIKE 'attack.%'
            GROUP BY param_value
            ORDER BY count DESC
            LIMIT 10`);

        // Format MITRE tactics for display (strip the 'attack.' prefix)
        const formattedMitreTactics = mitreStats.map((item) => ({
            tactic: item.tag.substring(7),
            count: item.count
        }));

        // Compile all statistics
        const stats = {
            totalRules,
            lastUpdate,
            operatingSystems: {
                windows: windowsRules,
                linux: linuxRules,
                macos: macosRules,
                other: totalRules - (windowsRules + linuxRules + macosRules)
            },
            severityLevels: severityStats.map(s => ({ level: s.level, count: s.count })),
            topAuthors: topAuthors.map(a => ({ name: a.author, count: a.count })),
            databaseHealth: {
                emptyContentCount,
                contentPercentage: totalRules > 0 ? Math.round(((totalRules - emptyContentCount) / totalRules) * 100) : 0
            },
            mitreTactics: formattedMitreTactics
        };

        return {
            success: true,
            stats
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error retrieving statistics: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return {
            success: false,
            message: `Error retrieving statistics: ${error.message}`
        };
    } finally {
        if (db) {
            try {
                await new Promise((resolve) => db.close(() => resolve()));
                logger.debug(`${FILE_NAME}: Database connection closed after statistics retrieval`);
            } catch (closeError) {
                logger.warn(`${FILE_NAME}: Error closing database: ${closeError.message}`);
            }
        }
    }
}
|
||||
|
||||
// Public database-access API for Sigma rule queries
module.exports = {
    getAllRuleIds,
    findRuleById,
    searchRules,
    debugRuleContent,
    getRuleYamlContent,
    getStatsFromDatabase
};
|
42
src/utils/error_handler.js
Normal file
42
src/utils/error_handler.js
Normal file
|
@ -0,0 +1,42 @@
|
|||
/**
|
||||
* error_handler.js
|
||||
*
|
||||
* Provides standardized error handling for Slack responses
|
||||
*/
|
||||
const logger = require('./logger');
|
||||
|
||||
/**
|
||||
* Handle errors consistently across handlers
|
||||
*
|
||||
* @param {Error} error - The error that occurred
|
||||
* @param {string} source - Source file/function where error occurred
|
||||
* @param {Function} respond - Slack respond function
|
||||
* @param {Object} options - Additional options
|
||||
* @param {boolean} options.replaceOriginal - Whether to replace original message
|
||||
* @param {string} options.responseType - Response type (ephemeral/in_channel)
|
||||
* @param {string} options.customMessage - Custom message to display instead of error
|
||||
*/
|
||||
/**
 * Handle errors consistently across handlers.
 *
 * Logs the error in the shared "<source>: <message>" format, then sends a
 * user-facing response via the Slack respond function.
 *
 * @param {Error} error - The error that occurred
 * @param {string} source - Source file/function where the error occurred
 * @param {Function} respond - Slack respond function
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.replaceOriginal=false] - Whether to replace the original message
 * @param {string} [options.responseType='ephemeral'] - Response type (ephemeral/in_channel)
 * @param {string} [options.customMessage=null] - Custom message to display instead of the error
 */
const handleError = async (
    error,
    source,
    respond,
    { replaceOriginal = false, responseType = 'ephemeral', customMessage = null } = {}
) => {
    // Log with the consistent format used across all handlers
    logger.error(`${source}: ${error.message}`);
    logger.debug(`${source}: Error stack: ${error.stack}`);

    // Fall back to a generic message when no custom one was supplied
    await respond({
        text: customMessage || `An unexpected error occurred: ${error.message}`,
        replace_original: replaceOriginal,
        response_type: responseType
    });
};
|
||||
|
||||
// Public API: shared error handler for Slack command/action handlers
module.exports = {
    handleError
};
|
10
src/utils/file_utils.js
Normal file
10
src/utils/file_utils.js
Normal file
|
@ -0,0 +1,10 @@
|
|||
// file_utils.js
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Extract the base name (final path segment) from a file path.
 *
 * @param {string} filePath - Absolute or relative path to a file
 * @returns {string} The file name without any directory components
 */
const getFileName = (filePath) => path.basename(filePath);
|
||||
|
||||
// Public API: file path helpers
module.exports = {
    getFileName
};
|
0
src/utils/formatters.js
Normal file
0
src/utils/formatters.js
Normal file
103
src/utils/logger.js
Normal file
103
src/utils/logger.js
Normal file
|
@ -0,0 +1,103 @@
|
|||
/**
|
||||
* logger.js
|
||||
* Handles logging functionality
|
||||
*/
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { LOGGING_CONFIG } = require('../config/appConfig');
|
||||
|
||||
// Define log levels and their priority (higher number = higher priority)
const LOG_LEVELS = {
    DEBUG: 1,
    INFO: 2,
    WARN: 3,
    ERROR: 4
};

// Get configured log level from config, default to INFO if not specified
// (an unrecognized level name in the config also falls back to INFO)
const configuredLevel = (LOGGING_CONFIG?.level || 'info').toUpperCase();
const configuredLevelValue = LOG_LEVELS[configuredLevel] || LOG_LEVELS.INFO;

// Ensure logs directory exists (side effect at module load time)
const LOGS_DIR = path.join(__dirname, '..', '..', 'logs');
if (!fs.existsSync(LOGS_DIR)) {
    fs.mkdirSync(LOGS_DIR, { recursive: true });
}

// Use log file from config if available, otherwise use default
// (a relative path in the config is resolved against the project root)
const LOG_FILE = LOGGING_CONFIG?.file
    ? path.resolve(path.join(__dirname, '..', '..'), LOGGING_CONFIG.file)
    : path.join(LOGS_DIR, 'fylgja.log');
|
||||
|
||||
// Logger singleton: appends entries to the log file and mirrors them to the
// console when the entry's level meets the configured threshold.
const logger = {
    /**
     * Internal method: write a log entry to file and console if its level
     * meets the configured threshold.
     * @param {string} level - Log level (DEBUG, INFO, WARN, ERROR)
     * @param {string} message - Log message to write
     */
    _writeToFile: (level, message) => {
        // Entries below the configured level are dropped; unknown level
        // names map to 0 and are therefore always dropped
        if ((LOG_LEVELS[level] || 0) < configuredLevelValue) {
            return;
        }

        const timestamp = new Date().toISOString();
        const logEntry = `${timestamp} ${level}: ${message}\n`;

        // A file-write failure must not crash the application
        try {
            fs.appendFileSync(LOG_FILE, logEntry);
        } catch (err) {
            console.error(`Failed to write to log file: ${err.message}`);
        }

        // Mirror to the console via the level's matching console method;
        // INFO and any unrecognized level fall back to console.info
        const consoleFn = {
            ERROR: console.error,
            WARN: console.warn,
            DEBUG: console.debug
        }[level] || console.info;
        consoleFn(logEntry.trim());
    },

    /**
     * Log an info message
     * @param {string} message - Message to log
     */
    info: (message) => logger._writeToFile('INFO', message),

    /**
     * Log an error message
     * @param {string} message - Message to log
     */
    error: (message) => logger._writeToFile('ERROR', message),

    /**
     * Log a warning message
     * @param {string} message - Message to log
     */
    warn: (message) => logger._writeToFile('WARN', message),

    /**
     * Log a debug message
     * @param {string} message - Message to log
     */
    debug: (message) => logger._writeToFile('DEBUG', message),

    /**
     * Get the current log level
     * @returns {string} Current log level
     */
    getLogLevel: () => configuredLevel
};
|
||||
|
||||
// Export the shared logger singleton
module.exports = logger;
|
0
src/utils/validators.js
Normal file
0
src/utils/validators.js
Normal file
Loading…
Add table
Add a link
Reference in a new issue