first commit
commit 7988853b57
43 changed files with 8415 additions and 0 deletions
153  src/services/sigma/sigma_backend_converter.js  Normal file
@@ -0,0 +1,153 @@
/**
 * sigma_backend_converter.js
 *
 * Service for converting Sigma rules to various backend SIEM formats
 * Uses the sigma-cli tool for conversion operations
 */
const fs = require('fs');
const path = require('path');
const os = require('os');
const { execSync } = require('child_process');
const logger = require('../../utils/logger');
const { SIGMA_CLI_PATH, SIGMA_CLI_CONFIG } = require('../../config/appConfig');
const { convertSigmaRule } = require('./sigma_converter_service');
const { getRuleYamlContent } = require('../../sigma_db/sigma_db_queries');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

/**
 * Convert a Sigma rule to a specific backend format using the sigma-cli
 *
 * @param {string} ruleId - The ID of the rule to convert
 * @param {Object} config - Configuration for the conversion
 * @param {string} config.backend - Target backend (default from YAML config)
 * @param {string} config.target - Query target (default from YAML config)
 * @param {string} config.format - Output format (default from YAML config)
 * @returns {Promise<Object>} Conversion result with output or error
 */
async function convertRuleToBackend(ruleId, config = {}) {
    try {
        // Validate configuration and set defaults from YAML config
        const backend = config.backend || SIGMA_CLI_CONFIG.backend;
        const target = config.target || SIGMA_CLI_CONFIG.target;
        const format = config.format || SIGMA_CLI_CONFIG.format;

        logger.info(`${FILE_NAME}: Converting rule ${ruleId} using backend: ${backend}, target: ${target}, format: ${format}`);

        // Verify sigma-cli path
        if (!fs.existsSync(SIGMA_CLI_PATH)) {
            logger.error(`${FILE_NAME}: Sigma CLI not found at path: ${SIGMA_CLI_PATH}`);
            return {
                success: false,
                message: 'Sigma CLI tool not found'
            };
        }

        // Get the rule YAML content
        const yamlResult = await getRuleYamlContent(ruleId);
        if (!yamlResult.success || !yamlResult.content) {
            logger.warn(`${FILE_NAME}: Failed to retrieve YAML for rule ${ruleId}: ${yamlResult.message || 'No content'}`);
            return {
                success: false,
                message: yamlResult.message || 'Failed to retrieve rule content'
            };
        }

        // Save the YAML to a temporary file
        const tempDir = os.tmpdir();
        const tempFilePath = path.join(tempDir, `sigma_rule_${ruleId}_${Date.now()}.yml`);

        logger.debug(`${FILE_NAME}: Writing rule YAML to temp file: ${tempFilePath}`);

        try {
            fs.writeFileSync(tempFilePath, yamlResult.content);
        } catch (fileError) {
            logger.error(`${FILE_NAME}: Error writing temporary file: ${fileError.message}`);
            return {
                success: false,
                message: `Error preparing rule for conversion: ${fileError.message}`
            };
        }

        // Build the sigma-cli command
        // Command syntax: sigma convert -t "$backend" -p "$target" -f "$format"
        const command = `"${SIGMA_CLI_PATH}" convert -t "${backend}" -p "${target}" -f "${format}" "${tempFilePath}"`;

        // Execute the command
        logger.debug(`${FILE_NAME}: Executing sigma-cli command: ${command}`);
        let result;

        try {
            result = execSync(command, { encoding: 'utf8' });
        } catch (execError) {
            logger.error(`${FILE_NAME}: Sigma-cli execution error: ${execError.message}`);

            // Clean up temporary file
            try {
                fs.unlinkSync(tempFilePath);
            } catch (cleanupError) {
                logger.warn(`${FILE_NAME}: Error removing temporary file: ${cleanupError.message}`);
            }

            return {
                success: false,
                message: `Error during rule conversion: ${execError.message}`
            };
        }

        // Clean up temporary file
        try {
            fs.unlinkSync(tempFilePath);
        } catch (cleanupError) {
            logger.warn(`${FILE_NAME}: Error removing temporary file: ${cleanupError.message}`);
        }

        // Get rule metadata for context
        const ruleData = await convertSigmaRule(ruleId);

        if (!ruleData.success || !ruleData.rule) {
            logger.warn(`${FILE_NAME}: Failed to get metadata for rule ${ruleId}`);

            // Return the conversion output with placeholder metadata
            return {
                success: true,
                output: result.trim(),
                rule: {
                    id: ruleId,
                    title: 'Unknown Rule',
                    description: 'Rule metadata could not be retrieved'
                },
                conversionDetails: {
                    backend,
                    target,
                    format
                }
            };
        }

        // Return the output with rule metadata
        return {
            success: true,
            output: result.trim(),
            rule: ruleData.rule,
            conversionDetails: {
                backend,
                target,
                format
            }
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error converting rule ${ruleId} to backend: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);

        return {
            success: false,
            message: `Error converting rule: ${error.message}`
        };
    }
}

module.exports = {
    convertRuleToBackend
};
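
A hypothetical usage sketch for this module (illustrative, not part of the committed file; the rule ID, the 'splunk' backend value, and the require path are placeholder assumptions):

const { convertRuleToBackend } = require('./services/sigma/sigma_backend_converter');

(async () => {
    // Override only the backend; target and format fall back to SIGMA_CLI_CONFIG.
    const result = await convertRuleToBackend('example-rule-id', { backend: 'splunk' });

    if (result.success) {
        console.log(result.output);             // converted query text from sigma-cli
        console.log(result.conversionDetails);  // { backend, target, format }
    } else {
        console.error(result.message);
    }
})();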
422  src/services/sigma/sigma_converter_service.js  Normal file
@@ -0,0 +1,422 @@
//
// sigma_converter_service.js
// converts Sigma rules to a structured object
//
const logger = require('../../utils/logger');
const yaml = require('js-yaml');
const { findRuleById } = require('../../sigma_db/sigma_db_queries');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

/**
 * Convert a Sigma rule to a structured object
 * Can be called with either a rule object or a rule ID
 *
 * @param {Object|String} input - Either a raw rule object or a rule ID
 * @param {Object} [config] - Optional configuration
 * @returns {Promise<Object>} Converted rule or result object
 */
async function convertSigmaRule(input, config = null) {
    // Check if we're dealing with a rule ID (string)
    if (typeof input === 'string') {
        try {
            const ruleId = input;
            logger.info(`${FILE_NAME}: Converting rule by ID: ${ruleId}`);
            // Find the rule in the database
            const rawRule = await findRuleById(ruleId);
            if (!rawRule) {
                logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} not found`);
                return {
                    success: false,
                    message: `Rule with ID ${ruleId} not found`
                };
            }

            // Debug: Log what we found
            logger.debug(`${FILE_NAME}: Retrieved rule ${ruleId} from database: content ${rawRule.content ? 'present' : 'missing'}, parameters ${rawRule.parameters ? Object.keys(rawRule.parameters).length : 0}`);

            // Check if content is missing (flag set by findRuleById)
            if (rawRule.content_missing || !rawRule.content) {
                logger.warn(`${FILE_NAME}: Rule with ID ${ruleId} has missing content, attempting to build from parameters`);

                // Try to build from parameters
                if (rawRule.parameters && Object.keys(rawRule.parameters).length > 0) {
                    const builtRule = buildRuleFromParameters(rawRule);

                    if (builtRule) {
                        logger.info(`${FILE_NAME}: Successfully built rule ${ruleId} from parameters`);
                        return {
                            success: true,
                            rule: builtRule,
                            built_from_parameters: true
                        };
                    }
                }

                logger.warn(`${FILE_NAME}: Could not build rule ${ruleId} from parameters, returning placeholder`);
                return {
                    success: true,
                    rule: {
                        id: ruleId,
                        title: 'Rule Found But Content Missing',
                        description: `The rule with ID ${ruleId} exists in the database, but its content field is empty. This may indicate a problem with the rule import process.`,
                        author: 'Unknown',
                        level: 'unknown',
                        status: 'unknown',
                        logsource: {},
                        detection: { condition: 'Content missing' },
                        falsepositives: ['N/A - Content missing'],
                        tags: ['error', 'missing-content'],
                        references: [],
                        file_path: rawRule.file_path || 'unknown'
                    }
                };
            }

            // Process the raw rule
            const processedRule = processRuleContent(rawRule);
            if (!processedRule) {
                return {
                    success: false,
                    message: `Failed to process rule with ID ${ruleId}`
                };
            }

            logger.debug(`${FILE_NAME}: Processed rule content for ${rawRule.id}`);

            return {
                success: true,
                rule: processedRule
            };
        } catch (error) {
            logger.error(`${FILE_NAME}: Error converting rule by ID: ${error.message}`);
            return {
                success: false,
                message: `Error converting rule: ${error.message}`
            };
        }
    } else {
        try {
            if (!input) {
                return {
                    success: false,
                    message: 'No rule data provided'
                };
            }

            // Check for missing content
            if (!input.content) {
                logger.warn(`${FILE_NAME}: Rule object has missing content, attempting to build from parameters`);

                // Try to build from parameters
                if (input.parameters && Object.keys(input.parameters).length > 0) {
                    const builtRule = buildRuleFromParameters(input);

                    if (builtRule) {
                        logger.info(`${FILE_NAME}: Successfully built rule ${input.id} from parameters`);
                        return {
                            success: true,
                            rule: builtRule,
                            built_from_parameters: true
                        };
                    }
                }

                logger.warn(`${FILE_NAME}: Could not build rule from parameters, returning placeholder`);
                return {
                    success: true,
                    rule: {
                        id: input.id || 'unknown',
                        title: 'Rule Found But Content Missing',
                        description: 'The rule exists in the database, but its content field is empty. This may indicate a problem with the rule import process.',
                        author: 'Unknown',
                        level: 'unknown',
                        status: 'unknown',
                        logsource: {},
                        detection: { condition: 'Content missing' },
                        falsepositives: ['N/A - Content missing'],
                        tags: ['error', 'missing-content'],
                        references: [],
                        file_path: input.file_path || 'unknown'
                    }
                };
            }

            const processedRule = processRuleContent(input);
            if (!processedRule) {
                return {
                    success: false,
                    message: 'Failed to process rule object'
                };
            }

            return {
                success: true,
                rule: processedRule
            };
        } catch (error) {
            logger.error(`${FILE_NAME}: Error processing rule object: ${error.message}`);
            return {
                success: false,
                message: `Error processing rule: ${error.message}`
            };
        }
    }
}

/**
 * Process rule content into a structured object
 * @param {Object} rawRule - The raw rule object
 * @returns {Object|null} Processed rule object
 */
function processRuleContent(rawRule) {
    if (!rawRule) {
        logger.warn(`${FILE_NAME}: Cannot convert rule: rule object is null`);
        return null;
    }

    if (!rawRule.content) {
        logger.warn(`${FILE_NAME}: Cannot convert rule: missing content in rule data`);

        // Check if we have parameters and try to build from them
        if (rawRule.parameters && Object.keys(rawRule.parameters).length > 0) {
            logger.info(`${FILE_NAME}: Attempting to build rule ${rawRule.id} from parameters`);
            return buildRuleFromParameters(rawRule);
        }

        return {
            id: rawRule.id || 'unknown',
            title: 'Error: Missing Rule Content',
            description: 'The rule content could not be found in the database. This may indicate a problem with the rule import process or a corruption in the database.',
            level: 'unknown',
            file_path: rawRule.file_path || 'unknown',
            falsepositives: ['N/A - Content missing'],
            tags: ['error', 'missing-content'],
            references: [],
            detection: { condition: 'Content missing' }
        };
    }

    try {
        // Parse the YAML content
        let parsedRule;
        try {
            // Log the content for debugging
            logger.debug(`${FILE_NAME}: Parsing YAML content for rule ${rawRule.id}, content length: ${rawRule.content.length}`);

            // Try different YAML parsing approaches
            try {
                parsedRule = yaml.load(rawRule.content);
            } catch (yamlError) {
                logger.warn(`${FILE_NAME}: Standard YAML parsing failed for ${rawRule.id}: ${yamlError.message}`);

                // Try with more tolerant parsing
                try {
                    // Try multi-document loading
                    const docs = [];
                    yaml.loadAll(rawRule.content, (doc) => {
                        if (doc) docs.push(doc);
                    });

                    if (docs.length > 0) {
                        parsedRule = docs[0]; // Take the first document
                        logger.debug(`${FILE_NAME}: Multi-document YAML parsing succeeded for ${rawRule.id}, found ${docs.length} documents`);
                    } else {
                        throw new Error('No documents found in multi-document parse');
                    }
                } catch (multiError) {
                    logger.warn(`${FILE_NAME}: Multi-document YAML parsing failed for ${rawRule.id}: ${multiError.message}`);

                    // Last resort: manual extraction of key fields
                    parsedRule = extractFieldsManually(rawRule.content, rawRule.id);
                }
            }

            if (!parsedRule) {
                logger.warn(`${FILE_NAME}: Rule parsing resulted in null object for ID: ${rawRule.id}`);
                parsedRule = {};
            }
        } catch (yamlError) {
            logger.error(`${FILE_NAME}: YAML parsing error: ${yamlError.message}`);
            logger.debug(`${FILE_NAME}: Problematic content (first 200 chars): ${rawRule.content.substring(0, 200)}`);
            parsedRule = {};
        }

        // Create a new object combining database fields and parsed content
        const convertedRule = {
            id: rawRule.id || parsedRule.id || 'unknown',
            title: parsedRule.title || 'Untitled Rule',
            description: parsedRule.description || 'No description provided',
            author: parsedRule.author || 'Unknown',
            level: parsedRule.level || 'unknown',
            status: parsedRule.status || 'unknown',
            logsource: parsedRule.logsource || {},
            detection: parsedRule.detection || {},
            falsepositives: parsedRule.falsepositives || [],
            tags: parsedRule.tags || [],
            references: parsedRule.references || [],
            file_path: rawRule.file_path || 'unknown'
        };

        logger.info(`${FILE_NAME}: Successfully converted rule ${convertedRule.id}`);
        return convertedRule;
    } catch (error) {
        logger.error(`${FILE_NAME}: Error parsing rule: ${error.message}`);
        return {
            id: rawRule.id || 'unknown',
            title: 'Error: Could not parse rule',
            description: `Error parsing rule: ${error.message}`,
            level: 'unknown',
            file_path: rawRule.file_path || 'unknown',
            falsepositives: [],
            tags: ['error', 'parse-error'],
            references: [],
            detection: { condition: 'Parse error' }
        };
    }
}

/**
 * Manual extraction of key fields from YAML content when parsing fails
 * @param {string} content - The raw YAML content
 * @param {string} ruleId - The rule ID
 * @returns {Object} Extracted fields
 */
function extractFieldsManually(content, ruleId) {
    logger.debug(`${FILE_NAME}: Attempting manual field extraction for rule ${ruleId}`);

    const result = {
        id: ruleId
    };

    // Simple regex patterns to extract common fields
    const patterns = {
        title: /title:\s*(.+)$/m,
        description: /description:\s*(.+)$/m,
        author: /author:\s*(.+)$/m,
        level: /level:\s*(.+)$/m,
        status: /status:\s*(.+)$/m
    };

    // Extract fields using regex
    Object.entries(patterns).forEach(([field, pattern]) => {
        const match = content.match(pattern);
        if (match && match[1]) {
            result[field] = match[1].trim();
        }
    });

    logger.debug(`${FILE_NAME}: Manual extraction found ${Object.keys(result).length - 1} fields for rule ${ruleId}`);

    return result;
}

/**
 * Build a rule object from parameters when content is missing
 * @param {Object} rawRule - The raw rule object with parameters
 * @returns {Object|null} Reconstructed rule object, or null if parameters are missing
 */
function buildRuleFromParameters(rawRule) {
    if (!rawRule || !rawRule.parameters) {
        logger.warn(`${FILE_NAME}: Cannot build rule: missing parameters for rule ${rawRule ? rawRule.id : 'unknown'}`);
        return null;
    }

    logger.info(`${FILE_NAME}: Building rule ${rawRule.id} from parameters`);
    logger.debug(`${FILE_NAME}: Found ${Object.keys(rawRule.parameters).length} parameters for rule ${rawRule.id}`);

    // Initialize a new rule object with essential properties
    const reconstructedRule = {
        id: rawRule.id,
        title: rawRule.parameters.title || 'Unknown Title',
        description: rawRule.parameters.description || 'No description available',
        author: rawRule.parameters.author || 'Unknown',
        file_path: rawRule.file_path || 'unknown',
        level: rawRule.parameters.level || 'unknown',
        status: rawRule.parameters.status || 'unknown',
        logsource: {},
        detection: { condition: rawRule.parameters['detection.condition'] || 'unknown' },
        falsepositives: [],
        tags: [],
        references: []
    };

    // Process parameters to rebuild nested objects
    Object.entries(rawRule.parameters).forEach(([key, value]) => {
        // Handle array parameters
        if (key === 'falsepositives' || key === 'tags' || key === 'references') {
            if (Array.isArray(value)) {
                reconstructedRule[key] = value;
            } else if (typeof value === 'string') {
                // Try to parse JSON string arrays
                try {
                    const parsed = JSON.parse(value);
                    if (Array.isArray(parsed)) {
                        reconstructedRule[key] = parsed;
                    } else {
                        reconstructedRule[key] = [value];
                    }
                } catch (e) {
                    reconstructedRule[key] = [value];
                }
            }
        }
        // Handle logsource properties
        else if (key.startsWith('logsource.')) {
            const prop = key.substring('logsource.'.length);
            reconstructedRule.logsource[prop] = value;
        }
        // Handle detection properties
        else if (key.startsWith('detection.') && key !== 'detection.condition') {
            const prop = key.substring('detection.'.length);
            const parts = prop.split('.');

            let current = reconstructedRule.detection;
            for (let i = 0; i < parts.length - 1; i++) {
                if (!current[parts[i]]) {
                    current[parts[i]] = {};
                }
                current = current[parts[i]];
            }

            current[parts[parts.length - 1]] = value;
        }
    });

    logger.debug(`${FILE_NAME}: Reconstructed rule structure for ${rawRule.id}: ${JSON.stringify({
        id: reconstructedRule.id,
        title: reconstructedRule.title,
        fields: Object.keys(reconstructedRule)
    })}`);

    return reconstructedRule;
}
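
// Illustrative example of the reconstruction above (hypothetical values, not part of the committed file):
// given rawRule.parameters such as
//   {
//     'title': 'Example Rule',
//     'logsource.category': 'process_creation',
//     'detection.selection.Image|endswith': '\\whoami.exe',
//     'detection.condition': 'selection'
//   }
// the function is expected to produce
//   logsource: { category: 'process_creation' }
//   detection: { selection: { 'Image|endswith': '\\whoami.exe' }, condition: 'selection' }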

/**
 * Extract a readable condition string from a rule
 * @param {Object} rule - The converted rule object
 * @returns {String} Human-readable condition
 */
function extractDetectionCondition(rule) {
    if (!rule) {
        return 'No rule data available';
    }

    if (!rule.detection) {
        return 'No detection information available';
    }

    if (!rule.detection.condition) {
        return 'No condition specified';
    }

    return rule.detection.condition;
}

module.exports = {
    convertSigmaRule,
    extractDetectionCondition,
    buildRuleFromParameters
};
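
A hypothetical usage sketch for this module (illustrative; the rule ID and require path are placeholders):

const { convertSigmaRule, extractDetectionCondition } = require('./services/sigma/sigma_converter_service');

(async () => {
    // Pass a rule ID (string) to load the rule from the database, or pass a raw
    // rule object that already carries `content` and/or `parameters`.
    const result = await convertSigmaRule('example-rule-id');

    if (result.success) {
        console.log(result.rule.title);
        console.log(extractDetectionCondition(result.rule));
        if (result.built_from_parameters) {
            console.log('Rule was reconstructed from stored parameters');
        }
    } else {
        console.error(result.message);
    }
})();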
150  src/services/sigma/sigma_details_service.js  Normal file
@@ -0,0 +1,150 @@
/**
 * sigma_details_service.js
 *
 * This service provides functionality for retrieving and explaining Sigma rules.
 */
const logger = require('../../utils/logger');
const { convertSigmaRule, extractDetectionCondition } = require('./sigma_converter_service');
const { debugRuleContent, getRuleYamlContent } = require('../../sigma_db/sigma_db_queries');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

/**
 * Explains a Sigma rule by providing a simplified, human-readable format
 * Performs diagnostics before explanation and handles error cases
 *
 * @param {string} ruleId - The ID of the rule to explain
 * @returns {Promise<Object>} Result object with success flag and explanation or error message
 */
async function explainSigmaRule(ruleId) {
    if (!ruleId) {
        logger.warn(`${FILE_NAME}: Cannot explain rule: Missing rule ID`);
        return {
            success: false,
            message: 'Missing rule ID'
        };
    }

    logger.info(`${FILE_NAME}: Running diagnostics for rule: ${ruleId}`);
    logger.info(`${FILE_NAME}: Explaining rule ${ruleId}`);

    try {
        // Run diagnostics on the rule content first
        const diagnosticResult = await debugRuleContent(ruleId);
        logger.debug(`${FILE_NAME}: Diagnostic result: ${JSON.stringify(diagnosticResult || {})}`);

        // Convert the rule ID to a structured object
        const conversionResult = await convertSigmaRule(ruleId);
        if (!conversionResult.success) {
            logger.warn(`${FILE_NAME}: Failed to convert rule ${ruleId}: ${conversionResult.message}`);
            return {
                success: false,
                message: conversionResult.message || `Failed to parse rule with ID ${ruleId}`
            };
        }

        const rule = conversionResult.rule;

        // Extra safety check
        if (!rule) {
            logger.error(`${FILE_NAME}: Converted rule is null for ID ${ruleId}`);
            return {
                success: false,
                message: `Failed to process rule with ID ${ruleId}`
            };
        }

        // Create a simplified explanation with safe access to properties
        const explanation = {
            id: rule.id || ruleId,
            title: rule.title || 'Untitled Rule',
            description: rule.description || 'No description provided',
            author: rule.author || 'Unknown author',
            severity: rule.level || 'Unknown',
            detectionExplanation: extractDetectionCondition(rule),
            falsePositives: Array.isArray(rule.falsepositives) ? rule.falsepositives :
                typeof rule.falsepositives === 'string' ? [rule.falsepositives] :
                    ['None specified'],
            tags: Array.isArray(rule.tags) ? rule.tags : [],
            references: Array.isArray(rule.references) ? rule.references : []
        };

        logger.info(`${FILE_NAME}: Successfully explained rule ${ruleId}`);
        logger.debug(`${FILE_NAME}: Explanation properties: ${Object.keys(explanation).join(', ')}`);

        return {
            success: true,
            explanation
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error explaining rule: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return {
            success: false,
            message: `Error explaining rule: ${error.message}`
        };
    }
}

/**
 * Gets the raw YAML content of a Sigma rule
 * Retrieves the content from the database
 *
 * @param {string} ruleId - The ID of the rule to get YAML for
 * @returns {Promise<Object>} Result object with success flag and YAML content or error message
 */
async function getSigmaRuleYaml(ruleId) {
    if (!ruleId) {
        logger.warn(`${FILE_NAME}: Cannot get YAML: Missing rule ID`);
        return {
            success: false,
            message: 'Missing rule ID'
        };
    }

    logger.info(`${FILE_NAME}: Getting YAML content for rule: ${ruleId}`);

    try {
        // Get YAML content from database
        const yamlResult = await getRuleYamlContent(ruleId);

        if (!yamlResult.success) {
            logger.warn(`${FILE_NAME}: Failed to retrieve YAML for rule ${ruleId}: ${yamlResult.message}`);
            return {
                success: false,
                message: yamlResult.message || `Failed to retrieve YAML for rule with ID ${ruleId}`
            };
        }

        // Add extra safety check for content
        if (!yamlResult.content) {
            logger.warn(`${FILE_NAME}: YAML content is empty for rule ${ruleId}`);
            return {
                success: true,
                yaml: '',
                warning: 'YAML content is empty for this rule'
            };
        }

        logger.debug(`${FILE_NAME}: Successfully retrieved YAML content with length: ${yamlResult.content.length}`);

        // Return the YAML content
        return {
            success: true,
            yaml: yamlResult.content
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error retrieving YAML: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return {
            success: false,
            message: `Error retrieving YAML: ${error.message}`
        };
    }
}

module.exports = {
    explainSigmaRule,
    getSigmaRuleYaml
};
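
A hypothetical usage sketch (illustrative; the rule ID and require path are placeholders):

const { explainSigmaRule, getSigmaRuleYaml } = require('./services/sigma/sigma_details_service');

(async () => {
    const explained = await explainSigmaRule('example-rule-id');
    if (explained.success) {
        const { title, severity, detectionExplanation } = explained.explanation;
        console.log(`${title} [${severity}]: ${detectionExplanation}`);
    }

    const yamlResult = await getSigmaRuleYaml('example-rule-id');
    if (yamlResult.success) {
        console.log(yamlResult.yaml);
    }
})();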
188  src/services/sigma/sigma_repository_service.js  Normal file
@@ -0,0 +1,188 @@
/**
 * sigma_repository_service.js
 *
 * This service manages the Sigma rule repository and database updates.
 * It provides functions to clone/update the repository and run the database
 * initialization script.
 */
const { spawn } = require('child_process');
const util = require('util');
const { exec } = require('child_process');
const fs = require('fs');
const path = require('path');
const { SIGMA_REPO_DIR } = require('../../config/appConfig');
const appConfig = require('../../config/appConfig');
const logger = require('../../utils/logger');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

// Promisify exec for async/await usage
const execPromise = util.promisify(exec);

/**
 * Clones or updates the Sigma repository
 * Creates the repository directory if it doesn't exist
 *
 * @returns {Promise<boolean>} Success status of the operation
 */
async function updateSigmaRepo() {
    logger.debug(`${FILE_NAME}: Starting Sigma repository update process`);

    try {
        // Ensure the parent directory exists
        const parentDir = path.dirname(SIGMA_REPO_DIR);
        if (!fs.existsSync(parentDir)) {
            logger.debug(`${FILE_NAME}: Creating parent directory: ${parentDir}`);
            fs.mkdirSync(parentDir, { recursive: true });
        }

        if (!fs.existsSync(SIGMA_REPO_DIR)) {
            logger.info(`${FILE_NAME}: Cloning Sigma repository...`);

            // Read config to get repo URL
            const repoUrl = appConfig.SIGMA_REPO_CONFIG.url;
            if (!repoUrl) {
                throw new Error('Repository URL not found in configuration');
            }

            logger.debug(`${FILE_NAME}: Using repository URL: ${repoUrl}`);
            const cloneResult = await execPromise(`git clone ${repoUrl} ${SIGMA_REPO_DIR}`);

            logger.debug(`${FILE_NAME}: Clone output: ${cloneResult.stdout}`);
        } else {
            logger.info(`${FILE_NAME}: Updating existing Sigma repository...`);

            // Check if it's actually a git repository
            if (!fs.existsSync(path.join(SIGMA_REPO_DIR, '.git'))) {
                logger.warn(`${FILE_NAME}: Directory exists but is not a git repository: ${SIGMA_REPO_DIR}`);
                throw new Error('Directory exists but is not a git repository');
            }

            const pullResult = await execPromise(`cd ${SIGMA_REPO_DIR} && git pull`);
            logger.debug(`${FILE_NAME}: Pull output: ${pullResult.stdout}`);
        }

        logger.info(`${FILE_NAME}: Sigma repository is up-to-date`);
        return true;
    } catch (error) {
        logger.error(`${FILE_NAME}: Error updating Sigma repository: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return false;
    }
}

/**
 * Updates the Sigma database by running the initialization script
 * Spawns a child process to run the database initialization
 *
 * @returns {Promise<boolean>} Success status of the operation
 */
async function updateSigmaDatabase() {
    logger.info(`${FILE_NAME}: Starting database update process`);

    return new Promise((resolve, reject) => {
        const scriptPath = path.join(__dirname, '..', '..', 'db', 'init-sigma-db.js');

        // Verify the script exists before trying to run it
        if (!fs.existsSync(scriptPath)) {
            logger.error(`${FILE_NAME}: Database initialization script not found at: ${scriptPath}`);
            reject(new Error(`Database initialization script not found at: ${scriptPath}`));
            return;
        }

        logger.info(`${FILE_NAME}: Running database update script: ${scriptPath}`);

        const updateProcess = spawn('node', [scriptPath], {
            stdio: 'pipe' // Capture output instead of inheriting
        });

        // Capture and log stdout
        updateProcess.stdout.on('data', (data) => {
            logger.debug(`${FILE_NAME}: DB Update stdout: ${data.toString().trim()}`);
        });

        // Capture and log stderr
        updateProcess.stderr.on('data', (data) => {
            logger.warn(`${FILE_NAME}: DB Update stderr: ${data.toString().trim()}`);
        });

        updateProcess.on('close', (code) => {
            if (code === 0) {
                logger.info(`${FILE_NAME}: Database update completed successfully`);
                resolve(true);
            } else {
                logger.error(`${FILE_NAME}: Database update failed with exit code ${code}`);
                reject(new Error(`Update failed with exit code ${code}`));
            }
        });

        updateProcess.on('error', (err) => {
            logger.error(`${FILE_NAME}: Failed to start database update process: ${err.message}`);
            reject(err);
        });
    });
}

/**
 * Checks the status of the Sigma repository
 * Returns information about the repository including last commit
 *
 * @returns {Promise<Object>} Repository status information
 */
async function getSigmaRepoStatus() {
    logger.debug(`${FILE_NAME}: Checking Sigma repository status`);

    try {
        if (!fs.existsSync(SIGMA_REPO_DIR)) {
            logger.warn(`${FILE_NAME}: Sigma repository directory does not exist: ${SIGMA_REPO_DIR}`);
            return {
                exists: false,
                message: 'Repository has not been cloned yet'
            };
        }

        // Check if it's a git repository
        if (!fs.existsSync(path.join(SIGMA_REPO_DIR, '.git'))) {
            logger.warn(`${FILE_NAME}: Directory exists but is not a git repository: ${SIGMA_REPO_DIR}`);
            return {
                exists: true,
                isRepo: false,
                message: 'Directory exists but is not a git repository'
            };
        }

        // Get last commit info
        const lastCommitInfo = await execPromise(`cd ${SIGMA_REPO_DIR} && git log -1 --format="%h|%an|%ad|%s"`);
        const [hash, author, date, subject] = lastCommitInfo.stdout.trim().split('|');

        // Get branch info
        const branchInfo = await execPromise(`cd ${SIGMA_REPO_DIR} && git branch --show-current`);
        const currentBranch = branchInfo.stdout.trim();

        return {
            exists: true,
            isRepo: true,
            lastCommit: {
                hash,
                author,
                date,
                subject
            },
            branch: currentBranch,
            path: SIGMA_REPO_DIR
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error getting repository status: ${error.message}`);
        return {
            exists: true,
            error: error.message
        };
    }
}

module.exports = {
    updateSigmaRepo,
    updateSigmaDatabase,
    getSigmaRepoStatus
};
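
A hypothetical usage sketch (illustrative; error handling kept minimal, require path is a placeholder):

const { updateSigmaRepo, updateSigmaDatabase, getSigmaRepoStatus } = require('./services/sigma/sigma_repository_service');

(async () => {
    // Clone or pull the Sigma repository, then rebuild the rule database.
    if (await updateSigmaRepo()) {
        try {
            await updateSigmaDatabase();
        } catch (err) {
            console.error(`Database update failed: ${err.message}`);
        }
    }

    const status = await getSigmaRepoStatus();
    if (status.exists && status.isRepo) {
        console.log(`On branch ${status.branch}, last commit ${status.lastCommit.hash}`);
    }
})();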
214  src/services/sigma/sigma_search_service.js  Normal file
@@ -0,0 +1,214 @@
/**
 * sigma_search_service.js
 *
 * This service provides functionality for searching Sigma rules by keywords.
 * It processes search results and returns them in a structured format.
 * Supports pagination for large result sets.
 */
const { searchRules } = require('../../sigma_db/sigma_db_queries');
const logger = require('../../utils/logger');
const { convertSigmaRule } = require('./sigma_converter_service');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

/**
 * Searches for Sigma rules by keyword and processes the results
 * Returns a structured result object with success status and paginated results
 *
 * @param {string} keyword - The keyword to search for
 * @param {number} page - Page number (1-based index, default: 1)
 * @param {number} pageSize - Number of results per page (default: 10)
 * @returns {Promise<Object>} Result object with success flag and processed results with pagination info
 */
async function searchSigmaRules(keyword, page = 1, pageSize = 10) {
    if (!keyword || typeof keyword !== 'string') {
        logger.warn(`${FILE_NAME}: Cannot search rules: Missing or invalid keyword`);
        return {
            success: false,
            message: 'Missing or invalid search keyword'
        };
    }

    // Validate pagination parameters
    if (typeof page !== 'number' || page < 1) {
        logger.warn(`${FILE_NAME}: Invalid page number: ${page}, defaulting to 1`);
        page = 1;
    }

    if (typeof pageSize !== 'number' || pageSize < 1 || pageSize > 100) {
        logger.warn(`${FILE_NAME}: Invalid page size: ${pageSize}, defaulting to 10`);
        pageSize = 10;
    }

    // Trim the keyword to prevent accidental whitespace issues
    const trimmedKeyword = keyword.trim();
    if (trimmedKeyword.length === 0) {
        logger.warn(`${FILE_NAME}: Cannot search rules: Empty keyword after trimming`);
        return {
            success: false,
            message: 'Search keyword cannot be empty'
        };
    }

    // Calculate the offset based on page number
    const offset = (page - 1) * pageSize;
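    // Worked example (illustrative): page = 3, pageSize = 10 -> offset = 20,
    // i.e. the query skips rows 1-20 and returns rows 21-30.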

    logger.info(`${FILE_NAME}: Searching for Sigma rules with keyword: "${trimmedKeyword}" (page ${page}, size ${pageSize}, offset ${offset})`);

    try {
        // Pass pageSize and offset to the database query
        const searchResult = await searchRules(trimmedKeyword, pageSize, offset);

        // Defensive handling of possible return formats
        let allResults = [];
        let totalCount = 0;

        // Log what we actually received for debugging
        logger.debug(`${FILE_NAME}: Search result type: ${typeof searchResult}, isArray: ${Array.isArray(searchResult)}`);

        // Handle different possible return formats
        if (searchResult) {
            if (Array.isArray(searchResult)) {
                // Direct array of results
                allResults = searchResult;
                logger.debug(`${FILE_NAME}: Received array of ${allResults.length} results`);
            } else if (typeof searchResult === 'object') {
                // Object with results property
                if (Array.isArray(searchResult.results)) {
                    allResults = searchResult.results;
                    totalCount = searchResult.totalCount || 0;
                    logger.debug(`${FILE_NAME}: Received object with ${allResults.length} results of ${totalCount} total matches`);
                } else if (searchResult.totalCount !== undefined) {
                    // Object might have a different structure
                    totalCount = searchResult.totalCount;
                    logger.debug(`${FILE_NAME}: Received object with totalCount ${totalCount}`);
                }
            }
        }

        // Log what we extracted
        logger.debug(`${FILE_NAME}: Extracted ${allResults.length} results for page ${page} of total ${totalCount}`);

        if (allResults.length === 0 && totalCount === 0) {
            logger.info(`${FILE_NAME}: No rules found matching "${trimmedKeyword}"`);
            return {
                success: true,
                results: [],
                message: `No rules found matching "${trimmedKeyword}"`,
                pagination: {
                    currentPage: 1,
                    pageSize: pageSize,
                    totalPages: 0,
                    totalResults: 0,
                    hasMore: false
                }
            };
        }

        // Calculate total pages and pagination info based on total count from database
        const totalPages = Math.ceil(totalCount / pageSize);
        const hasMore = (offset + pageSize) < totalCount;

        // Check if the requested page is valid
        if (offset >= totalCount && totalCount > 0) {
            // Return empty results but with pagination info
            logger.warn(`${FILE_NAME}: Page ${page} exceeds available results (total: ${totalCount})`);
            return {
                success: true,
                results: [],
                message: `No results on page ${page}. Try a previous page.`,
                pagination: {
                    currentPage: page,
                    pageSize: pageSize,
                    totalPages: totalPages,
                    totalResults: totalCount,
                    hasMore: false
                }
            };
        }

        // If we have results, include them with pagination info
        logger.debug(`${FILE_NAME}: Returning ${allResults.length} results with pagination info (page ${page}/${totalPages}, total: ${totalCount})`);

        return {
            success: true,
            results: allResults,
            count: allResults.length,
            pagination: {
                currentPage: page,
                pageSize: pageSize,
                totalPages: totalPages,
                totalResults: totalCount,
                hasMore: hasMore
            }
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error searching for rules: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return {
            success: false,
            message: `Error searching for rules: ${error.message}`
        };
    }
}

/**
 * Enhanced search that returns fully converted rule objects with pagination support
 * This is a more expensive operation than basic search
 *
 * @param {string} keyword - The keyword to search for
 * @param {number} page - Page number (1-based index, default: 1)
 * @param {number} pageSize - Number of results per page (default: 10)
 * @returns {Promise<Object>} Result object with success flag and fully converted rule objects with pagination info
 */
async function searchAndConvertRules(keyword, page = 1, pageSize = 10) {
    try {
        // First perform a basic search with pagination
        const searchResult = await searchSigmaRules(keyword, page, pageSize);

        if (!searchResult.success || !searchResult.results || searchResult.results.length === 0) {
            return searchResult;
        }

        logger.debug(`${FILE_NAME}: Converting ${searchResult.results.length} search results to full rule objects`);

        // Convert each result to a full rule object
        const convertedResults = [];
        for (const result of searchResult.results) {
            try {
                const conversionResult = await convertSigmaRule(result.id);
                if (conversionResult.success && conversionResult.rule) {
                    convertedResults.push(conversionResult.rule);
                } else {
                    logger.warn(`${FILE_NAME}: Failed to convert rule ${result.id}: ${conversionResult.message || 'Unknown error'}`);
                }
            } catch (conversionError) {
                logger.error(`${FILE_NAME}: Error converting rule ${result.id}: ${conversionError.message}`);
            }
        }

        logger.info(`${FILE_NAME}: Successfully converted ${convertedResults.length} of ${searchResult.results.length} search results`);

        // Include the pagination information from the search results
        return {
            success: true,
            results: convertedResults,
            count: convertedResults.length,
            originalCount: searchResult.results.length,
            pagination: searchResult.pagination
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error in searchAndConvertRules: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);
        return {
            success: false,
            message: `Error searching and converting rules: ${error.message}`
        };
    }
}

module.exports = {
    searchSigmaRules,
    searchAndConvertRules
};
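
A hypothetical usage sketch (illustrative; 'mimikatz' and the require path are placeholder values):

const { searchSigmaRules, searchAndConvertRules } = require('./services/sigma/sigma_search_service');

(async () => {
    // Basic keyword search: second page of 20 results.
    const basic = await searchSigmaRules('mimikatz', 2, 20);
    if (basic.success) {
        console.log(`Page ${basic.pagination.currentPage} of ${basic.pagination.totalPages}, ${basic.pagination.totalResults} total matches`);
    }

    // Heavier variant that also converts each hit into a full rule object.
    const full = await searchAndConvertRules('mimikatz', 1, 10);
    if (full.success) {
        full.results.forEach((rule) => console.log(`${rule.id}: ${rule.title}`));
    }
})();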
53  src/services/sigma/sigma_stats_service.js  Normal file
@@ -0,0 +1,53 @@
/**
 * sigma_stats_service.js
 *
 * Service for retrieving and processing Sigma rule database statistics
 * Provides aggregated statistical information about the rule database
 */
const logger = require('../../utils/logger');
const { getStatsFromDatabase } = require('../../sigma_db/sigma_db_queries');

const { getFileName } = require('../../utils/file_utils');
const FILE_NAME = getFileName(__filename);

/**
 * Get database statistics
 * Collects various statistics about the Sigma rule database
 *
 * @returns {Promise<Object>} Object with success flag and statistics or error message
 */
async function getSigmaStats() {
    logger.info(`${FILE_NAME}: Getting Sigma rule database statistics`);

    try {
        // Get statistics from database query function
        const statsResult = await getStatsFromDatabase();

        if (!statsResult.success) {
            logger.error(`${FILE_NAME}: Failed to retrieve statistics: ${statsResult.message}`);
            return {
                success: false,
                message: statsResult.message
            };
        }

        logger.info(`${FILE_NAME}: Successfully collected database statistics`);

        return {
            success: true,
            stats: statsResult.stats
        };
    } catch (error) {
        logger.error(`${FILE_NAME}: Error processing statistics: ${error.message}`);
        logger.debug(`${FILE_NAME}: Error stack: ${error.stack}`);

        return {
            success: false,
            message: `Error processing statistics: ${error.message}`
        };
    }
}

module.exports = {
    getSigmaStats
};
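
A hypothetical usage sketch (illustrative; the shape of `stats` is whatever getStatsFromDatabase() returns, and the require path is a placeholder):

const { getSigmaStats } = require('./services/sigma/sigma_stats_service');

(async () => {
    const result = await getSigmaStats();
    if (result.success) {
        console.log(result.stats);
    } else {
        console.error(result.message);
    }
})();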