162 changes: 162 additions & 0 deletions src/refinements/index.ts
@@ -5,6 +5,7 @@ import {
attackIdPatterns,
type Aliases,
type AttackObject,
type Collection,
type ExternalReferences,
type KillChainPhase,
type StixBundle,
@@ -190,6 +191,167 @@ export function createFirstBundleObjectRefinement() {
};
}

/**
* Creates a refinement function for validating that objects in an array have no duplicates
* based on specified keys
*
* @param arrayPath - The path to the array property in the context value (e.g., ['objects']). Use [] for direct array validation.
* @param keys - The keys to use for duplicate detection (e.g., ['id'] or ['source_name', 'external_id']). Use [] for primitive arrays.
* @param errorMessage - Optional custom error message template. Use a `{keyName}` placeholder for each key's value (e.g. `{source_name}`), `{value}` for primitive items, and `{index}` for the duplicate's position
* @returns A refinement function for duplicate validation
*
* @remarks
* This function validates that objects in an array are unique based on one or more key fields.
* It creates a composite key from the specified fields and checks for duplicates.
*
* **Supports three validation modes:**
* 1. Object arrays with single key: `keys = ['id']`
* 2. Object arrays with composite keys: `keys = ['source_name', 'external_id']`
* 3. Primitive arrays: `keys = []` (validates the values themselves)
*
* @example
* ```typescript
* // Single key validation
* const validateUniqueIds = validateNoDuplicates(['objects'], ['id']);
* const schema = baseSchema.check(validateUniqueIds);
*
* // Composite key validation
* const validateUniqueRefs = validateNoDuplicates(
* ['external_references'],
* ['source_name', 'external_id'],
* 'Duplicate reference found with source_name="{source_name}" and external_id="{external_id}"'
* );
*
* // Primitive array validation (e.g., array of strings)
* const validateUniqueStrings = validateNoDuplicates(
* [],
* [],
* 'Duplicate value "{value}" found'
* );
* ```
*/
export function validateNoDuplicates(arrayPath: string[], keys: string[], errorMessage?: string) {
return (ctx: z.core.ParsePayload<unknown>): void => {
// Navigate to the array using the path
let arr: unknown = ctx.value;
for (const pathSegment of arrayPath) {
if (arr && typeof arr === 'object') {
arr = (arr as Record<string, unknown>)[pathSegment];
} else {
return;
}
}

// If array doesn't exist or is not an array, skip validation
if (!Array.isArray(arr)) {
return;
}

const seen = new Map<string, number>();

arr.forEach((item, index) => {
// Create composite key from specified keys
// If keys array is empty, treat each item as a primitive value
const keyValues =
keys.length === 0
? [String(item)]
: keys.map((key) => {
const value = item?.[key];
return value !== undefined ? String(value) : '';
});
const compositeKey = keyValues.join('||');

if (seen.has(compositeKey)) {
// Build key-value pairs for error message
const keyValuePairs = keys.reduce(
(acc, key, i) => {
acc[key] = keyValues[i];
return acc;
},
{} as Record<string, string>,
);

// Generate error message
let message = errorMessage;
if (!message) {
if (keys.length === 0) {
// Primitive array (no keys)
message = `Duplicate value "${keyValues[0]}" found at index ${index}. Previously seen at index ${seen.get(compositeKey)}.`;
} else if (keys.length === 1) {
message = `Duplicate object with ${keys[0]}="${keyValues[0]}" found at index ${index}. Previously seen at index ${seen.get(compositeKey)}.`;
} else {
const keyPairs = keys.map((key, i) => `${key}="${keyValues[i]}"`).join(', ');
message = `Duplicate object with ${keyPairs} found at index ${index}. Previously seen at index ${seen.get(compositeKey)}.`;
}
} else {
// Replace placeholders in custom message
message = message.replace(/\{(\w+)\}/g, (match, key) => {
if (key === 'index') return String(index);
if (key === 'value' && keys.length === 0) return keyValues[0];
return keyValuePairs[key] ?? match;
});
}

ctx.issues.push({
code: 'custom',
message,
path: keys.length === 0 ? [...arrayPath, index] : [...arrayPath, index, ...keys],
input: keys.length === 0 ? item : keys.length === 1 ? item?.[keys[0]] : keyValuePairs,
});
} else {
seen.set(compositeKey, index);
}
});
};
}

/**
* Creates a refinement function for validating that all STIX IDs referenced in x_mitre_contents
* exist in the bundle's objects array
*
* @returns A refinement function for x_mitre_contents reference validation
*
* @remarks
* This function validates that every STIX ID referenced in the collection's x_mitre_contents
* property (which acts as a table of contents for the bundle) has a corresponding object
* in the bundle's objects array. This ensures referential integrity within the bundle.
*
* The function expects:
* - The first object in the bundle to be a Collection (x-mitre-collection type)
* - Each object_ref in x_mitre_contents to match an id in the objects array
*
* @example
* ```typescript
* const schema = stixBundleSchema.check(validateXMitreContentsReferences());
* ```
*/
export function validateXMitreContentsReferences() {
return (ctx: z.core.ParsePayload<StixBundle>): void => {
// Get the collection object (first object in bundle)
const collectionObject = ctx.value.objects[0];
const collectionContents = (collectionObject as Collection).x_mitre_contents;

if (!collectionContents) {
return;
}

// Create a set of all object IDs in the bundle for efficient lookup
const objectIds = new Set(ctx.value.objects.map((obj) => (obj as AttackObject).id));

// Validate each reference in x_mitre_contents
collectionContents.forEach((contentRef: { object_ref: string }, index: number) => {
if (!objectIds.has(contentRef.object_ref)) {
ctx.issues.push({
code: 'custom',
message: `STIX ID "${contentRef.object_ref}" referenced in x_mitre_contents is not present in the bundle's objects array`,
path: ['objects', 0, 'x_mitre_contents', index, 'object_ref'],
input: contentRef.object_ref,
});
}
});
};
}

/**
* Creates a refinement function for validating ATT&CK ID in external references
*
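For reviewers who want to exercise the new refinement outside the bundle schemas, here is a minimal standalone sketch (assuming `zod/v4` as used in this PR; the import path and the `external_references` shape are illustrative only):

```typescript
import { z } from 'zod/v4';
import { validateNoDuplicates } from './refinements/index.js'; // hypothetical path

// Nested-array + composite-key mode, mirroring the ['external_references'] case
// from the JSDoc example, with a custom message exercising placeholder substitution.
const reportSchema = z
  .object({
    external_references: z.array(
      z.object({ source_name: z.string(), external_id: z.string() }),
    ),
  })
  .check(
    validateNoDuplicates(
      ['external_references'],
      ['source_name', 'external_id'],
      'Duplicate reference {source_name}/{external_id} at index {index}',
    ),
  );

const result = reportSchema.safeParse({
  external_references: [
    { source_name: 'mitre-attack', external_id: 'T1059' },
    { source_name: 'mitre-attack', external_id: 'T1059' }, // same composite key
  ],
});

console.log(result.success); // false
console.log(result.error?.issues[0]?.message);
// "Duplicate reference mitre-attack/T1059 at index 1"
```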
32 changes: 10 additions & 22 deletions src/schemas/sdo/analytic.schema.ts
@@ -10,6 +10,7 @@ import {
xMitreModifiedByRefSchema,
xMitrePlatformsSchema,
} from '../common/property-schemas/index.js';
import { validateNoDuplicates } from '../../refinements/index.js';

//==============================================================================
//
@@ -46,28 +47,15 @@ export type LogSourceReference = z.infer<typeof xMitreLogSourceReferenceSchema>;
export const xMitreLogSourceReferencesSchema = z
.array(xMitreLogSourceReferenceSchema)
.min(1)
.refine(
// Reject duplicate log source references, delineated by (x_mitre_data_component_ref, name, channel)
// An analytic cannot reference the same log source twice
(logSourceReferences) => {
const seenRefs = new Set<string>();

for (const logSourceRef of logSourceReferences) {
const key = `${logSourceRef.x_mitre_data_component_ref}|${logSourceRef.name}|${logSourceRef.channel}`;
if (seenRefs.has(key)) {
return false;
}
seenRefs.add(key);
}

return true;
},
{
message:
'Duplicate log source reference found: each (x_mitre_data_component_ref, name, channel) tuple must be unique',
path: ['x_mitre_log_source_references'],
},
)
.check((ctx) => {
// Validate no duplicate log source references using composite key validation
// Each (x_mitre_data_component_ref, name, channel) tuple must be unique
validateNoDuplicates(
[],
['x_mitre_data_component_ref', 'name', 'channel'],
'Duplicate log source reference found: each (x_mitre_data_component_ref, name, channel) tuple must be unique',
)(ctx);
})
.meta({
description:
'A list of log source references, which are delineated by a Data Component STIX ID and the (`name`, `channel`) that is being targeted.',
32 changes: 10 additions & 22 deletions src/schemas/sdo/data-component.schema.ts
@@ -10,6 +10,7 @@ import {
xMitreDomainsSchema,
xMitreModifiedByRefSchema,
} from '../common/property-schemas/index.js';
import { validateNoDuplicates } from '../../refinements/index.js';

//==============================================================================
//
@@ -46,28 +47,15 @@ export const xMitreLogSourcesSchema = z
.strict(),
)
.min(1)
.refine(
// Reject duplicate (name, channel) pairs
// Allow same name with different channels
// Allow same channel with different names
(permutations) => {
const seen = new Set<string>();

for (const perm of permutations) {
const key = `${perm.name}|${perm.channel}`;
if (seen.has(key)) {
return false;
}
seen.add(key);
}

return true;
},
{
message: 'Duplicate log source found: each (name, channel) pair must be unique',
path: ['x_mitre_log_sources'],
},
)
.check((ctx) => {
// Validate no duplicate (name, channel) pairs using composite key validation
// Allow same name with different channels, and same channel with different names
validateNoDuplicates(
[],
['name', 'channel'],
'Duplicate log source found: each (name, channel) pair must be unique',
)(ctx);
})
.meta({
description: `
The \`log_source\` object defines platform-specific collection configurations embedded within data components:
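A quick sanity sketch of the (name, channel) uniqueness rule using the same refinement; the inner object here is a stand-in with only the two keyed fields, not the full log source schema from this file:

```typescript
import { z } from 'zod/v4';
import { validateNoDuplicates } from './refinements/index.js'; // hypothetical path

// Stand-in element schema: only the two keyed fields, not the full log source object.
const logSourcesSketch = z
  .array(z.object({ name: z.string(), channel: z.string() }))
  .check(
    validateNoDuplicates(
      [],
      ['name', 'channel'],
      'Duplicate log source found: each (name, channel) pair must be unique',
    ),
  );

// Same name under two different channels passes; the exact pair repeat fails.
const distinct = logSourcesSketch.safeParse([
  { name: 'sysmon', channel: 'EventID 1' },
  { name: 'sysmon', channel: 'EventID 3' },
]);
const repeated = logSourcesSketch.safeParse([
  { name: 'sysmon', channel: 'EventID 1' },
  { name: 'sysmon', channel: 'EventID 1' },
]);

console.log(distinct.success, repeated.success); // true false
```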
9 changes: 9 additions & 0 deletions src/schemas/sdo/detection-strategy.schema.ts
@@ -8,6 +8,7 @@ import {
xMitreDomainsSchema,
xMitreModifiedByRefSchema,
} from '../common/property-schemas/index.js';
import { validateNoDuplicates } from '../../refinements/index.js';

//==============================================================================
//
@@ -30,6 +31,14 @@ export const detectionStrategySchema = attackBaseDomainObjectSchema
x_mitre_analytic_refs: z
.array(createStixIdValidator('x-mitre-analytic'))
.nonempty({ error: 'At least one analytic ref is required' })
.check((ctx) => {
// Validate no duplicate analytic references using primitive array validation
validateNoDuplicates(
[],
[],
'Duplicate reference "{value}" found. Each embedded relationship referenced in x_mitre_analytic_refs must be unique.',
)(ctx);
})
.meta({
description:
'Array of STIX IDs referencing `x-mitre-analytic` objects that implement this detection strategy.',
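The primitive-array mode (`keys = []`) used here can be exercised on its own; this sketch swaps the real `createStixIdValidator('x-mitre-analytic')` element schema for `z.string()` to stay self-contained:

```typescript
import { z } from 'zod/v4';
import { validateNoDuplicates } from './refinements/index.js'; // hypothetical path

// keys = [] compares the string values themselves; arrayPath = [] because the
// check is attached directly to the array schema (as in x_mitre_analytic_refs).
const analyticRefsSketch = z
  .array(z.string())
  .check(
    validateNoDuplicates(
      [],
      [],
      'Duplicate reference "{value}" found at index {index}.',
    ),
  );

const ref = 'x-mitre-analytic--5f6d8c1e-0000-4000-8000-000000000000';
const result = analyticRefsSketch.safeParse([ref, ref]);

console.log(result.success); // false
console.log(result.error?.issues[0]?.message);
// Duplicate reference "x-mitre-analytic--5f6d8c1e-..." found at index 1.
```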
17 changes: 16 additions & 1 deletion src/schemas/sdo/stix-bundle.schema.ts
@@ -1,5 +1,9 @@
import { z } from 'zod/v4';
import { createFirstBundleObjectRefinement } from '../../refinements/index.js';
import {
createFirstBundleObjectRefinement,
validateNoDuplicates,
validateXMitreContentsReferences,
} from '../../refinements/index.js';
import {
createStixIdValidator,
createStixTypeValidator,
@@ -188,7 +192,18 @@ export const stixBundleSchema = z
})
.strict()
.check((ctx) => {
// Validate that the first object in the 'objects' array is of type 'x-mitre-collection'
createFirstBundleObjectRefinement()(ctx);

// Validate that all IDs referenced in 'x_mitre_contents' are present in 'objects' array
validateXMitreContentsReferences()(ctx);

// Validate that no duplicate objects are present in 'objects' array
validateNoDuplicates(
['objects'],
['id'],
'Duplicate object with id "{id}" found. Each object in the bundle must have a unique id.',
)(ctx);
});

export type StixBundle = z.infer<typeof stixBundleSchema>;
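A rough smoke test of the two new bundle-level checks, bypassing full `stixBundleSchema` parsing; the hand-built payload carries only the fields the refinements actually read, and the cast is just to satisfy the `ParsePayload` typing:

```typescript
import {
  validateNoDuplicates,
  validateXMitreContentsReferences,
} from '../../refinements/index.js'; // hypothetical path; adjust to your layout

// Only `objects[*].id` and `objects[0].x_mitre_contents` are read by these checks.
const payload = {
  value: {
    objects: [
      {
        id: 'x-mitre-collection--11111111-1111-4111-8111-111111111111',
        x_mitre_contents: [
          { object_ref: 'attack-pattern--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa' }, // present below
          { object_ref: 'attack-pattern--bbbbbbbb-bbbb-4bbb-8bbb-bbbbbbbbbbbb' }, // dangling
        ],
      },
      { id: 'attack-pattern--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa' },
    ],
  },
  issues: [] as unknown[],
};

// At runtime the refinements only touch `value` and `issues`, so a plain object
// plus a cast is enough for a smoke test.
validateXMitreContentsReferences()(payload as never);
validateNoDuplicates(['objects'], ['id'])(payload as never);

console.log(payload.issues.length); // 1 — only the dangling x_mitre_contents reference
```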
6 changes: 3 additions & 3 deletions test/objects/detection-strategy.test.ts
@@ -226,14 +226,14 @@ describe('detectionStrategySchema', () => {
});

describe('Edge Cases and Special Scenarios', () => {
it('should handle duplicate analytic IDs', () => {
it('should reject duplicate analytic IDs', () => {
const analyticId = `x-mitre-analytic--${uuidv4()}`;
const detectionStrategyWithDuplicates: DetectionStrategy = {
...minimalDetectionStrategy,
x_mitre_analytic_refs: [analyticId, analyticId, analyticId],
};
// Schema doesn't prevent duplicates, so this should pass
expect(() => detectionStrategySchema.parse(detectionStrategyWithDuplicates)).not.toThrow();
// Schema prevents duplicates, so this should fail
expect(() => detectionStrategySchema.parse(detectionStrategyWithDuplicates)).toThrow();
});

it('should handle large number of analytics', () => {