// @mintlify/validation: Validates mint.json files
import lcm from 'lcm';
import { inferType } from './SchemaConverter.js';
import { addKeyIfDefined, copyExampleIfDefined, copyKeyIfDefined, dereference, recursivelyFindDescription, stringFileFormats, structuredDataContentTypes, } from './utils.js';
/**
 * Given an OpenAPI 3.1 SchemaObject or ReferenceObject containing any number of
 * refs or compositions, this function returns the schema in sum-of-products form.
 *
 * When given the following schema:
 *
 * ```yaml
 * title: 'A'
 * oneOf:
 *   - { title: 'B' }
 *   - { title: 'C' }
 * allOf:
 *   - { title: 'D' }
 *   - { title: 'E' }
 * ```
 *
 * this function returns the following sum of products:
 *
 * ```js
 * [
 *   [{ title: 'B' }, { title: 'D' }, { title: 'E' }, { title: 'A' }],
 *   [{ title: 'C' }, { title: 'D' }, { title: 'E' }, { title: 'A' }],
 * ]
 * ```
 *
 * @param schema The schema or ref to reduce
 * @param componentSchemas The value of `document.components.schemas`, to be used when dereferencing
 * @returns The schema in sum-of-products form
 */
export function reduceToSumOfProducts(schemaOrRef, componentSchemas, opts) {
    let schema;
    if ('$ref' in schemaOrRef) {
        const dereferencedSchema = dereference('schemas', schemaOrRef.$ref, componentSchemas);
        if (!dereferencedSchema) return [[{}]];
        schema = {
            ...dereferencedSchema,
            // @ts-expect-error some customers use title even though it's not part of the spec
            title: schemaOrRef.title ?? dereferencedSchema.title,
            description: schemaOrRef.description ?? dereferencedSchema.description,
        };
    }
    else {
        schema = schemaOrRef;
    }
    if (opts?.isRoot && schema.discriminator?.mapping && !schema.oneOf && !schema.allOf) {
        const properties = Object.values(schema.discriminator.mapping);
        if (properties.length) {
            delete schema.discriminator;
            schema = {
                ...schema,
                // @ts-expect-error allOf is indeed a valid type for schema
                allOf: [
                    {
                        oneOf: [
                            ...properties.map((prop) => ({
                                $ref: prop,
                            })),
                        ],
                    },
                ],
            };
        }
    }
    // handle v3 `nullable` property
    const nullable = schema.nullable;
    if (!schema.oneOf && !schema.allOf && !schema.anyOf && !Array.isArray(schema.type) && !nullable) {
        return [[schema]];
    }
    const baseSchema = { ...schema };
    delete baseSchema.oneOf;
    delete baseSchema.anyOf;
    delete baseSchema.allOf;
    delete baseSchema.not;
    const baseSchemaTypes = Array.isArray(baseSchema.type) ? [...baseSchema.type] : [baseSchema.type];
    if (nullable && !baseSchemaTypes.includes('null')) {
        baseSchemaTypes.push('null');
    }
    const baseSchemaArr = Object.keys(baseSchema).length > 0
        ? [baseSchemaTypes.map((type) => [{ ...baseSchema, type }])]
        : [];
    const reducedOneOfs = schema.oneOf?.map((subschema) => reduceToSumOfProducts(subschema, componentSchemas));
    const reducedAnyOfs = schema.anyOf?.map((subschema) => reduceToSumOfProducts(subschema, componentSchemas));
    const reducedAllOfs = schema.allOf?.map((subschema) => reduceToSumOfProducts(subschema, componentSchemas));
    const combinedOneOfs = reducedOneOfs ? [addIncrementalSchemas(reducedOneOfs)] : [];
    const combinedAnyOfs = reducedAnyOfs ? [addIncrementalSchemas(reducedAnyOfs)] : [];
    const multipliedAllOfs = reducedAllOfs ? [multiplyIncrementalSchemas(reducedAllOfs)] : [];
    return multiplyIncrementalSchemas([
        ...combinedOneOfs,
        ...combinedAnyOfs,
        ...multipliedAllOfs,
        ...baseSchemaArr,
    ]);
}
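// Illustrative sketch (not part of the published module): calling the exported function on the
// composed schema from the JSDoc above. No $refs are involved, so an empty `componentSchemas`
// map is enough. The result is the sum of products shown in the JSDoc (each row may also carry
// a `type: undefined` inherited from the base schema 'A').
//
//   const sumOfProducts = reduceToSumOfProducts(
//     {
//       title: 'A',
//       oneOf: [{ title: 'B' }, { title: 'C' }],
//       allOf: [{ title: 'D' }, { title: 'E' }],
//     },
//     {}
//   );
//   // sumOfProducts[0] => [{ title: 'B' }, { title: 'D' }, { title: 'E' }, { title: 'A', type: undefined }]
//   // sumOfProducts[1] => [{ title: 'C' }, { title: 'D' }, { title: 'E' }, { title: 'A', type: undefined }]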
/**
 * Adds an array of schemas in sum-of-products form, returning the result
 * as a single schema in sum-of-products form.
 *
 * (AB + CD) + (E + F) + (G + H) => AB + CD + E + F + G + H
 */
function addIncrementalSchemas(schemas) {
    // this one's easy! just concatenate all the sums
    return schemas.flat();
}
/**
 * Multiplies an array of schemas in sum-of-products form, returning the result
 * as a single schema in sum-of-products form.
 *
 * (AB + CD)(E + F)(G + H) => ABEG + ABEH + ABFG + ABFH + CDEG + CDEH + CDFG + CDFH
 */
function multiplyIncrementalSchemas(schemas) {
    // walking through this function, we'll use the example (AB + CD)(E + F)(G + H)
    // base case, which allows us to essentially do (G + H)(1) = (G + H)
    if (!schemas[0]) {
        return [[]];
    }
    // now we evaluate using the distributive property:
    // (AB + CD)(EG + EH + FG + FH) = (ABEG + ABEH + ABFG + ABFH) + (CDEG + CDEH + CDFG + CDFH)
    // first, we recursively evaluate all remaining terms so we only have to deal with the situation above.
    // in our scenario, the remaining terms are (E + F)(G + H), which gives us (EG + EH + FG + FH)
    const remainingSumOfProducts = multiplyIncrementalSchemas(schemas.slice(1));
    return schemas[0].flatMap((product /* e.g. AB */) => {
        return remainingSumOfProducts.map((remainingProduct /* e.g. EG */) => [...product, ...remainingProduct] /* e.g. ABEG */);
    });
}
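// Illustrative sketch (not part of the published module): with single-letter strings standing in
// for simple schemas, the two helpers above behave like polynomial addition and multiplication.
//
//   addIncrementalSchemas([[['A', 'B'], ['C', 'D']], [['E'], ['F']]]);
//   // => [['A', 'B'], ['C', 'D'], ['E'], ['F']]
//   //    (AB + CD) + (E + F) = AB + CD + E + F
//
//   multiplyIncrementalSchemas([[['A', 'B'], ['C', 'D']], [['E'], ['F']]]);
//   // => [['A', 'B', 'E'], ['A', 'B', 'F'], ['C', 'D', 'E'], ['C', 'D', 'F']]
//   //    (AB + CD)(E + F) = ABE + ABF + CDE + CDF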
/**
 * This function logically combines an array of simple schemas (schemas that contain no compositions)
 * in preparation for conversion to `IncrementalDataSchema`. This is akin to "multiplying" all of the schemas,
 * to continue our math analogy. The result is a single simple schema, which is easier to work with.
 *
 * How fields are combined depends on the field. For fields like `title` and `description` where
 * it doesn't make sense to combine, we just take the last. For `required` we combine arrays,
 * for `maximum` we take the minimum value, etc.
 *
 * @param schemas An array of simple schemas to combine
 * @param componentSchemas The value of `document.components.schemas`. In this function, it is only used to check if properties are readOnly/writeOnly
 * @param location Whether the schema is part of the request, response, or neither. Used for filtering readOnly/writeOnly properties
 * @returns A single simple schema that satisfies all the input schemas
 */
export function combineSimpleSchemas(schemas, componentSchemas, location) {
    // all schemas with no `type` field go at the end of the array to avoid
    // calling `copyKeyIfDefined` for discriminators with empty types unless
    // we're certain the schema's title is the last possible option
    schemas.sort((a, b) => {
        if (a.type && !b.type) return -1;
        if (!a.type && b.type) return 1;
        return 0;
    });
    return schemas.reduce((acc, curr) => {
        // schemas are meant to be immutable, so copy the type
        let currType = curr.type;
        // don't throw an error if type is being narrowed
        if (acc.type === 'integer' && currType === 'number') {
            currType = 'integer';
        }
        else if (acc.type === 'number' && currType === 'integer') {
            acc.type = 'integer';
        }
        else if (acc.type === undefined && currType !== undefined) {
            acc.type = currType;
        }
        else if (acc.type !== undefined && currType === undefined) {
            currType = acc.type;
        }
        if (acc.type !== currType) {
            throw new Error(`${acc.type} vs ${currType}`);
        }
        // we're technically breaking immutability rules here, but it's probably okay because
        // it will be the same every time - we're just normalizing the maximum/minimum
        // and exclusiveMaximum/exclusiveMinimum properties
        for (const schema of [acc, curr]) {
            if (typeof schema.exclusiveMaximum === 'number') {
                if (schema.maximum === undefined || schema.maximum >= schema.exclusiveMaximum) {
                    schema.maximum = schema.exclusiveMaximum;
                    schema.exclusiveMaximum = true;
                }
                else {
                    schema.exclusiveMaximum = undefined;
                }
            }
            if (typeof schema.exclusiveMinimum === 'number') {
                if (schema.minimum === undefined || schema.minimum <= schema.exclusiveMinimum) {
                    schema.minimum = schema.exclusiveMinimum;
                    schema.exclusiveMinimum = true;
                }
                else {
                    schema.exclusiveMinimum = undefined;
                }
            }
        }
        if (curr.discriminator == undefined && (curr.type != undefined || acc.title == undefined)) {
            copyKeyIfDefined('title', curr, acc);
        }
        copyKeyIfDefined('description', curr, acc);
        copyKeyIfDefined('format', curr, acc);
        copyKeyIfDefined('default', curr, acc);
        copyKeyIfDefined('x-default', curr, acc);
        copyKeyIfDefined('const', curr, acc);
        combineKeyIfDefined('multipleOf', curr, acc, lcm);
        combineKeyIfDefined('maxLength', curr, acc, Math.min);
        combineKeyIfDefined('minLength', curr, acc, Math.max);
        combineKeyIfDefined('maxItems', curr, acc, Math.min);
        combineKeyIfDefined('minItems', curr, acc, Math.max);
        combineKeyIfDefined('maxProperties', curr, acc, Math.min);
        combineKeyIfDefined('minProperties', curr, acc, Math.max);
        combineKeyIfDefined('required', curr, acc, (a, b) => b.concat(a.filter((value) => !b.includes(value))));
        combineKeyIfDefined('enum', curr, acc, (a, b) => b.filter((value) => a.includes(value)));
        combineKeyIfDefined('readOnly', curr, acc, (a, b) => a && b);
        combineKeyIfDefined('writeOnly', curr, acc, (a, b) => a && b);
        combineKeyIfDefined('deprecated', curr, acc, (a, b) => a || b);
        const combinedMaximum = combine(curr, acc, 'maximum', Math.min);
        const combinedMinimum = combine(curr, acc, 'minimum', Math.max);
        const exclusiveMaximum = (acc.maximum === combinedMaximum ? acc.exclusiveMaximum : undefined) ||
            (curr.maximum === combinedMaximum ? curr.exclusiveMaximum : undefined);
        addKeyIfDefined('exclusiveMaximum', exclusiveMaximum, acc);
        const exclusiveMinimum = (acc.minimum === combinedMinimum ? acc.exclusiveMinimum : undefined) ||
            (curr.minimum === combinedMinimum ? curr.exclusiveMinimum : undefined);
        addKeyIfDefined('exclusiveMinimum', exclusiveMinimum, acc);
        addKeyIfDefined('maximum', combinedMaximum, acc);
        addKeyIfDefined('minimum', combinedMinimum, acc);
        // don't use coalesce operator, since null is a valid example
        const example1 = acc.examples?.[0] !== undefined ? acc.examples[0] : acc.example;
        const example2 = curr.examples?.[0] !== undefined ? curr.examples[0] : curr.example;
        if (example1 && example2 && typeof example1 === 'object' && typeof example2 === 'object') {
            acc.example = { ...example1, ...example2 };
        }
        else {
            // don't use coalesce operator, since null is a valid example
            addKeyIfDefined('example', example2 !== undefined ? example2 : example1, acc);
        }
        if (curr.items) {
            const items = acc.items ?? { allOf: [] };
            items.allOf.push(curr.items);
            acc.items = items;
        }
        if (curr.properties) {
            Object.entries(curr.properties)
                .filter(([_, subschema]) => {
                    // dereference just for the readOnly/writeOnly check
                    if ('$ref' in subschema) {
                        const dereferencedSchema = dereference('schemas', subschema.$ref, componentSchemas);
                        if (!dereferencedSchema) return true;
                        subschema = dereferencedSchema;
                    }
                    if (subschema.readOnly && location === 'request') return false;
                    if (subschema.writeOnly && location === 'response') return false;
                    return true;
                })
                .forEach(([property, subschema]) => {
                    const properties = acc.properties ?? {};
                    const currSchemaArr = properties[property];
                    if (currSchemaArr) {
                        currSchemaArr.allOf.push(subschema);
                    }
                    else {
                        properties[property] = { allOf: [subschema] };
                    }
                    acc.properties = properties;
                });
        }
        if (acc.properties?.type && curr.discriminator && !acc.description) {
            let name = undefined;
            const allOf = acc.properties.type.allOf[0] ?? {};
            if ('const' in allOf && typeof allOf.const === 'string') {
                name = allOf.const;
            }
            const description = recursivelyFindDescription(curr, name) ||
                schemas.flatMap((schema) => recursivelyFindDescription(schema, name)).filter(Boolean)[0];
            if (description) {
                acc.description = description;
            }
        }
        if (curr.additionalProperties === false) {
            acc.additionalProperties = false;
        }
        else if (acc.additionalProperties !== false &&
            curr.additionalProperties &&
            typeof curr.additionalProperties === 'object') {
            const additionalProperties = acc.additionalProperties ?? { allOf: [] };
            additionalProperties.allOf.push(curr.additionalProperties);
            acc.additionalProperties = additionalProperties;
        }
        return acc;
    }, {});
}
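// Illustrative sketch (not part of the published module): combining two simple numeric schemas.
// Bounds are intersected (`maximum` takes the smaller value, `minimum` the larger) and unique
// fields like `description` are carried through. Assuming the `addKeyIfDefined`/`copyKeyIfDefined`
// helpers in './utils.js' skip undefined values, the result is roughly:
//
//   combineSimpleSchemas(
//     [
//       { type: 'integer', minimum: 0, maximum: 100, description: 'a percentage' },
//       { type: 'integer', minimum: 10, maximum: 50 },
//     ],
//     {},
//     'request'
//   );
//   // => { type: 'integer', description: 'a percentage', maximum: 50, minimum: 10 }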
const combineKeyIfDefined = (key, source, destination, transform) => {
    addKeyIfDefined(key, combine(source, destination, key, transform), destination);
};
const combine = (schema1, schema2, key, transform) => {
    return schema1[key] !== undefined && schema2[key] !== undefined
        ? transform(schema1[key], schema2[key])
        : (schema1[key] ?? schema2[key]);
};
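// Illustrative sketch (not part of the published module): the internal `combine` helper applies
// the transform only when both schemas define the key, otherwise it falls back to whichever
// value is defined.
//
//   combine({ maximum: 100 }, { maximum: 50 }, 'maximum', Math.min); // => 50
//   combine({ maximum: 100 }, {}, 'maximum', Math.min);              // => 100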
function convertCombinedSchema(schema, required) {
    const sharedProps = {};
    addKeyIfDefined('required', required, sharedProps);
    copyKeyIfDefined('title', schema, sharedProps);
    copyKeyIfDefined('description', schema, sharedProps);
    copyKeyIfDefined('readOnly', schema, sharedProps);
    copyKeyIfDefined('writeOnly', schema, sharedProps);
    copyKeyIfDefined('deprecated', schema, sharedProps);
    if (schema.type === undefined) {
        const inferredType = inferType(schema);
        if (inferredType === undefined) {
            return { type: 'any', ...sharedProps };
        }
        schema.type = inferredType;
    }
    switch (schema.type) {
        case 'boolean':
            const booleanProps = sharedProps;
            copyKeyIfDefined('default', schema, booleanProps);
            copyKeyIfDefined('x-default', schema, booleanProps);
            copyExampleIfDefined(schema, booleanProps);
            return { type: schema.type, ...booleanProps };
        case 'number':
        case 'integer':
            if (schema.enum) {
                const numberEnumProps = sharedProps;
                copyKeyIfDefined('default', schema, numberEnumProps);
                copyKeyIfDefined('x-default', schema, numberEnumProps);
                copyExampleIfDefined(schema, numberEnumProps);
                return {
                    type: schema.type === 'number' ? 'enum<number>' : 'enum<integer>',
                    enum: schema.enum.filter((option) => typeof option === 'number'),
                    ...numberEnumProps,
                };
            }
            const numberProps = sharedProps;
            copyKeyIfDefined('multipleOf', schema, numberProps);
            copyKeyIfDefined('maximum', schema, numberProps);
            copyKeyIfDefined('exclusiveMaximum', schema, numberProps);
            copyKeyIfDefined('minimum', schema, numberProps);
            copyKeyIfDefined('exclusiveMinimum', schema, numberProps);
            copyKeyIfDefined('default', schema, numberProps);
            copyKeyIfDefined('x-default', schema, numberProps);
            copyExampleIfDefined(schema, numberProps);
            return { type: schema.type, ...numberProps };
        case 'string':
            if (schema.enum) {
                const stringEnumProps = sharedProps;
                copyKeyIfDefined('default', schema, stringEnumProps);
                copyKeyIfDefined('x-default', schema, stringEnumProps);
                copyExampleIfDefined(schema, stringEnumProps);
                return {
                    type: 'enum<string>',
                    enum: schema.enum.filter((option) => typeof option === 'string'),
                    ...stringEnumProps,
                };
            }
            if (schema.format && stringFileFormats.includes(schema.format)) {
                const fileProps = sharedProps;
                return { type: 'file', contentEncoding: schema.format, ...fileProps };
            }
            const stringProps = sharedProps;
            copyKeyIfDefined('format', schema, stringProps);
            copyKeyIfDefined('pattern', schema, stringProps);
            copyKeyIfDefined('maxLength', schema, stringProps);
            copyKeyIfDefined('minLength', schema, stringProps);
            copyKeyIfDefined('default', schema, stringProps);
            copyKeyIfDefined('x-default', schema, stringProps);
            copyKeyIfDefined('const', schema, stringProps);
            copyExampleIfDefined(schema, stringProps);
            return { type: schema.type, ...stringProps };
        case 'array':
            const arrayProps = sharedProps;
            copyKeyIfDefined('maxItems', schema, arrayProps);
            copyKeyIfDefined('minItems', schema, arrayProps);
            copyKeyIfDefined('uniqueItems', schema, arrayProps);
            copyKeyIfDefined('default', schema, arrayProps);
            copyKeyIfDefined('x-default', schema, arrayProps);
            copyExampleIfDefined(schema, arrayProps);
            return { type: schema.type, items: schema.items ?? {}, ...arrayProps };
        case 'object':
            const objectProperties = sharedProps;
            addKeyIfDefined('requiredProperties', schema.required, objectProperties);
            copyKeyIfDefined('additionalProperties', schema, objectProperties);
            copyKeyIfDefined('maxProperties', schema, objectProperties);
            copyKeyIfDefined('minProperties', schema, objectProperties);
            copyKeyIfDefined('default', schema, objectProperties);
            copyKeyIfDefined('x-default', schema, objectProperties);
            copyExampleIfDefined(schema, objectProperties);
            return { type: schema.type, properties: schema.properties ?? {}, ...objectProperties };
        case 'null':
            const nullProps = sharedProps;
            copyKeyIfDefined('default', schema, nullProps);
            copyKeyIfDefined('x-default', schema, nullProps);
            copyExampleIfDefined(schema, nullProps);
            return { type: schema.type, ...nullProps };
        default:
            throw new Error();
    }
}
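// Illustrative sketch (not part of the published module): how a combined simple schema maps onto
// the incremental shape inside this internal converter. A string schema with an `enum` becomes an
// `enum<string>` type, while a string schema whose `format` appears in `stringFileFormats` (a list
// imported from './utils.js') becomes a file type.
//
//   convertCombinedSchema({ type: 'string', enum: ['a', 'b'], description: 'a choice' }, true);
//   // => { type: 'enum<string>', enum: ['a', 'b'], required: true, description: 'a choice' }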
export function generateFirstIncrementalSchema(schema, componentSchemas, required, location, contentType) {
    if (schema === undefined) {
        // If the content type can feasibly be interpreted as a file (i.e. if it does NOT start
        // with "application/json" or another structured data format), return a file type.
        if (contentType && !structuredDataContentTypes.some((type) => contentType.startsWith(type))) {
            return [{ type: 'file', contentMediaType: contentType }];
        }
        return [{ type: 'any' }];
    }
    return generateNextIncrementalSchema(schema, componentSchemas, required, location);
}
export function generateNextIncrementalSchema(schema, componentSchemas, required, location) {
    const sumOfProducts = reduceToSumOfProducts(schema, componentSchemas, { isRoot: true });
    const incrementalDataSchemaArray = sumOfProducts.flatMap((product) => {
        try {
            const combinedSchema = combineSimpleSchemas(product, componentSchemas, location);
            return [convertCombinedSchema(combinedSchema, required)];
        }
        catch {
            return [];
        }
    });
    if (!incrementalDataSchemaArray[0]) {
        // TODO: throw when `safeParse: false`
        return [{ type: 'any' }];
    }
    return [incrementalDataSchemaArray[0], ...incrementalDataSchemaArray.slice(1)];
}
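// Illustrative sketch (not part of the published module): an end-to-end call for a request body
// schema that uses `oneOf`. Each branch of the composition yields one entry in the returned array.
// Assuming the './utils.js' helpers skip undefined values, the result is roughly:
//
//   generateFirstIncrementalSchema(
//     {
//       oneOf: [
//         { type: 'string', maxLength: 10 },
//         { type: 'integer', minimum: 0 },
//       ],
//     },
//     {},                // document.components.schemas (no $refs used here)
//     true,              // required
//     'request',         // filters out readOnly properties
//     'application/json'
//   );
//   // => [
//   //      { type: 'string', maxLength: 10, required: true },
//   //      { type: 'integer', minimum: 0, required: true },
//   //    ]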