synthex
Type-safe LLM response simulation with streaming & error injection
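Basic usage, as a minimal sketch combining the pieces shown in the examples below. The schema name `BasicSchema` is arbitrary, and the empty options object passed to `MockGenerator` is an assumption (every example below passes explicit options):
```ts
import { s, MockGenerator } from '../src/index';

// Build a named schema with the fluent `s` builder (same pattern as the examples below).
const schema = s.object({ text: s.string().required() }).build('BasicSchema');

// Assumption: MockGenerator accepts an empty options object and falls back to defaults.
const generator = new MockGenerator({});

// Generate a single mock response that conforms to the schema.
console.log(generator.generate(schema));
```
Conditional fields: `.when()` ties a field's requirement to the value of another field, e.g. `deactivationReason` when `status` is `'inactive'` and `activationDate` when it is `'active'`: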
```ts
import { s } from '../src/index';
const schema = s.object({
  status: s.enum(['active', 'inactive']).required(),
  deactivationReason: s.string().when('status', 'inactive').required(),
  activationDate: s.date().when('status', 'active').required(),
}).build('ConditionalSchema');
```
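Streaming: `streamGenerate` returns an async iterable that yields the response in chunks, with chunk size and per-chunk delay configured via `streamChunkSize` and `streamDelayMs`: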
```ts
import { s, MockGenerator } from '../src/index';
const schema = s.object({ text: s.string().required() }).build('StreamSchema');
const generator = new MockGenerator({ streamChunkSize: 1, streamDelayMs: 100 });
(async () => {
  for await (const chunk of generator.streamGenerate(schema)) {
    console.log(chunk);
  }
})();
```
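Hallucination injection: with `hallucinate` enabled, generated output may contain hallucinated values, at a rate controlled by `hallucinationProbability`: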
```ts
import { s, MockGenerator } from '../src/index';
const schema = s.object({ text: s.string().required() }).build('HallucSchema');
const generator = new MockGenerator({ hallucinate: true, hallucinationProbability: 0.5 });
console.log(generator.generate(schema));
```
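Function-call simulation: `simulateFunctionCall` produces a mock function-call response for a given function name and arguments: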
```ts
import { MockGenerator } from '../src/index';
const generator = new MockGenerator({ simulateFunctionCall: true });
console.log(generator.simulateFunctionCall('myFunc', { foo: 1 }));
```
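Schema import/export via the `schema-io` CLI, exporting a schema defined in a TypeScript module to JSON and importing it back: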
```sh
ts-node bin/schema-io.ts export ./examples/user-schema.ts ./user-schema.json
ts-node bin/schema-io.ts import ./user-schema.json
```