undeexcepturi
Version:
TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.
1,121 lines (965 loc) • 121 kB
text/typescript
import { v4 } from 'uuid';
import {
AnyEntity,
ChangeSet,
DefaultLogger,
EventSubscriber,
FilterQuery,
FlushEventArgs,
RawQueryFragment,
} from '@mikro-orm/core';
import {
ChangeSetType,
Collection,
Configuration,
EntityManager,
ForeignKeyConstraintViolationException,
InvalidFieldNameException,
IsolationLevel,
LoadStrategy,
LockMode,
MikroORM,
NonUniqueFieldNameException,
NotNullConstraintViolationException,
PopulateHint,
QueryFlag,
QueryOrder,
raw,
ref,
sql,
Reference,
SyntaxErrorException,
TableExistsException,
TableNotFoundException,
UniqueConstraintViolationException,
ValidationError,
wrap,
} from '@mikro-orm/core';
import { PostgreSqlConnection, PostgreSqlDriver } from '@mikro-orm/postgresql';
import {
Address2,
Author2,
Book2,
BookTag2,
FooBar2,
FooBaz2,
Label2,
Publisher2,
PublisherType,
PublisherType2,
Test2,
} from './entities-sql';
import { initORMPostgreSql, mockLogger } from './bootstrap';
import { performance } from 'perf_hooks';
import { Test2Subscriber } from './subscribers/Test2Subscriber';
describe('EntityManagerPostgre', () => {
let orm: MikroORM<PostgreSqlDriver>;
// Seeds three books sharing one author and one publisher, spreads five tags
// across them, flushes everything and detaches it all from the identity map.
async function createBooksWithTags() {
  const author = await orm.em.upsert(Author2, { name: 'Jon Snow', email: 'snow@wall.st' });
  const book1 = new Book2('My Life on The Wall, part 1', author.id);
  const book2 = new Book2('My Life on The Wall, part 2', author.id);
  const book3 = new Book2('My Life on The Wall, part 3', author.id);
  const publisher = new Publisher2();
  for (const book of [book1, book2, book3]) {
    book.publisher = ref(publisher);
  }
  const [silly, funny, sick, strange, sexy] = ['silly', 'funny', 'sick', 'strange', 'sexy'].map(name => new BookTag2(name));
  book1.tags.add(silly, sick);
  book2.tags.add(silly, funny, sexy);
  book3.tags.add(funny, strange, sexy);
  await orm.em.persistAndFlush([book1, book2, book3]);
  orm.em.clear();
}
// Suite fixture lifecycle: one shared ORM instance; data (not schema) is wiped
// before every test. The afterEach guard asserts the RawQueryFragment cache is
// drained, i.e. no raw()/sql fragment leaked out of the previous test.
beforeAll(async () => orm = await initORMPostgreSql());
beforeEach(async () => orm.schema.clearDatabase());
afterEach(() => expect(RawQueryFragment.checkCacheSize()).toBe(0));
afterAll(async () => {
await orm.schema.dropDatabase();
await orm.close(true);
});
// Exercises connection-state reporting: connected -> closed -> reconnected.
// NOTE: mutates the shared pool, so the reconnect at the end is mandatory.
test('isConnected()', async () => {
expect(await orm.isConnected()).toBe(true);
expect(await orm.checkConnection()).toEqual({
ok: true,
});
// close the pool and verify the failure diagnostics shape
await orm.close(true);
expect(await orm.isConnected()).toBe(false);
const check = await orm.checkConnection();
expect(check).toMatchObject({
ok: false,
error: expect.any(Error),
reason: 'Unable to acquire a connection',
});
// reconnect so subsequent tests keep a working pool
await orm.connect();
expect(await orm.isConnected()).toBe(true);
expect(await orm.checkConnection()).toEqual({
ok: true,
});
});
test('getConnectionOptions()', async () => {
  // Explicit options (host/user/password) win over the client URL, while the
  // database name and port are still parsed out of the URL itself.
  const config = new Configuration({
    driver: PostgreSqlDriver,
    clientUrl: 'postgre://root@127.0.0.1:1234/db_name',
    host: '127.0.0.10',
    password: 'secret',
    user: 'user',
    logger: jest.fn(),
    forceUtcTimezone: true,
  } as any, false);
  const options = new PostgreSqlDriver(config).getConnection().getConnectionOptions();
  expect(options).toMatchObject({
    database: 'db_name',
    host: '127.0.0.10',
    password: 'secret',
    port: 1234,
    user: 'user',
  });
});
test('raw query with array param', async () => {
  // An array bound to a single `?` inside `in (?)` expands to a quoted,
  // comma-separated list.
  const platform = orm.em.getPlatform();
  const numeric = await platform.formatQuery(`select * from author2 where id in (?) limit ?`, [[1, 2, 3], 3]);
  expect(numeric).toBe('select * from author2 where id in (1, 2, 3) limit 3');
  const textual = await platform.formatQuery(`select * from author2 where id in (?) limit ?`, [['1', '2', '3'], 3]);
  expect(textual).toBe(`select * from author2 where id in ('1', '2', '3') limit 3`);
});
// End-to-end smoke test of the low-level driver API: native CRUD (bypassing
// the UoW), the three raw execute modes, identifier/value formatting, and the
// batched insert with PostgreSQL's `returning` clause.
test('should return postgre driver', async () => {
const driver = orm.em.getDriver();
expect(driver).toBeInstanceOf(PostgreSqlDriver);
await expect(driver.findOne(Book2.name, { double: 123 })).resolves.toBeNull();
// native insert/update return QueryResult objects with insertId metadata
const author = await driver.nativeInsert(Author2.name, { name: 'author', email: 'email' });
const tag = await driver.nativeInsert(BookTag2.name, { name: 'tag name' });
const uuid1 = v4();
await expect(driver.nativeInsert(Book2.name, { uuid: uuid1, author: author.insertId, tags: [tag.insertId] })).resolves.not.toBeNull();
await expect(driver.nativeUpdate(Book2.name, { uuid: uuid1 }, { title: 'booook' })).resolves.not.toBeNull();
// raw execute modes: 'all' (default) -> rows array, 'get' -> single row,
// 'run' -> result object with affectedRows/insertId
await expect(driver.getConnection().execute('select 1 as count')).resolves.toEqual([{ count: 1 }]);
await expect(driver.getConnection().execute('select 1 as count', [], 'get')).resolves.toEqual({ count: 1 });
await expect(driver.getConnection().execute('select 1 as count', [], 'run')).resolves.toEqual({
affectedRows: 1,
row: { count: 1 },
rows: [{ count: 1 }],
});
await expect(driver.getConnection().execute('insert into test2 (name) values (?) returning id', ['test'], 'run')).resolves.toEqual({
affectedRows: 1,
insertId: 1,
row: { id: 1 },
rows: [{ id: 1 }],
});
await expect(driver.getConnection().execute('update test2 set name = ? where name = ?', ['test 2', 'test'], 'run')).resolves.toEqual({
affectedRows: 1,
insertId: 0,
row: undefined,
rows: [],
});
await expect(driver.getConnection().execute('delete from test2 where name = ?', ['test 2'], 'run')).resolves.toEqual({
affectedRows: 1,
insertId: 0,
row: undefined,
rows: [],
});
expect(driver.getPlatform().denormalizePrimaryKey(1)).toBe(1);
expect(driver.getPlatform().denormalizePrimaryKey('1')).toBe('1');
await expect(driver.find(BookTag2.name, { books: { $in: [uuid1] } })).resolves.not.toBeNull();
// formatQuery: `??` interpolates an identifier, `?` a value, `\\?` escapes
expect(driver.getPlatform().formatQuery('CREATE USER ?? WITH PASSWORD ?', ['foo', 'bar'])).toBe(`CREATE USER "foo" WITH PASSWORD 'bar'`);
expect(driver.getPlatform().formatQuery('select \\?, ?, ?', ['foo', 'bar'])).toBe(`select ?, 'foo', 'bar'`);
expect(driver.getPlatform().formatQuery('? = ??', ['foo', 'bar'])).toBe(`'foo' = "bar"`);
// multi inserts
await driver.nativeInsert(Test2.name, { id: 1, name: 't1' });
await driver.nativeInsert(Test2.name, { id: 2, name: 't2' });
await driver.nativeInsert(Test2.name, { id: 3, name: 't3' });
await driver.nativeInsert(Test2.name, { id: 4, name: 't4' });
await driver.nativeInsert(Test2.name, { id: 5, name: 't5' });
const mock = mockLogger(orm, ['query']);
const res = await driver.nativeInsertMany(Publisher2.name, [
{ name: 'test 1', tests: [1, 3, 4], type: PublisherType.GLOBAL, type2: PublisherType2.LOCAL },
{ name: 'test 2', tests: [4, 2], type: PublisherType.LOCAL, type2: PublisherType2.LOCAL },
{ name: 'test 3', tests: [1, 5, 2], type: PublisherType.GLOBAL, type2: PublisherType2.LOCAL },
]);
// one batched insert for the owners, then one insert per pivot-table batch
expect(mock.mock.calls[0][0]).toMatch('insert into "publisher2" ("name", "type", "type2") values ($1, $2, $3), ($4, $5, $6), ($7, $8, $9) returning "id"');
expect(mock.mock.calls[1][0]).toMatch('insert into "publisher2_tests" ("test2_id", "publisher2_id") values ($1, $2), ($3, $4), ($5, $6)');
expect(mock.mock.calls[2][0]).toMatch('insert into "publisher2_tests" ("test2_id", "publisher2_id") values ($1, $2), ($3, $4)');
expect(mock.mock.calls[3][0]).toMatch('insert into "publisher2_tests" ("test2_id", "publisher2_id") values ($1, $2), ($3, $4), ($5, $6)');
// postgres returns all the ids based on returning clause
expect(res).toMatchObject({ insertId: 1, affectedRows: 3, row: { id: 1 }, rows: [ { id: 1 }, { id: 2 }, { id: 3 } ] });
const res2 = await driver.find(Publisher2.name, {});
expect(res2).toMatchObject([
{ id: 1, name: 'test 1', type: PublisherType.GLOBAL, type2: PublisherType2.LOCAL },
{ id: 2, name: 'test 2', type: PublisherType.LOCAL, type2: PublisherType2.LOCAL },
{ id: 3, name: 'test 3', type: PublisherType.GLOBAL, type2: PublisherType2.LOCAL },
]);
});
test('multi insert maps PKs', async () => {
  // em.insertMany writes the returned PKs back onto the (unmanaged) entities
  // without registering them in the identity map.
  const names = ['n1', 'n2', 'n3', 'n4', 'n5'];
  const tests = names.map(name => orm.em.create(Test2, { name }, { persist: false }));
  await orm.em.insertMany(tests);
  expect(tests.map(t => t.id)).toEqual([1, 2, 3, 4, 5]);
  expect(orm.em.getUnitOfWork().getIdentityMap().values()).toHaveLength(0);
});
test('driver appends errored query', async () => {
  // The failing SQL statement is prefixed to the driver error message.
  const driver = orm.em.getDriver();
  await expect(driver.nativeInsert('not_existing', { foo: 'bar' }))
    .rejects.toThrow(`insert into "not_existing" ("foo") values ('bar') - relation "not_existing" does not exist`);
  await expect(driver.nativeDelete('not_existing', {}))
    .rejects.toThrow(`delete from "not_existing" - relation "not_existing" does not exist`);
});
test('connection returns correct URL', async () => {
  // `getClientUrl()` returns a string synchronously (with the password
  // masked). The previous `await expect(...).toBe(...)` awaited the void
  // result of the matcher — a no-op that suggested an async assertion where
  // there is none. Assert the string directly.
  const conn1 = new PostgreSqlConnection(new Configuration({
    driver: PostgreSqlDriver,
    clientUrl: 'postgre://example.host.com',
    port: 1234,
    user: 'usr',
    password: 'pw',
  } as any, false));
  expect(conn1.getClientUrl()).toBe('postgre://usr:*****@example.host.com:1234');
  // defaults kick in when no clientUrl is provided
  const conn2 = new PostgreSqlConnection(new Configuration({ driver: PostgreSqlDriver, port: 5433 } as any, false));
  expect(conn2.getClientUrl()).toBe('postgresql://postgres@127.0.0.1:5433');
});
test('should convert entity to PK when trying to search by entity', async () => {
  // A managed entity used as a filter value is converted to its primary key.
  const repo = orm.em.getRepository(Author2);
  const author = new Author2('name', 'email');
  author.termsAccepted = true;
  author.favouriteAuthor = author; // self-reference
  await orm.em.persistAndFlush(author);
  const byEntity = await repo.findOne(author);
  const byRelation = await repo.find({ favouriteAuthor: author });
  expect(byEntity).toBe(author);
  expect(byRelation[0]).toBe(author);
  await expect(repo.findOne({ termsAccepted: false })).resolves.toBeNull();
});
// Basic transactional() semantics: rollback on throw, implicit flush of
// persisted entities on commit, and rethrow of the original error instance.
test('transactions', async () => {
const god1 = new Author2('God1', 'hello@heaven1.god');
try {
await orm.em.transactional(async em => {
await em.persistAndFlush(god1);
throw new Error(); // rollback the transaction
});
} catch { }
// the rollback above must have discarded the insert
const res1 = await orm.em.findOne(Author2, { name: 'God1' });
expect(res1).toBeNull();
// persist() alone is enough inside transactional() — it flushes on commit,
// and the callback's return value is propagated
const ret = await orm.em.transactional(async em => {
const god2 = new Author2('God2', 'hello@heaven2.god');
await em.persist(god2);
return true;
});
const res2 = await orm.em.findOne(Author2, { name: 'God2' });
expect(res2).not.toBeNull();
expect(ret).toBe(true);
// a thrown error is rethrown as the very same instance after rollback
const err = new Error('Test');
try {
await orm.em.transactional(async em => {
const god3 = new Author2('God4', 'hello@heaven4.god');
await em.persist(god3);
throw err;
});
} catch (e) {
expect(e).toBe(err);
const res3 = await orm.em.findOne(Author2, { name: 'God4' });
expect(res3).toBeNull();
}
});
// The requested isolation level must be reflected in the BEGIN statement;
// call-order assertions on the query log pin begin -> insert -> rollback.
test('transactions with isolation levels', async () => {
const mock = mockLogger(orm, ['query']);
const god1 = new Author2('God1', 'hello@heaven1.god');
try {
await orm.em.transactional(async em => {
await em.persistAndFlush(god1);
throw new Error(); // rollback the transaction
}, { isolationLevel: IsolationLevel.READ_UNCOMMITTED });
} catch { }
expect(mock.mock.calls[0][0]).toMatch('begin transaction isolation level read uncommitted');
expect(mock.mock.calls[1][0]).toMatch('insert into "author2" ("created_at", "updated_at", "name", "email", "terms_accepted") values ($1, $2, $3, $4, $5) returning "id", "age"');
expect(mock.mock.calls[2][0]).toMatch('rollback');
});
// `readOnly: true` adds READ ONLY to the BEGIN statement, so the server
// itself rejects the INSERT; the ORM then rolls the transaction back.
test('read-only transactions', async () => {
const mock = mockLogger(orm, ['query']);
const god1 = new Author2('God1', 'hello@heaven1.god');
await expect(orm.em.transactional(async em => {
await em.persistAndFlush(god1);
}, { readOnly: true, isolationLevel: IsolationLevel.READ_COMMITTED })).rejects.toThrow(/cannot execute INSERT in a read-only transaction/);
expect(mock.mock.calls[0][0]).toMatch('begin transaction isolation level read committed read only');
expect(mock.mock.calls[1][0]).toMatch('insert into "author2" ("created_at", "updated_at", "name", "email", "terms_accepted") values ($1, $2, $3, $4, $5) returning "id", "age"');
expect(mock.mock.calls[2][0]).toMatch('rollback');
});
test('nested transactions with save-points', async () => {
  // Nested transactional() calls map to savepoints: a rolled-back inner
  // transaction must not poison the outer one.
  await orm.em.transactional(async em => {
    const god1 = new Author2('God1', 'hello1@heaven.god');
    try {
      await em.transactional(async inner => {
        await inner.persistAndFlush(god1);
        throw new Error(); // rollback the transaction
      });
    } catch { }
    // the savepoint rollback discarded the first insert
    await expect(em.findOne(Author2, { name: 'God1' })).resolves.toBeNull();
    // a second inner transaction commits its savepoint normally
    await em.transactional(async inner => {
      await inner.persistAndFlush(new Author2('God2', 'hello2@heaven.god'));
    });
    await expect(em.findOne(Author2, { name: 'God2' })).resolves.not.toBeNull();
  });
});
// An inner savepoint rollback must leave the outer transaction committable;
// the query log pins the exact sequence of savepoint statements.
test('nested transaction rollback with save-points will commit the outer one', async () => {
const mock = mockLogger(orm, ['query']);
// start outer transaction
const transaction = orm.em.transactional(async em => {
// do stuff inside inner transaction and rollback
try {
await em.transactional(async em2 => {
await em2.persistAndFlush(new Author2('God', 'hello@heaven.god'));
throw new Error(); // rollback the transaction
});
} catch { }
await em.persist(new Author2('God Persisted!', 'hello-persisted@heaven.god'));
});
// try to commit the outer transaction
await expect(transaction).resolves.toBeUndefined();
// begin -> savepoint -> insert -> rollback-to-savepoint -> insert -> commit
expect(mock.mock.calls.length).toBe(6);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch('savepoint trx');
expect(mock.mock.calls[2][0]).toMatch('insert into "author2" ("created_at", "updated_at", "name", "email", "terms_accepted") values ($1, $2, $3, $4, $5) returning "id"');
expect(mock.mock.calls[3][0]).toMatch('rollback to savepoint trx');
expect(mock.mock.calls[4][0]).toMatch('insert into "author2" ("created_at", "updated_at", "name", "email", "terms_accepted") values ($1, $2, $3, $4, $5) returning "id"');
expect(mock.mock.calls[5][0]).toMatch('commit');
await expect(orm.em.findOne(Author2, { name: 'God Persisted!' })).resolves.not.toBeNull();
});
// Regression test: loading a collection after a savepoint was released must
// still work, and pessimistic locks must appear in the generated SQL.
test('collection loads items after savepoint should not fail', async () => {
const publisher = new Publisher2('7K publisher', PublisherType.GLOBAL);
const book = new Book2('My Life on The Wall, part 1', new Author2('name', 'email'));
book.publisher = ref(publisher);
const author = new Author2('Bartleby', 'bartelby@writer.org');
author.books.add(book);
await orm.em.persistAndFlush(author);
orm.em.clear();
const mock = mockLogger(orm, ['query']);
// explicit transaction demarcation on a fork
const em = orm.em.fork();
await em.begin();
const book2 = await em.findOneOrFail(Book2, book.uuid);
const publisher2 = await book2.publisher!.loadOrFail({ populate: ['tests'], lockMode: LockMode.PESSIMISTIC_WRITE });
// empty nested transaction -> savepoint + release only
await em.transactional(async () => {
//
});
expect(publisher2.books.isInitialized(true)).toBe(false);
// the second load() reuses the already-initialized collection instance
const books1 = await publisher2.books.load({ lockMode: LockMode.PESSIMISTIC_WRITE });
const books2 = await publisher2.books.load({ lockMode: LockMode.PESSIMISTIC_WRITE });
expect(books1).toBeInstanceOf(Collection);
expect(books1.isInitialized(true)).toBe(true);
expect(books1).toBe(books2);
await em.commit();
expect(mock.mock.calls[0][0]).toMatch(`begin`);
expect(mock.mock.calls[1][0]).toMatch(`select "b0"."uuid_pk", "b0"."created_at", "b0"."title", "b0"."price", "b0"."double", "b0"."meta", "b0"."author_id", "b0"."publisher_id", "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where "b0"."author_id" is not null and "b0"."uuid_pk" = $1 limit $2`);
expect(mock.mock.calls[2][0]).toMatch(`select "p0".* from "publisher2" as "p0" where "p0"."id" = $1 limit $2 for update`);
expect(mock.mock.calls[3][0]).toMatch(`select "t1".*, "p0"."test2_id" as "fk__test2_id", "p0"."publisher2_id" as "fk__publisher2_id" from "publisher2_tests" as "p0" inner join "public"."test2" as "t1" on "p0"."test2_id" = "t1"."id" where "p0"."publisher2_id" in ($1) order by "p0"."id" asc for update`);
expect(mock.mock.calls[4][0]).toMatch(`savepoint trx`);
expect(mock.mock.calls[5][0]).toMatch(`release savepoint trx`);
expect(mock.mock.calls[6][0]).toMatch(`select "b0"."uuid_pk", "b0"."created_at", "b0"."title", "b0"."price", "b0"."double", "b0"."meta", "b0"."author_id", "b0"."publisher_id", "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where "b0"."author_id" is not null and "b0"."publisher_id" in ($1) for update`);
expect(mock.mock.calls[7][0]).toMatch(`commit`);
});
test('em.commit/rollback validation', async () => {
  // Both operations are invalid outside an explicit transaction context.
  const message = 'An open transaction is required for this operation';
  await expect(orm.em.commit()).rejects.toThrow(message);
  await expect(orm.em.rollback()).rejects.toThrow(message);
});
test('findOne supports optimistic locking [testMultipleFlushesDoIncrementalUpdates]', async () => {
  expect(Test2Subscriber.log).toEqual([]);
  // The query builders are awaited purely for their side effects (awaiting a
  // QB executes it) — the previous unused `a`/`r1` bindings wrongly implied
  // the results were needed.
  await orm.em.createQueryBuilder(Test2).insert({ name: '123' });
  await orm.em.createQueryBuilder(Test2).where({ name: '123' });
  orm.em.clear();
  // each flush of the dirty entity bumps the numeric version column by one
  const test = new Test2();
  for (let i = 0; i < 5; i++) {
    test.name = 'test' + i;
    await orm.em.persistAndFlush(test);
    expect(typeof test.version).toBe('number');
    expect(test.version).toBe(i + 1);
  }
  // one onFlush/afterFlush pair per persistAndFlush call; the QB operations
  // above bypass the UoW and therefore do not appear in the subscriber log
  expect(Test2Subscriber.log.map(r => r[0])).toEqual([
    'onFlush',
    'afterFlush',
    'onFlush',
    'afterFlush',
    'onFlush',
    'afterFlush',
    'onFlush',
    'afterFlush',
    'onFlush',
    'afterFlush',
  ]);
});
// Broad integration test covering persistence cascades, populate hints,
// full-text search, identity-map reuse, serialization, collection iteration,
// and ordering/limit/offset — statement order is significant throughout.
test('should load entities', async () => {
expect(orm).toBeInstanceOf(MikroORM);
expect(orm.em).toBeInstanceOf(EntityManager);
const god = new Author2('God', 'hello@heaven.god');
const bible = new Book2('Bible', god, 0.01);
bible.double = 123.45;
await orm.em.persistAndFlush(bible);
const author = new Author2('Jon Snow', 'snow@wall.st');
author.born = '1990-03-23';
author.favouriteBook = bible;
const publisher = new Publisher2('7K publisher', PublisherType.GLOBAL);
const book1 = new Book2('My Life on The Wall, part 1', author, 1.11);
book1.publisher = ref(publisher);
const book2 = new Book2('My Life on The Wall, part 2', author, 2.22);
book2.publisher = ref(publisher);
const book3 = new Book2('My Life on The Wall, part 3', author, 3.33);
book3.publisher = ref(publisher);
orm.em.persist(book1);
orm.em.persist(book2);
orm.em.persist(book3);
await orm.em.flush();
orm.em.clear();
// the publisher was persisted by cascade; its collection stays lazy
const publisher7k = (await orm.em.getRepository(Publisher2).findOne({ name: '7K publisher' }))!;
expect(publisher7k).not.toBeNull();
expect(publisher7k.tests).toBeInstanceOf(Collection);
expect(publisher7k.tests.isInitialized()).toBe(false);
orm.em.clear();
const authorRepository = orm.em.getRepository(Author2);
const booksRepository = orm.em.getRepository(Book2);
const books = await booksRepository.findAll({ populate: ['author'] });
expect(wrap(books[0].author).isInitialized()).toBe(true);
// numeric columns hydrate as JS numbers, not strings
expect(typeof books[0].double).toBe('number');
expect(books[0].double).toBe(123.45);
expect(typeof books[0].price).toBe('number');
expect(books[0].price).toBe(0.01);
await expect(authorRepository.findOne({ favouriteBook: bible.uuid })).resolves.not.toBe(null);
orm.em.clear();
const noBooks = await booksRepository.find({ title: 'not existing' }, { populate: ['author'] });
expect(noBooks.length).toBe(0);
orm.em.clear();
const jon = (await authorRepository.findOne({ name: 'Jon Snow' }, { populate: ['books', 'favouriteBook'] }))!;
const authors = await authorRepository.findAll({ populate: ['books', 'favouriteBook'] });
await expect(authorRepository.findOne({ email: 'not existing' })).resolves.toBeNull();
// full text search test
const fullTextBooks = (await booksRepository.find({ title: { $fulltext: 'life wall' } }))!;
expect(fullTextBooks.length).toBe(3);
// count test
const count = await authorRepository.count();
expect(count).toBe(authors.length);
const count2 = await authorRepository.count({ favouriteBook: v4() }, { groupBy: 'email' });
expect(count2).toBe(0);
// identity map test
authors.shift(); // shift the god away, as that entity is detached from IM
expect(jon).toBe(authors[0]);
expect(jon).toBe(await authorRepository.findOne(jon.id));
// serialization test
const o = wrap(jon).toJSON();
expect(o).toMatchObject({
id: jon.id,
createdAt: jon.createdAt,
updatedAt: jon.updatedAt,
books: [
{ author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 1' },
{ author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 2' },
{ author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 3' },
],
favouriteBook: { author: god.id, title: 'Bible' },
born: '1990-03-23',
email: 'snow@wall.st',
name: 'Jon Snow',
});
expect(wrap(jon).toJSON()).toEqual(o);
expect(jon.books.getIdentifiers()).toBeInstanceOf(Array);
expect(typeof jon.books.getIdentifiers()[0]).toBe('string');
expect(jon.books.getIdentifiers()[0]).toBe(book1.uuid);
for (const author of authors) {
expect(author.books).toBeInstanceOf(Collection);
expect(author.books.isInitialized()).toBe(true);
// iterator test
for (const book of author.books) {
expect(book.title).toMatch(/My Life on The Wall, part \d/);
expect(book.author).toBeInstanceOf(Author2);
expect(wrap(book.author).isInitialized()).toBe(true);
// publisher was not populated, so it stays a lazy Reference wrapper
expect(book.publisher).toBeInstanceOf(Reference);
expect(book.publisher!.unwrap()).toBeInstanceOf(Publisher2);
expect(book.publisher!.isInitialized()).toBe(false);
}
}
// ordering, limit and offset
const booksByTitleAsc = await booksRepository.find({ author: jon.id }, { orderBy: { title: QueryOrder.ASC } });
expect(booksByTitleAsc[0].title).toBe('My Life on The Wall, part 1');
expect(booksByTitleAsc[1].title).toBe('My Life on The Wall, part 2');
expect(booksByTitleAsc[2].title).toBe('My Life on The Wall, part 3');
const booksByTitleDesc = await booksRepository.find({ author: jon.id }, { orderBy: { title: QueryOrder.DESC } });
expect(booksByTitleDesc[0].title).toBe('My Life on The Wall, part 3');
expect(booksByTitleDesc[1].title).toBe('My Life on The Wall, part 2');
expect(booksByTitleDesc[2].title).toBe('My Life on The Wall, part 1');
const twoBooks = await booksRepository.find({ author: jon.id }, { orderBy: { title: QueryOrder.DESC }, limit: 2 });
expect(twoBooks.length).toBe(2);
expect(twoBooks[0].title).toBe('My Life on The Wall, part 3');
expect(twoBooks[1].title).toBe('My Life on The Wall, part 2');
const lastBook = await booksRepository.find({ author: jon.id }, {
populate: ['author'],
orderBy: { title: QueryOrder.DESC },
limit: 2,
offset: 2,
});
expect(lastBook.length).toBe(1);
expect(lastBook[0].title).toBe('My Life on The Wall, part 1');
expect(lastBook[0].author).toBeInstanceOf(Author2);
expect(wrap(lastBook[0].author).isInitialized()).toBe(true);
await orm.em.remove(lastBook[0]).flush();
});
test('json properties', async () => {
  // Persist a deeply nested JSON value, then verify hydration and that every
  // nested-path filter resolves to the very same managed entity.
  const god = new Author2('God', 'hello@heaven.god');
  god.identities = ['fb-123', 'pw-231', 'tw-321'];
  const bible = new Book2('Bible', god);
  bible.meta = { category: 'god like', items: 3, valid: true, nested: { foo: '123', bar: 321, deep: { baz: 59, qux: false } } };
  await orm.em.persistAndFlush(bible);
  orm.em.clear();
  const loaded = await orm.em.findOneOrFail(Author2, god.id, { populate: ['books'] });
  expect(Array.isArray(loaded.identities)).toBe(true);
  expect(loaded.identities).toEqual(['fb-123', 'pw-231', 'tw-321']);
  expect(typeof loaded.books[0].meta).toBe('object');
  expect(loaded.books[0].meta).toEqual({ category: 'god like', items: 3, valid: true, nested: { foo: '123', bar: 321, deep: { baz: 59, qux: false } } });
  orm.em.clear();
  const b1 = await orm.em.findOneOrFail(Book2, { meta: { category: 'god like' } });
  const b2 = await orm.em.findOneOrFail(Book2, { meta: { category: { $in: ['god like'] }, items: 3 } }); // supports operators (GH #1487)
  const b3 = await orm.em.findOneOrFail(Book2, { meta: { nested: { bar: 321 } } });
  const b4 = await orm.em.findOneOrFail(Book2, { meta: { nested: { foo: '123', bar: 321 } } });
  const b5 = await orm.em.findOneOrFail(Book2, { meta: { valid: true, nested: { foo: '123', bar: 321 } } });
  const b6 = await orm.em.findOneOrFail(Book2, { meta: { valid: true, nested: { foo: '123', bar: 321, deep: { baz: 59 } } } });
  const b7 = await orm.em.findOneOrFail(Book2, { meta: { valid: true, nested: { foo: '123', bar: 321, deep: { baz: 59, qux: false } } } });
  for (const found of [b2, b3, b4, b5, b6, b7]) {
    expect(found).toBe(b1);
  }
});
test('order by json properties', async () => {
  await orm.em.insert(Author2, { name: 'n', email: 'e', id: 1 });
  await orm.em.insertMany(Book2, [
    { uuid: '123e4567-e89b-12d3-a456-426614174001', title: 't1', author: 1, meta: { nested: { foo: '3', deep: { str: 'c', qux: false, baz: 3 } } } },
    { uuid: '123e4567-e89b-12d3-a456-426614174002', title: 't2', author: 1, meta: { nested: { foo: '2', deep: { str: 'b', qux: false, baz: 1 } } } },
    { uuid: '123e4567-e89b-12d3-a456-426614174003', title: 't3', author: 1, meta: { nested: { foo: '1', deep: { str: 'a', qux: false, baz: 2 } } } },
  ]);
  // order by a first-level nested JSON property (string asc)
  const byFoo = await orm.em.fork().findAll(Book2, { orderBy: { meta: { nested: { foo: 'asc' } } } });
  expect(byFoo.map(r => r.title)).toEqual(['t3', 't2', 't1']);
  // order by a deeply nested string property
  const byStr = await orm.em.fork().findAll(Book2, { orderBy: { meta: { nested: { deep: { str: 'asc' } } } } });
  expect(byStr.map(r => r.title)).toEqual(['t3', 't2', 't1']);
  // order descending by a deeply nested numeric property
  const byBaz = await orm.em.fork().findAll(Book2, { orderBy: { meta: { nested: { deep: { baz: QueryOrder.DESC } } } } });
  expect(byBaz.map(r => r.title)).toEqual(['t1', 't3', 't2']);
});
test('properties with spaces in column names', async () => {
  // Column names containing spaces must survive insert, select and update.
  const bar = new FooBar2();
  bar.name = 'n';
  bar.nameWithSpace = '123';
  await orm.em.fork().persistAndFlush(bar);
  const loaded = await orm.em.findOneOrFail(FooBar2, bar);
  expect(loaded.nameWithSpace).toBe('123');
  loaded.nameWithSpace = '456';
  await orm.em.flush();
  orm.em.clear();
  const reloaded = await orm.em.findOneOrFail(FooBar2, bar);
  expect(reloaded.nameWithSpace).toBe('456');
});
test('em.create and reference property in constructor parameters', async () => {
  // A raw FK value passed through em.create yields an uninitialized reference.
  const book = orm.em.create(Book2, { title: 'b', author: 1 });
  const authorRef = wrap(book.author);
  expect(authorRef.isInitialized()).toBe(false);
});
test('unsetting 1:1 inverse (GH #1872)', async () => {
  // Assigning undefined to a 1:1 relation must null out the FK on flush.
  const author = orm.em.create(Author2, { name: 'a', email: 'e' });
  const fb1 = orm.em.create(Test2, { name: 'fb 1' });
  const fb2 = orm.em.create(Test2, { name: 'fb 2' });
  const fz1 = orm.em.create(Book2, { title: 'fb 1', author });
  const fz2 = orm.em.create(Book2, { title: 'fb 2', author });
  fz1.test = fb1;
  await orm.em.persistAndFlush([fb1, fb2, fz1, fz2]);
  fz1.test = undefined;
  await orm.em.flush();
  orm.em.clear();
  const reloaded = await orm.em.findOneOrFail(Book2, fz1, { populate: ['test'] });
  expect(reloaded.test).toBeNull();
});
// JSON path queries must use the mapped column name, and logger contexts
// merge in precedence order: fork options < setLoggerContext < per-query
// loggerContext/logging. The call-order assertions below depend on it.
test('json properties respect field names', async () => {
const bar = new FooBar2();
bar.name = 'b';
bar.objectProperty = { myPropName: { nestedProperty: 123, somethingElse: null } };
await orm.em.fork().persistAndFlush(bar);
const mock = mockLogger(orm, ['query', 'query-params']);
const em = orm.em.fork({ loggerContext: { label: 'foo', bar: 1 } });
// setLoggerContext replaces the fork-level context
em.setLoggerContext({ label: 'foo', bar: 123 });
expect(em.getLoggerContext()).toEqual({ label: 'foo', bar: 123 });
const logSpy = jest.spyOn(DefaultLogger.prototype, 'log');
// per-query label/context override the em-level context for this call only
const b0 = await em.findOneOrFail(FooBar2, bar, {
logging: { label: 'foo 123' },
loggerContext: { bar: 456, new: true },
});
expect(b0.objectProperty).toEqual({ myPropName: { nestedProperty: 123, somethingElse: null } });
const b1 = await em.findOneOrFail(FooBar2, { objectProperty: { myPropName: { nestedProperty: 123 } } });
const b2 = await em.findOneOrFail(FooBar2, { objectProperty: { myPropName: { somethingElse: null } } });
const b3 = await em.findOneOrFail(FooBar2, { objectProperty: { myPropName: { nestedProperty: 123, somethingElse: null } } });
expect(b0).toBe(b1);
expect(b0).toBe(b2);
expect(b0).toBe(b3);
expect(mock.mock.calls).toHaveLength(4);
expect(logSpy.mock.calls).toHaveLength(4);
expect(mock.mock.calls[0][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."id" = 1 limit 1`);
expect(mock.mock.calls[0][0]).toMatch('(foo 123)');
expect(logSpy.mock.calls[0][2]).toMatchObject({ id: em.id, label: 'foo 123', bar: 456, new: true });
expect(mock.mock.calls[1][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("f0"."object_property"->'myPropName'->>'nestedProperty')::float8 = 123 limit 1`);
expect(mock.mock.calls[1][0]).toMatch('(foo)');
expect(logSpy.mock.calls[1][2]).toMatchObject({ id: em.id, label: 'foo', bar: 123 });
expect(mock.mock.calls[2][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."object_property"->'myPropName'->>'somethingElse' is null limit 1`);
expect(mock.mock.calls[2][0]).toMatch('(foo)');
expect(logSpy.mock.calls[2][2]).toMatchObject({ id: em.id, label: 'foo', bar: 123 });
expect(mock.mock.calls[3][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("f0"."object_property"->'myPropName'->>'nestedProperty')::float8 = 123 and "f0"."object_property"->'myPropName'->>'somethingElse' is null limit 1`);
expect(mock.mock.calls[3][0]).toMatch('(foo)');
expect(logSpy.mock.calls[3][2]).toMatchObject({ id: em.id, label: 'foo', bar: 123 });
});
test('findOne should initialize entity that is already in IM', async () => {
  const god = new Author2('God', 'hello@heaven.god');
  const bible = new Book2('Bible', god);
  await orm.em.persistAndFlush(bible);
  orm.em.clear();
  // getReference hands out an uninitialized proxy...
  // (renamed from `ref` to avoid shadowing the imported helper)
  const reference = orm.em.getReference(Author2, god.id);
  expect(wrap(reference).isInitialized()).toBe(false);
  // ...and a later findOne hydrates that exact same instance
  const loaded = await orm.em.findOne(Author2, god.id);
  expect(reference).toBe(loaded);
  expect(wrap(reference).isInitialized()).toBe(true);
});
test('findOne supports regexps', async () => {
  await orm.em.persistAndFlush([
    new Author2('Author 1', 'a1@example.com'),
    new Author2('Author 2', 'a2@example.com'),
    new Author2('Author 3', 'a3@example.com'),
  ]);
  orm.em.clear();
  // native JS RegExp filter value
  const byRegExp = await orm.em.find(Author2, { email: /exa.*le\.c.m$/ });
  expect(byRegExp.map(a => a.name)).toEqual(['Author 1', 'Author 2', 'Author 3']);
  orm.em.clear();
  // the $re operator with a string pattern behaves the same
  const byOperator = await orm.em.find(Author2, { email: { $re: 'exa.*le.c.m$' } });
  expect(byOperator.map(a => a.name)).toEqual(['Author 1', 'Author 2', 'Author 3']);
});
test('findOne supports optimistic locking [testMultipleFlushesDoIncrementalUpdates]', async () => {
const test = new Test2();
for (let i = 0; i < 5; i++) {
test.name = 'test' + i;
await orm.em.persistAndFlush(test);
expect(typeof test.version).toBe('number');
expect(test.version).toBe(i + 1);
}
});
test('findOne supports optimistic locking [testStandardFailureThrowsException]', async () => {
const test = new Test2();
test.name = 'test';
await orm.em.persistAndFlush(test);
expect(typeof test.version).toBe('number');
expect(test.version).toBe(1);
orm.em.clear();
const test2 = await orm.em.findOne(Test2, test.id);
await orm.em.nativeUpdate(Test2, { id: test.id }, { name: 'Changed!' }); // simulate concurrent update
test2!.name = 'WHATT???';
try {
await orm.em.flush();
expect(1).toBe('should be unreachable');
} catch (e: any) {
expect(e).toBeInstanceOf(ValidationError);
expect(e.message).toBe(`The optimistic lock on entity Test2 failed`);
expect((e as ValidationError).getEntity()).toBe(test2);
}
});
test('findOne supports optimistic locking [versioned proxy]', async () => {
const test = new Test2();
test.name = 'test';
await orm.em.persistAndFlush(test);
orm.em.clear();
const proxy = orm.em.getReference(Test2, test.id);
await orm.em.lock(proxy, LockMode.OPTIMISTIC, 1);
expect(wrap(proxy).isInitialized()).toBe(true);
});
test('findOne supports optimistic locking [versioned proxy]', async () => {
const test = new Test2();
test.name = 'test';
await orm.em.persistAndFlush(test);
orm.em.clear();
const test2 = await orm.em.findOne(Test2, test.id);
await orm.em.lock(test2!, LockMode.OPTIMISTIC, test.version);
});
test('findOne supports optimistic locking [testOptimisticTimestampLockFailureThrowsException]', async () => {
  // FooBar2 uses a Date version column; locking with a stale timestamp throws.
  const bar = FooBar2.create('Testing');
  expect(bar.version).toBeUndefined();
  await orm.em.persistAndFlush(bar);
  expect(bar.version).toBeInstanceOf(Date);
  orm.em.clear();
  const reloaded = (await orm.em.findOne(FooBar2, bar.id))!;
  expect(reloaded.version).toBeInstanceOf(Date);
  try {
    // Try to lock the record with an older timestamp and it should throw an exception
    const staleVersion = new Date(+reloaded.version - 3600);
    await orm.em.lock(reloaded, LockMode.OPTIMISTIC, staleVersion);
    expect(1).toBe('should be unreachable');
  } catch (e) {
    expect((e as ValidationError).getEntity()).toBe(reloaded);
  }
});
test('findOne supports optimistic locking [unversioned entity]', async () => {
  // Author2 has no version field, so optimistic locking must be rejected
  const author = new Author2('name', 'email');
  await orm.em.persistAndFlush(author);
  const attempt = orm.em.lock(author, LockMode.OPTIMISTIC);
  await expect(attempt).rejects.toThrow('Cannot obtain optimistic lock on unversioned entity Author2');
});
test('findOne supports optimistic locking [versioned entity]', async () => {
const test = new Test2();
test.name = 'test';
await orm.em.persistAndFlush(test);
await orm.em.lock(test, LockMode.OPTIMISTIC, test.version);
});
test('findOne supports optimistic locking [version mismatch]', async () => {
const test = new Test2();
test.name = 'test';
await orm.em.persistAndFlush(test);
await expect(orm.em.lock(test, LockMode.OPTIMISTIC, test.version + 1)).rejects.toThrow('The optimistic lock failed, version 2 was expected, but is actually 1');
});
test('findOne supports optimistic locking [testLockUnmanagedEntityThrowsException]', async () => {
const test = new Test2();
test.name = 'test';
await expect(orm.em.lock(test, LockMode.OPTIMISTIC)).rejects.toThrow('Entity Test2 is not managed. An entity is managed if its fetched from the database or registered as new through EntityManager.persist()');
});
test('batch updates increments version field (optimistic locking)', async () => {
const tests = [
new Test2({ name: 't1' }),
new Test2({ name: 't2' }),
new Test2({ name: 't3' }),
];
await orm.em.persistAndFlush(tests);
expect(tests.map(t => t.version)).toEqual([1, 1, 1]);
tests.forEach(t => t.name += ' changed!');
await orm.em.flush();
expect(tests.map(t => t.version)).toEqual([2, 2, 2]);
});
test('pessimistic locking requires active transaction', async () => {
const test = Test2.create('Lock test');
await orm.em.persistAndFlush(test);
await expect(orm.em.findOne(Test2, test.id, { lockMode: LockMode.PESSIMISTIC_READ })).rejects.toThrow('An open transaction is required for this operation');
await expect(orm.em.findOne(Test2, test.id, { lockMode: LockMode.PESSIMISTIC_WRITE })).rejects.toThrow('An open transaction is required for this operation');
await expect(orm.em.lock(test, LockMode.PESSIMISTIC_READ)).rejects.toThrow('An open transaction is required for this operation');
await expect(orm.em.lock(test, LockMode.PESSIMISTIC_WRITE)).rejects.toThrow('An open transaction is required for this operation');
});
test('findOne supports pessimistic locking [pessimistic write]', async () => {
  const author = new Author2('name', 'email');
  await orm.em.persistAndFlush(author);
  const mock = mockLogger(orm, ['query']);

  // PESSIMISTIC_WRITE issues a `select ... for update` inside the transaction
  await orm.em.transactional(async em => {
    await em.lock(author, LockMode.PESSIMISTIC_WRITE);
  });

  expect(mock.mock.calls).toHaveLength(3);
  expect(mock.mock.calls[0][0]).toMatch('begin');
  expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "a0" where "a0"."id" = $1 for update');
  expect(mock.mock.calls[2][0]).toMatch('commit');
});
test('findOne supports pessimistic locking [pessimistic read]', async () => {
const author = new Author2('name', 'email');
await orm.em.persistAndFlush(author);
const mock = mockLogger(orm, ['query']);
// PESSIMISTIC_READ issues `select ... for share`
await orm.em.transactional(async em => {
await em.lock(author, LockMode.PESSIMISTIC_READ);
});
expect(mock.mock.calls.length).toBe(3);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "a0" where "a0"."id" = $1 for share');
expect(mock.mock.calls[2][0]).toMatch('commit');
// reset the query log before the next variant
mock.mock.calls.length = 0;
// PESSIMISTIC_PARTIAL_WRITE issues `for update skip locked`
await orm.em.transactional(async em => {
await em.lock(author, LockMode.PESSIMISTIC_PARTIAL_WRITE);
});
expect(mock.mock.calls.length).toBe(3);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "a0" where "a0"."id" = $1 for update skip locked');
expect(mock.mock.calls[2][0]).toMatch('commit');
mock.mock.calls.length = 0;
// `lockTableAliases` restricts the lock to the listed aliases (`for update of "a0"`)
await orm.em.transactional(async em => {
await em.lock(author, LockMode.PESSIMISTIC_PARTIAL_WRITE, { lockTableAliases: ['a0'] });
});
expect(mock.mock.calls.length).toBe(3);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "a0" where "a0"."id" = $1 for update of "a0" skip locked');
expect(mock.mock.calls[2][0]).toMatch('commit');
mock.mock.calls.length = 0;
// with a joined populate, `lockTableAliases: ['b0']` locks only the root
// table alias, not the joined author table
await orm.em.transactional(async em => {
await em.findAll(Book2, {
lockMode: LockMode.PESSIMISTIC_PARTIAL_WRITE,
lockTableAliases: ['b0'],
populate: ['author'],
strategy: LoadStrategy.JOINED,
});
});
expect(mock.mock.calls.length).toBe(3);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch('select "b0"."uuid_pk", "b0"."created_at", "b0"."title", "b0"."price", "b0"."double", "b0"."meta", "b0"."author_id", "b0"."publisher_id", "b0".price * 1.19 as "price_taxed", "a1"."id" as "a1__id", "a1"."created_at" as "a1__created_at", "a1"."updated_at" as "a1__updated_at", "a1"."name" as "a1__name", "a1"."email" as "a1__email", "a1"."age" as "a1__age", "a1"."terms_accepted" as "a1__terms_accepted", "a1"."optional" as "a1__optional", "a1"."identities" as "a1__identities", "a1"."born" as "a1__born", "a1"."born_time" as "a1__born_time", "a1"."favourite_book_uuid_pk" as "a1__favourite_book_uuid_pk", "a1"."favourite_author_id" as "a1__favourite_author_id", "a1"."identity" as "a1__identity" from "book2" as "b0" left join "author2" as "a1" on "b0"."author_id" = "a1"."id" where "b0"."author_id" is not null for update of "b0" skip locked');
expect(mock.mock.calls[2][0]).toMatch('commit');
});
test('locking and select-in population (GH #1670)', async () => {
await createBooksWithTags();
const mock = mockLogger(orm, ['query']);
// with the select-in strategy, the lock clause must be repeated on every
// populate query, not just the root select
await orm.em.transactional(async em => {
await em.findAll(Book2, {
lockMode: LockMode.PESSIMISTIC_PARTIAL_WRITE,
populate: ['author', 'tags'],
populateWhere: PopulateHint.INFER,
strategy: LoadStrategy.SELECT_IN,
});
});
// begin + root select + author populate + tags (m:n pivot) populate + commit
expect(mock.mock.calls.length).toBe(5);
expect(mock.mock.calls[0][0]).toMatch('begin');
expect(mock.mock.calls[1][0]).toMatch(`select "b0"."uuid_pk", "b0"."created_at", "b0"."title", "b0"."price", "b0"."double", "b0"."meta", "b0"."author_id", "b0"."publisher_id", "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where "b0"."author_id" is not null for update skip locked`);
expect(mock.mock.calls[2][0]).toMatch(`select "a0".* from "author2" as "a0" where "a0"."id" in ($1) and "a0"."id" is not null for update skip locked`);
expect(mock.mock.calls[3][0]).toMatch(`select "b1".*, "b0"."book_tag2_id" as "fk__book_tag2_id", "b0"."book2_uuid_pk" as "fk__book2_uuid_pk" from "book2_tags" as "b0" inner join "public"."book_tag2" as "b1" on "b0"."book_tag2_id" = "b1"."id" where "b0"."book2_uuid_pk" in ($1, $2, $3) order by "b0"."order" asc for update skip locked`);
expect(mock.mock.calls[4][0]).toMatch('commit');
});
test('stable results of serialization', async () => {
  const god = new Author2('God', 'hello@heaven.god');
  const bible = new Book2('Bible', god);
  const bible2 = new Book2('Bible pt. 2', god);
  const bible3 = new Book2('Bible pt. 3', new Author2('Lol', 'lol@lol.lol'));
  await orm.em.persistAndFlush([bible, bible2, bible3]);
  orm.em.clear();

  const reloadedGod = (await orm.em.findOne(Author2, god.id))!;
  const books = await orm.em.find(Book2, {});

  // initializing the author afterwards must not change how the books serialize:
  // the m:1 relation still serializes as the author's id
  await wrap(reloadedGod).init();
  books.forEach(book => {
    expect(wrap(book).toJSON()).toMatchObject({ author: book.author.id });
  });
});
test('stable results of serialization (collection)', async () => {
  const pub = new Publisher2('Publisher2');
  await orm.em.persistAndFlush(pub);

  const god = new Author2('God', 'hello@heaven.god');
  const bible = new Book2('Bible', god);
  const bible2 = new Book2('Bible pt. 2', god);
  const bible3 = new Book2('Bible pt. 3', new Author2('Lol', 'lol@lol.lol'));
  for (const book of [bible, bible2, bible3]) {
    book.publisher = wrap(pub).toReference();
  }
  await orm.em.persistAndFlush([bible, bible2, bible3]);
  orm.em.clear();

  const godRef = orm.em.getReference(Author2, god.id);
  const publisher = (await orm.em.findOne(Publisher2, pub.id, { populate: ['books'] }))!;

  // initializing the author reference must not change the serialized collection
  await wrap(godRef).init();
  const json = wrap(publisher).toJSON().books;
  for (const book of publisher.books) {
    expect(json.find(b => b.uuid === book.uuid)).toMatchObject({ author: book.author.id });
  }
});
test('stable results of serialization with partial loading', async () => {
  const god = new Author2('God', 'hello@heaven.god');
  god.books.add(new Book2('Bible', god));
  await orm.em.fork().persistAndFlush(god);

  // when populating collections, the owner is selected automatically (here book.author)
  const loaded = await orm.em.findOneOrFail(Author2, god.id, {
    populate: ['books'],
    fields: ['books.title'],
  });
  const serialized = wrap(loaded).toJSON();
  // @ts-expect-error since v6, automatically selected FKs are no longer part of the serialized entity
  expect(serialized.books[0].author).toBeUndefined();
});
test('findOne by id', async () => {
const authorRepository = orm.em.getRepository(Author2);
const jon = new Author2('Jon Snow', 'snow@wall.st');
await orm.em.persistAndFlush(jon);
orm.em.clear();
let author = (await authorRepository.findOne(jon.id))!;
expect(author).not.toBeNull();
expect(author.name).toBe('Jon Snow');
orm.em.clear();
author = (await authorRepository.findOne({ id: jon.id }))!;
expect(author).not.toBeNull();
expect(author.name).toBe('Jon Snow');
});
test('populate ManyToOne relation via init()', async () => {
const authorRepository = orm.em.getRepository(Author2);
const publisher = new Publisher2('Publisher');
const god = new Author2('God', 'hello@heaven.god');
const bible = new Book2('Bible', god);
bible.publisher = ref(publisher);
await orm.em.persistAndFlush(bible);
let jon = new Author2('Jon Snow', 'snow@wall.st');
jon.born = '1990-03-23';
jon.favouriteBook = bible;
await orm.em.persistAndFlush(jon);
orm.em.clear();
jon = (await authorRepository.findOne(jon.id))!;
expect(jon).not.toBeNull();
expect(jon.name).toBe('Jon Snow');
expect(jon.born).toEqual('1990-03-23');
// the m:1 relation starts out as an uninitialized entity reference...
expect(jon.favouriteBook).toBeInstanceOf(Book2);
expect(wrap(jon.favouriteBook!).isInitialized()).toBe(false);
// ...and init() loads its data in place
await wrap(jon.favouriteBook!).init();
expect(jon.favouriteBook).toBeInstanceOf(Book2);
expect(wrap(jon.favouriteBook!).isInitialized()).toBe(true);
expect(jon.favouriteBook!.title).toBe('Bible');
// entities loaded via a fork (and relations loaded through them)
// must belong to that fork, not the global EM
const em2 = orm.em.fork();
const bible2 = await em2.findOneOrFail(Book2, { uuid: bible.uuid });
expect(wrap(bible2, true).__em!.id).toBe(em2.id);
expect(wrap(bible2.publisher!, true).__em!.id).toBe(em2.id);
const publisher2 = await bible2.publisher!.loadOrFail();
expect(wrap(publisher2, true).__em!.id).toBe(em2.id);
});
test('populating a relation does save its original entity data (GH issue 864)', async () => {
const god = new Author2('God', 'hello@heaven.god');
const bible = new Book2('Bible', god);
await orm.em.persistAndFlush(bible);
orm.em.clear();
const b = await orm.em.findOneOrFail(Book2, bible.uuid, { populate: ['author'] });
expect(wrap(b.author, true).__originalEntityData).toMatchObject({ name: 'God', email: 'hello@heaven.god' });
});
test('populate OneToOne relation', async () => {
  const bar = FooBar2.create('bar');
  const baz = new FooBaz2('baz');
  bar.baz = baz;
  await orm.em.persistAndFlush(bar);
  orm.em.clear();

  const reloaded = (await orm.em.findOne(FooBar2, { id: bar.id }, { populate: ['baz'], refresh: true }))!;
  expect(reloaded.baz).toBeInstanceOf(FooBaz2);
  expect(reloaded.baz!.id).toBe(baz.id);
  expect(wrap(reloaded).toJSON()).toMatchObject({ baz: { id: baz.id, bar: bar.id, name: 'baz' } });

  // nothing changed since the refresh, so a flush must issue no queries
  const mock = mockLogger(orm, ['query']);
  await orm.em.flush();
  expect(mock.mock.calls.length).toBe(0);
});
test('populate OneToOne relation on inverse side', async () => {
const bar = FooBar2.create('bar');
const baz = new FooBaz2('baz');
bar.baz = baz;
await orm.em.persistAndFlush(bar);
orm.em.clear();
const mock = mockLogger(orm, ['query']);
// autoJoinOneToOneOwner: false
// without populate, the inverse side stays unloaded and no join is made
const b0 = await orm.em.findOneOrFail(FooBaz2, { id: baz.id });
expect(mock.mock.calls[0][0]).toMatch('select "f0".* from "foo_baz2" as "f0" where "f0"."id" = $1 limit $2');
expect(b0.bar).toBeUndefined();
orm.em.clear();
// populating the inverse 1:1 joins the owner to get its FK, then loads it
const b1 = await orm.em.findOneOrFail(FooBaz2, { id: baz.id }, { populate: ['bar'] });
expect(mock.mock.calls[1][0]).toMatch('select "f0".*, "f1"."id" as "bar_id" from "foo_baz2" as "f0" left join "foo_bar2" as "f1" on "f0"."id" = "f1"."baz_id" where "f0"."id" = $1 limit $2');
expect(mock.mock.calls[2][0]).toMatch('select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."baz_id" in ($1)');
expect(b1.bar).toBeInstanceOf(FooBar2);
expect(b1.bar!.id).toBe(bar.id);
expect(wrap(b1).toJSON()).toMatchObject({ bar: { id: bar.id, baz: baz.id, name: 'bar' } });
orm.em.clear();
// querying by the owner's PK also goes through the join
const b2 = await orm.em.findOneOrFail(FooBaz2, { bar: bar.id }, { populate: ['bar'] });
expect(mock.mock.calls[3][0]).toMatch('select "f0".*, "f1"."id" as "bar_id" from "foo_baz2" as "f0" left join "foo_bar2" as "f1" on "f0"."id" = "f1"."baz_id" where "f1"."id" = $1 limit $2');
expect(mock.mock.calls[4][0]).toMatch('select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."baz_id" in ($1)');
expect(b2.bar).toBeInstanceOf(FooBar2);
expect(b2.bar!.id).toBe(bar.id);
expect(wrap(b2).toJSON()).toMatchObject({ bar: { id: bar.id, baz: baz.id, name: 'bar' } });
});
test('populate OneToOne relation with uuid PK', async () => {
const author = new Author2('name', 'email');
const book = new Book2('b1', author);
const test = Test2.create('t');
test.book = book;
await orm.em.persistAndFlush(test);
orm.em.clear();
const b1 = (await orm.em.findOne(Book2, { test: test.id }, { populate: ['test.config'] }))!;
expect(b1.uuid).not.toBeNull();
expect(wrap(b1).toJSON()).toMatchObject({ test: { id: test.id, book: test.book.uuid, name: 't' } });
});
test('batch update with OneToOne relation will use 2 queries (GH issue #1025)', async () => {
c