import {
  Datasource,
  FieldType,
  SortOrder,
  Table,
  StringFieldSubType,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import {
  DatabaseName,
  datasourceDescribe,
  knexClient,
} from "../integrations/tests/utils"
import { Knex } from "knex"

const mainDescriptions = datasourceDescribe({
  only: [DatabaseName.POSTGRES, DatabaseName.POSTGRES_LEGACY],
})

if (mainDescriptions.length) {
  describe.each(mainDescriptions)(
    "/postgres integrations ($dbName)",
    ({ config, dsProvider }) => {
      let datasource: Datasource
      let client: Knex

      beforeAll(async () => {
        const ds = await dsProvider()
        datasource = ds.datasource!
        client = ds.client!
      })

      afterAll(config.end)

      describe("POST /api/datasources/:datasourceId/schema", () => {
        let tableName: string

        beforeEach(async () => {
          tableName = generator.guid().replaceAll("-", "").substring(0, 10)
        })

        afterEach(async () => {
          await client.schema.dropTableIfExists(tableName)
        })

        it("recognises when a table has no primary key", async () => {
          await client.schema.createTable(tableName, table => {
            table.increments("id", { primaryKey: false })
          })

          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          expect(response.errors).toEqual({
            [tableName]: "Table must have a primary key.",
          })
        })

        it("recognises when a table is using a reserved column name", async () => {
          await client.schema.createTable(tableName, table => {
            table.increments("_id").primary()
          })

          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          expect(response.errors).toEqual({
            [tableName]: "Table contains invalid columns.",
          })
        })

        it("recognises enum columns as options", async () => {
          const tableName = `orders_${generator
            .guid()
            .replaceAll("-", "")
            .substring(0, 6)}`

          await client.schema.createTable(tableName, table => {
            table.increments("order_id").primary()
            table.string("customer_name").notNullable()
            table.enum("status", ["pending", "processing", "shipped"], {
              useNative: true,
              enumName: `${tableName}_status`,
            })
          })

          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          const table = response.datasource.entities?.[tableName]
          expect(table).toBeDefined()
          expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
        })

        it("should be able to filter enum columns using string filter", async () => {
          const tableName = `orders_${generator
            .guid()
            .replaceAll("-", "")
            .substring(0, 6)}`

          await client.schema.createTable(tableName, table => {
            table.increments("order_id").primary()
            table.string("customer_name").notNullable()
            table.enum("status", ["pending", "processing", "shipped"], {
              useNative: true,
              enumName: `${tableName}_status`,
            })
          })

          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          const table = response.datasource.entities?.[tableName]
          expect(table).toBeDefined()

          // Insert test data
          await config.api.row.bulkImport(table!._id!, {
            rows: [
              { customer_name: "John", status: "pending" },
              { customer_name: "Jane", status: "processing" },
              { customer_name: "Bob", status: "shipped" },
            ],
          })

          // Test string filter on enum column
          const { rows } = await config.api.row.search(table!._id!, {
            query: {
              string: {
                status: "proc",
              },
            },
          })

          expect(rows).toHaveLength(1)
          expect(rows[0].status).toBe("processing")
          expect(rows[0].customer_name).toBe("Jane")
        })
      })

      describe("check custom column types", () => {
        beforeAll(async () => {
          await client.schema.createTable("binaryTable", table => {
            table.binary("id").primary()
            table.string("column1")
            table.integer("column2")
          })
        })
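
        // Postgres reports a binary column as bytea; the test below checks
        // that the fetched schema carries that external type and that a
        // bytea primary key survives a round trip through the generated
        // row `_id` (a URI-encoded JSON array of the key values).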
        it("should handle binary columns", async () => {
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          expect(response.datasource.entities).toBeDefined()
          const table = response.datasource.entities?.["binaryTable"]
          expect(table).toBeDefined()
          expect(table?.schema.id.externalType).toBe("bytea")

          const row = await config.api.row.save(table!._id!, {
            id: "1111",
            column1: "hello",
            column2: 222,
          })
          expect(row._id).toBeDefined()

          const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
          expect(JSON.parse(decoded)[0]).toBe("1111")
        })
      })

      describe("check fetching null/not null table", () => {
        beforeAll(async () => {
          await client.schema.createTable("nullableTable", table => {
            table.increments("order_id").primary()
            table.integer("order_number").notNullable()
          })
        })

        it("should be able to change the table to allow nullable and refetch this", async () => {
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          const entities = response.datasource.entities
          expect(entities).toBeDefined()
          const nullableTable = entities?.["nullableTable"]
          expect(nullableTable).toBeDefined()
          expect(
            nullableTable?.schema["order_number"].constraints?.presence
          ).toEqual(true)

          // Alter the table directly in the database so its external state
          // drifts from what Budibase knows about, then refetch the schema
          // and check that Budibase picks up the change.
          await client.schema.alterTable("nullableTable", table => {
            table.setNullable("order_number")
          })

          const responseAfter = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          const entitiesAfter = responseAfter.datasource.entities
          expect(entitiesAfter).toBeDefined()
          const nullableTableAfter = entitiesAfter?.["nullableTable"]
          expect(nullableTableAfter).toBeDefined()
          expect(
            nullableTableAfter?.schema["order_number"].constraints?.presence
          ).toBeUndefined()
        })
      })

      describe("money field 💰", () => {
        const tableName = "moneytable"
        let table: Table

        beforeAll(async () => {
          await client.raw(`
            CREATE TABLE ${tableName} (
              id serial PRIMARY KEY,
              price money
            )
          `)
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          table = response.datasource.entities![tableName]
        })

        it("should be able to import a money field", async () => {
          expect(table).toBeDefined()
          expect(table?.schema.price.type).toBe(FieldType.NUMBER)
        })

        it("should be able to search a money field", async () => {
          await config.api.row.bulkImport(table._id!, {
            rows: [{ price: 200 }, { price: 300 }],
          })

          const { rows } = await config.api.row.search(table._id!, {
            query: {
              equal: {
                price: 200,
              },
            },
          })
          expect(rows).toHaveLength(1)
          expect(rows[0].price).toBe("200.00")
        })

        it("should be able to update a money field", async () => {
          let row = await config.api.row.save(table._id!, { price: 200 })
          expect(row.price).toBe("200.00")

          row = await config.api.row.save(table._id!, { ...row, price: 300 })
          expect(row.price).toBe("300.00")

          row = await config.api.row.save(table._id!, {
            ...row,
            price: "400.00",
          })
          expect(row.price).toBe("400.00")
        })
      })

      describe("citext field", () => {
        const tableName = "citexttable"
        let table: Table

        beforeAll(async () => {
          await client.raw(`
            CREATE EXTENSION IF NOT EXISTS citext;
            CREATE TABLE ${tableName} (
              id serial PRIMARY KEY,
              email citext
            )
          `)
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          table = response.datasource.entities![tableName]
        })
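
        // citext is Postgres's case-insensitive text type, provided by the
        // citext extension; information_schema reports extension types as
        // "USER-DEFINED", and Budibase should still surface the column as a
        // plain string field.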
        it("should map citext column to internal type string", async () => {
          expect(table).toBeDefined()
          expect(table?.schema.email.type).toBe(FieldType.STRING)
          expect(table?.schema.email.externalType).toBe("USER-DEFINED")
        })
      })

      describe("text[] array field", () => {
        const tableName = "textarraytable"
        let table: Table

        beforeAll(async () => {
          await client.raw(`
            CREATE TABLE ${tableName} (
              id serial PRIMARY KEY,
              tags text[]
            )
          `)
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          table = response.datasource.entities![tableName]
        })

        it("should map text[] column to internal type string with array subtype", async () => {
          expect(table).toBeDefined()
          expect(table?.schema.tags.type).toBe(FieldType.STRING)
          expect(table?.schema.tags.subtype).toBe(StringFieldSubType.ARRAY)
          expect(table?.schema.tags.externalType).toBe("ARRAY")
        })
      })

      describe("json column sorting behaviour", () => {
        let tableName: string
        let table: Table

        beforeAll(async () => {
          tableName = `jsonsort_${generator
            .guid()
            .replaceAll("-", "")
            .substring(0, 12)}`
          await client.schema.createTable(tableName, table => {
            table.integer("id").primary()
            table.jsonb("payload")
            table.string("label")
          })
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          table = response.datasource.entities![tableName]
        })

        afterAll(async () => {
          await client.schema.dropTableIfExists(tableName)
        })

        it("ignores sort directives targeting json columns", async () => {
          expect(table).toBeDefined()
          expect(table?.schema.payload.type).toBe(FieldType.JSON)

          await config.api.row.bulkImport(table._id!, {
            rows: [
              { id: 3, label: "gamma", payload: { rank: 3 } },
              { id: 1, label: "alpha", payload: { rank: 1 } },
              { id: 2, label: "beta", payload: { rank: 2 } },
            ],
          })

          const { rows } = await config.api.row.search(table._id!, {
            sort: "payload",
            sortOrder: SortOrder.DESCENDING,
          })

          expect(rows.map(row => row.id)).toEqual([1, 2, 3])
          expect(rows.map(row => row.label)).toEqual(["alpha", "beta", "gamma"])
        })
      })

      describe("json primary key fallbacks", () => {
        let tableName: string
        let table: Table

        beforeAll(async () => {
          tableName = `jsonprimary_${generator
            .guid()
            .replaceAll("-", "")
            .substring(0, 12)}`
          await client.raw(`
            CREATE TABLE ${tableName} (
              payload jsonb NOT NULL,
              ref text NOT NULL,
              description text,
              PRIMARY KEY (payload, ref)
            )
          `)
          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })
          table = response.datasource.entities![tableName]
        })

        afterAll(async () => {
          await client.schema.dropTableIfExists(tableName)
        })

        it("uses the first sortable column in a composite primary key", async () => {
          expect(table).toBeDefined()
          expect(table?.schema.payload.type).toBe(FieldType.JSON)
          expect(table?.primary?.[0]).toBe("payload")
          expect(table?.primary?.[1]).toBe("ref")

          await config.api.row.bulkImport(table._id!, {
            rows: [
              { payload: { rank: 3 }, ref: "c", description: "third" },
              { payload: { rank: 1 }, ref: "a", description: "first" },
              { payload: { rank: 2 }, ref: "b", description: "second" },
            ],
          })

          const { rows } = await config.api.row.search(table._id!)
          expect(rows.map(row => row.ref)).toEqual(["a", "b", "c"])
        })
      })
    }
  )

  const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })

  if (descriptions.length) {
    describe.each(descriptions)(
      "Integration compatibility with postgres search_path",
      ({ config, dsProvider }) => {
        let datasource: Datasource
        let client: Knex
        let schema1: string
        let schema2: string

        beforeEach(async () => {
          const ds = await dsProvider()
          datasource = ds.datasource!
          const rawDatasource = ds.rawDatasource!
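
          // Create two throwaway schemas and point the datasource at both
          // via a comma-separated `schema` config value, mirroring a
          // Postgres search_path; the knex client is rebuilt after the
          // config change so subsequent DDL runs against the updated path.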
          schema1 = generator.guid().replaceAll("-", "")
          schema2 = generator.guid().replaceAll("-", "")
          client = await knexClient(rawDatasource)
          await client.schema.createSchema(schema1)
          await client.schema.createSchema(schema2)

          rawDatasource.config!.schema = `${schema1}, ${schema2}`

          client = await knexClient(rawDatasource)
          datasource = await config.api.datasource.create(rawDatasource)
        })

        afterEach(async () => {
          await client.schema.dropSchema(schema1, true)
          await client.schema.dropSchema(schema2, true)
        })

        it("discovers tables from any schema in search path", async () => {
          await client.schema.createTable(`${schema1}.table1`, table => {
            table.increments("id1").primary()
          })
          await client.schema.createTable(`${schema2}.table2`, table => {
            table.increments("id2").primary()
          })

          const response = await config.api.datasource.info(datasource)
          expect(response.tableNames).toBeDefined()
          expect(response.tableNames).toEqual(
            expect.arrayContaining(["table1", "table2"])
          )
        })

        it("does not mix columns from different tables", async () => {
          const repeated_table_name = "table_same_name"
          await client.schema.createTable(
            `${schema1}.${repeated_table_name}`,
            table => {
              table.increments("id").primary()
              table.string("val1")
            }
          )
          await client.schema.createTable(
            `${schema2}.${repeated_table_name}`,
            table => {
              table.increments("id2").primary()
              table.string("val2")
            }
          )

          const response = await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
            tablesFilter: [repeated_table_name],
          })
          expect(
            response.datasource.entities?.[repeated_table_name].schema
          ).toBeDefined()
          const schema =
            response.datasource.entities?.[repeated_table_name].schema
          expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
        })
      }
    )
  }
}