/**
 * @databricks/sql — ArrowResultHandler (compiled output).
 * Driver for connecting to Databricks SQL via the Thrift API.
 */
// TypeScript-emitted interop helper: normalizes a CommonJS `require` result so
// it can always be consumed through a `.default` property. Modules already
// flagged as ES modules pass through untouched; anything else gets wrapped.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const HiveDriverError_1 = __importDefault(require("../errors/HiveDriverError"));
const utils_1 = require("./utils");
const utils_2 = require("../utils");
/**
 * Collects Arrow-encoded result batches from a Thrift result source.
 *
 * The handler prepends the Arrow schema to every non-empty batch set it
 * returns, so consumers can decode the stream, and transparently decompresses
 * LZ4-compressed batches when the optional `lz4` module is available.
 */
class ArrowResultHandler {
    /**
     * @param context - driver context, kept for downstream use.
     * @param source - result source exposing `hasMore()` and `fetchNext(options)`.
     * @param metadata - `{ schema, arrowSchema, lz4Compressed }` from the server.
     * @throws HiveDriverError when results are LZ4-compressed but the `lz4`
     *   module is not installed.
     */
    constructor(context, source, { schema, arrowSchema, lz4Compressed }) {
        this.context = context;
        this.source = source;
        // Arrow schema is not available in old DBR versions, which also don't support native Arrow types,
        // so it's possible to infer Arrow schema from Hive schema ignoring `useArrowNativeTypes` option
        this.arrowSchema = arrowSchema ?? (0, utils_1.hiveSchemaToArrowSchema)(schema);
        this.isLZ4Compressed = lz4Compressed ?? false;
        if (this.isLZ4Compressed && !utils_2.LZ4) {
            throw new HiveDriverError_1.default('Cannot handle LZ4 compressed result: module `lz4` not installed');
        }
    }
    /** @returns whether more rows can be fetched; always `false` without a schema. */
    async hasMore() {
        // Without a schema there is nothing we could decode, so report exhaustion.
        return this.arrowSchema ? this.source.hasMore() : false;
    }
    /**
     * Fetches the next set of Arrow batches.
     * @param options - passed through to the underlying source.
     * @returns `{ batches, rowCount }`; `batches` starts with the Arrow schema
     *   when any data was received, and is empty otherwise.
     */
    async fetchNext(options) {
        if (!this.arrowSchema) {
            return { batches: [], rowCount: 0 };
        }
        const rowSet = await this.source.fetchNext(options);
        const collected = [];
        let rows = 0;
        for (const { batch, rowCount } of rowSet?.arrowBatches ?? []) {
            // Skip absent batches; only count rows for batches we actually keep.
            if (batch) {
                collected.push(this.isLZ4Compressed ? utils_2.LZ4.decode(batch) : batch);
                rows += rowCount.toNumber(true);
            }
        }
        if (collected.length === 0) {
            return { batches: [], rowCount: 0 };
        }
        // First entry is always the schema so consumers can decode the batches.
        return { batches: [this.arrowSchema, ...collected], rowCount: rows };
    }
}
exports.default = ArrowResultHandler;
//# sourceMappingURL=ArrowResultHandler.js.map
;