// kepler.gl — processors (type declarations)
// kepler.gl is a WebGL-based application to visualize large-scale location data in the browser.
import * as arrow from 'apache-arrow';
import { ArrowTable } from '@loaders.gl/schema';
import { ParsedDataset, SavedMap, LoadedMap, KeplerGLSchemaClass } from '@kepler.gl/schemas';
import { ProcessorResult, Field } from '@kepler.gl/types';
import { isPlainObject, analyzerTypeToFieldType, getFieldsFromData } from '@kepler.gl/utils';
/** Regular expression used when parsing CSV; presumably matches cell values treated as null (e.g. empty string, "null") — TODO confirm against implementation. */
export declare const CSV_NULLS: RegExp;
/**
 * Attempt to parse a value as a JSON string; serves as the `parse` handler for
 * `object` and `array` fields in `PARSE_FIELD_VALUE_FROM_STRING`.
 * Not exported from this module.
 */
declare function tryParseJsonString(str: any): any;
/**
 * Lookup table mapping a field type (`boolean`, `integer`, `timestamp`, `real`,
 * `object`, `array`) to a pair of handlers:
 * - `valid`: whether a raw cell value is already of the target type
 *   (for `timestamp`, validity also depends on the `Field` descriptor);
 * - `parse`: converts a raw (typically string) value into the target type.
 */
export declare const PARSE_FIELD_VALUE_FROM_STRING: {
boolean: {
valid: (d: unknown) => boolean;
parse: (d: unknown) => boolean;
};
integer: {
valid: (d: unknown) => boolean;
parse: (d: unknown) => number;
};
timestamp: {
valid: (d: unknown, field: Field) => boolean;
parse: (d: any, field: Field) => any;
};
real: {
valid: (d: unknown) => boolean;
parse: typeof parseFloat;
};
object: {
valid: typeof isPlainObject;
parse: typeof tryParseJsonString;
};
array: {
valid: (arg: any) => arg is any[];
parse: typeof tryParseJsonString;
};
};
/**
 * Process csv data, output a data object with `{fields: [], rows: []}`.
 * The data object can be wrapped in a `dataset` and passed to [`addDataToMap`](../actions/actions.md#adddatatomap)
 * @param rawData raw csv string, or rows already split into an array of cell arrays
 * @param header optional column names; presumably taken from the first row when omitted — TODO confirm
 * @returns data object `{fields: [], rows: []}` can be passed to addDataToMap
 * @public
 * @example
 * import {processCsvData} from 'kepler.gl/processors';
 *
 * const testData = `gps_data.utc_timestamp,gps_data.lat,gps_data.lng,gps_data.types,epoch,has_result,id,time,begintrip_ts_utc,begintrip_ts_local,date
 * 2016-09-17 00:09:55,29.9900937,31.2590542,driver_analytics,1472688000000,False,1,2016-09-23T00:00:00.000Z,2016-10-01 09:41:39+00:00,2016-10-01 09:41:39+00:00,2016-09-23
 * 2016-09-17 00:10:56,29.9927699,31.2461142,driver_analytics,1472688000000,False,2,2016-09-23T00:00:00.000Z,2016-10-01 09:46:37+00:00,2016-10-01 16:46:37+00:00,2016-09-23
 * 2016-09-17 00:11:56,29.9907261,31.2312742,driver_analytics,1472688000000,False,3,2016-09-23T00:00:00.000Z,,,2016-09-23
 * 2016-09-17 00:12:58,29.9870074,31.2175827,driver_analytics,1472688000000,False,4,2016-09-23T00:00:00.000Z,,,2016-09-23`
 *
 * const dataset = {
 *  info: {id: 'test_data', label: 'My Csv'},
 *  data: processCsvData(testData)
 * };
 *
 * dispatch(addDataToMap({
 *  datasets: [dataset],
 *  options: {centerMap: true, readOnly: true}
 * }));
 */
export declare function processCsvData(rawData: unknown[][] | string, header?: string[]): ProcessorResult;
/**
 * Parse rows of csv by analyzed field types. So that `'1'` -> `1`, `'True'` -> `true`
 * @param rows unparsed rows, one array of cell values per row
 * @param fields analyzed field descriptors, one per column
 * @returns rows with cell values converted according to each field's type
 */
export declare function parseRowsByFields(rows: any[][], fields: Field[]): any[][];
/**
 * Process uploaded csv file to parse value by field type, one column at a time.
 * Returns `void`, so rows are presumably mutated in place — TODO confirm.
 *
 * @param rows rows whose cells in column `i` will be parsed
 * @param geoFieldIdx field index of the geometry column (NOTE(review): exact semantics not visible here — verify against callers)
 * @param field descriptor of the column being parsed
 * @param i index of `field`'s column within each row
 */
export declare function parseCsvRowsByFieldType(rows: unknown[][], geoFieldIdx: number, field: Field, i: number): void;
/**
 * Process data where each row is an object, output can be passed to [`addDataToMap`](../actions/actions.md#adddatatomap)
 * NOTE: This function may mutate input.
 * @param rawData an array of row objects; each object should have the same number of keys
 * @returns dataset containing `fields` and `rows`
 * @public
 * @example
 * import {addDataToMap} from 'kepler.gl/actions';
 * import {processRowObject} from 'kepler.gl/processors';
 *
 * const data = [
 *  {lat: 31.27, lng: 127.56, value: 3},
 *  {lat: 31.22, lng: 126.26, value: 1}
 * ];
 *
 * dispatch(addDataToMap({
 *  datasets: {
 *    info: {label: 'My Data', id: 'my_data'},
 *    data: processRowObject(data)
 *  }
 * }));
 */
export declare function processRowObject(rawData: unknown[]): ProcessorResult;
/**
 * Process GeoJSON [`FeatureCollection`](http://wiki.geojson.org/GeoJSON_draft_version_6#FeatureCollection),
 * output a data object with `{fields: [], rows: []}`.
 * The data object can be wrapped in a `dataset` and passed to [`addDataToMap`](../actions/actions.md#adddatatomap)
 * NOTE: This function may mutate input.
 *
 * @param rawData raw geojson feature collection (typed `unknown`; presumably validated internally — TODO confirm accepted forms)
 * @returns dataset containing `fields` and `rows`
 * @public
 * @example
 * import {addDataToMap} from 'kepler.gl/actions';
 * import {processGeojson} from 'kepler.gl/processors';
 *
 * const geojson = {
 * 	"type" : "FeatureCollection",
 * 	"features" : [{
 * 		"type" : "Feature",
 * 		"properties" : {
 * 			"capacity" : "10",
 * 			"type" : "U-Rack"
 * 		},
 * 		"geometry" : {
 * 			"type" : "Point",
 * 			"coordinates" : [ -71.073283, 42.417500 ]
 * 		}
 * 	}]
 * };
 *
 * dispatch(addDataToMap({
 *  datasets: {
 *    info: {
 *      label: 'Sample Taxi Trips in New York City',
 *      id: 'test_trip_data'
 *    },
 *    data: processGeojson(geojson)
 *  }
 * }));
 */
export declare function processGeojson(rawData: unknown): ProcessorResult;
/**
* Process saved kepler.gl json to be pass to [`addDataToMap`](../actions/actions.md#adddatatomap).
* The json object should contain `datasets` and `config`.
* @param rawData
* @param schema
* @returns datasets and config `{datasets: {}, config: {}}`
* @public
* @example
* import {addDataToMap} from 'kepler.gl/actions';
* import {processKeplerglJSON} from 'kepler.gl/processors';
*
* dispatch(addDataToMap(processKeplerglJSON(keplerGlJson)));
*/
export declare function processKeplerglJSON(rawData: SavedMap, schema?: import("@kepler.gl/schemas").KeplerGLSchemaClass): LoadedMap | null;
/**
* Parse a single or an array of datasets saved using kepler.gl schema
* @param rawData
* @param schema
*/
export declare function processKeplerglDataset(rawData: object | object[], schema?: import("@kepler.gl/schemas").KeplerGLSchemaClass): ParsedDataset | ParsedDataset[] | null;
/**
 * Parse arrow table and return a dataset
 *
 * @param arrowTable ArrowTable to parse, see loaders.gl/schema
 * @returns dataset containing `fields` and `rows`, or `null` when the table cannot be processed
 */
export declare function processArrowTable(arrowTable: ArrowTable): ProcessorResult | null;
/**
 * Parse arrow batches returned from parseInBatches()
 *
 * @param arrowBatches the arrow record batches to parse
 * @returns dataset containing `fields` and `rows`, or `null` when the batches cannot be processed
 */
export declare function processArrowBatches(arrowBatches: arrow.RecordBatch[]): ProcessorResult | null;
/**
 * Map of dataset format key to its processor function:
 * row objects, geojson, csv, arrow tables, and saved kepler.gl datasets.
 */
export declare const DATASET_HANDLERS: {
row: typeof processRowObject;
geojson: typeof processGeojson;
csv: typeof processCsvData;
arrow: typeof processArrowTable;
keplergl: typeof processKeplerglDataset;
};
/**
 * Convenience bundle re-exporting every processor in this module plus the
 * field-analysis helpers (`analyzerTypeToFieldType`, `getFieldsFromData`)
 * from `@kepler.gl/utils`.
 * @public
 */
export declare const Processors: {
processGeojson: typeof processGeojson;
processCsvData: typeof processCsvData;
processArrowTable: typeof processArrowTable;
processArrowBatches: typeof processArrowBatches;
processRowObject: typeof processRowObject;
processKeplerglJSON: typeof processKeplerglJSON;
processKeplerglDataset: typeof processKeplerglDataset;
analyzerTypeToFieldType: typeof analyzerTypeToFieldType;
getFieldsFromData: typeof getFieldsFromData;
parseCsvRowsByFieldType: typeof parseCsvRowsByFieldType;
};
// Empty export keeps this file a module, so non-exported declarations
// (e.g. `tryParseJsonString`) stay local to it.
export {};