@pulumi/azure-native
Version: (not captured — see the npm registry for the current release)

[Community Slack](https://slack.pulumi.com) · [npm package](https://npmjs.com/package/@pulumi/azure-native) • 2.62 kB • TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as outputs from "../types/output";
/**
 * Gets the status of a named InferenceGroup inside an InferencePool of an
 * Azure Machine Learning workspace (capacity info, endpoint count, and
 * requested capacity — see {@link GetInferenceGroupStatusResult}).
 *
 * Uses Azure REST API version 2025-01-01-preview.
 *
 * Other available API versions: 2023-08-01-preview, 2024-01-01-preview, 2024-10-01-preview, 2025-04-01-preview, 2025-07-01-preview. These can be accessed by generating a local SDK package using the CLI command `pulumi package add azure-native machinelearningservices [ApiVersion]`. See the [version guide](../../../version-guide/#accessing-any-api-version-via-local-packages) for details.
 *
 * @param args Identifies the group to query (workspace, resource group, pool, and group names).
 * @param opts Optional Pulumi invoke options (e.g. provider, parent).
 * @returns A promise resolving to the group's status.
 */
export declare function getInferenceGroupStatus(args: GetInferenceGroupStatusArgs, opts?: pulumi.InvokeOptions): Promise<GetInferenceGroupStatusResult>;
/**
 * Arguments identifying the InferenceGroup whose status is requested.
 * All four fields are required plain strings (for the Output-based variant
 * with `pulumi.Input` wrappers, see `GetInferenceGroupStatusOutputArgs`).
 */
export interface GetInferenceGroupStatusArgs {
    /**
     * Name of the InferenceGroup to query.
     */
    groupName: string;
    /**
     * Name of the InferencePool that contains the group.
     */
    poolName: string;
    /**
     * The name of the resource group. The name is case insensitive.
     */
    resourceGroupName: string;
    /**
     * Name of the Azure Machine Learning workspace.
     */
    workspaceName: string;
}
/**
 * Status snapshot returned for an InferenceGroup. Every field is optional;
 * the service may omit any of them.
 */
export interface GetInferenceGroupStatusResult {
    /**
     * Gets or sets the actual capacity info for the group.
     */
    readonly actualCapacityInfo?: outputs.machinelearningservices.ActualCapacityInfoResponse;
    /**
     * Gets or sets the actual number of endpoints in the group.
     */
    readonly endpointCount?: number;
    /**
     * Gets or sets the requested number of instances for the group.
     */
    readonly requestedCapacity?: number;
}
/**
 * Output-based variant of {@link getInferenceGroupStatus}: accepts
 * `pulumi.Input` values and returns a `pulumi.Output`, so it composes with
 * other resources' outputs inside a Pulumi program.
 *
 * Uses Azure REST API version 2025-01-01-preview.
 *
 * Other available API versions: 2023-08-01-preview, 2024-01-01-preview, 2024-10-01-preview, 2025-04-01-preview, 2025-07-01-preview. These can be accessed by generating a local SDK package using the CLI command `pulumi package add azure-native machinelearningservices [ApiVersion]`. See the [version guide](../../../version-guide/#accessing-any-api-version-via-local-packages) for details.
 *
 * @param args Identifies the group to query; each field accepts a `pulumi.Input`.
 * @param opts Optional Pulumi invoke-output options.
 * @returns A `pulumi.Output` wrapping the group's status.
 */
export declare function getInferenceGroupStatusOutput(args: GetInferenceGroupStatusOutputArgs, opts?: pulumi.InvokeOutputOptions): pulumi.Output<GetInferenceGroupStatusResult>;
/**
 * Arguments for `getInferenceGroupStatusOutput`. Mirrors
 * `GetInferenceGroupStatusArgs`, but each field is a `pulumi.Input<string>`
 * so values may come from other resources' outputs.
 */
export interface GetInferenceGroupStatusOutputArgs {
    /**
     * Name of the InferenceGroup to query.
     */
    groupName: pulumi.Input<string>;
    /**
     * Name of the InferencePool that contains the group.
     */
    poolName: pulumi.Input<string>;
    /**
     * The name of the resource group. The name is case insensitive.
     */
    resourceGroupName: pulumi.Input<string>;
    /**
     * Name of the Azure Machine Learning workspace.
     */
    workspaceName: pulumi.Input<string>;
}