
Inference
Run a model inference on the Hugging Face Inference API.
```yaml
type: "io.kestra.plugin.huggingface.Inference"
```

Examples
```yaml
id: huggingface_inference_text
namespace: company.team

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "I want a refund"
```
```yaml
id: huggingface_inference
namespace: company.team

tasks:
  - id: huggingface_inference_image
    type: io.kestra.plugin.huggingface.Inference
    model: google/vit-base-patch16-224
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "{{ read('my-base64-image.txt') }}"
    parameters:
      function_to_apply: sigmoid
      top_k: 3
    waitForModel: true
    useCache: false
```
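
The string properties in these examples (`apiKey`, `inputs`) are rendered with Kestra expressions such as `secret()` and `read()`, so the text to classify can just as well come from a flow input instead of being hard-coded. A minimal sketch, assuming a standard Kestra `STRING` flow input; the input name `text` is illustrative:

```yaml
id: huggingface_inference_dynamic
namespace: company.team

inputs:
  - id: text        # illustrative flow input carrying the text to classify
    type: STRING

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "{{ inputs.text }}"   # rendered at execution time from the flow input
```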
Properties

- apiKey (string, required)
- inputs (string, required)
- model (string, required)
- endpoint (string)
  Default: https://api-inference.huggingface.co/models
- options (io.kestra.plugin.huggingface.AbstractHttpTask-RequestOptions; see the Definitions below and the sketch after them)
- parameters (object)
- useCache (boolean or string)
  Default: true
- waitForModel (boolean or string)
  Default: false

Definitions

io.kestra.plugin.huggingface.AbstractHttpTask-RequestOptions

- connectTimeout (string, format: duration)
- connectionPoolIdleTimeout (string, format: duration)
  Default: PT0S
- defaultCharset (string, java.nio.charset.Charset)
  Default: UTF-8
- maxContentLength (integer or string)
  Default: 10485760
- readIdleTimeout (string, format: duration)
  Default: PT5M
- readTimeout (string, format: duration)
  Default: PT10S
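
Putting the optional properties together, the sketch below keeps the default endpoint (shown explicitly), tunes caching and model loading, and tightens the HTTP timeouts. It assumes the RequestOptions fields from the Definitions can be nested directly under `options`; the timeout values are illustrative.

```yaml
id: huggingface_inference_tuned
namespace: company.team

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "I want a refund"
    endpoint: https://api-inference.huggingface.co/models   # default value, shown explicitly
    waitForModel: true    # wait while the model loads (default: false)
    useCache: false       # skip the inference cache (default: true)
    options:              # HTTP request options (assumed nesting of the RequestOptions fields)
      connectTimeout: PT30S
      readTimeout: PT1M
```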