Inference
```yaml
type: "io.kestra.plugin.huggingface.Inference"
```

Examples
Run text classification (sentiment analysis) on a string input:

```yaml
id: huggingface_inference_text
namespace: company.team

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "I want a refund"
```
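The model response can be consumed by downstream tasks through Kestra's expression syntax. The sketch below assumes the task exposes its raw response under an `output` attribute; the exact attribute name should be checked against the task's Outputs reference.

```yaml
id: huggingface_inference_text
namespace: company.team

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "I want a refund"

  # Hypothetical follow-up task: `output` is an assumed attribute name,
  # verify it against the task's Outputs documentation.
  - id: log_sentiment
    type: io.kestra.plugin.core.log.Log
    message: "Model response: {{ outputs.huggingface_inference.output }}"
```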
Run image classification on a base64-encoded image, passing model parameters and Inference API options:

```yaml
id: huggingface_inference
namespace: company.team

tasks:
  - id: huggingface_inference_image
    type: io.kestra.plugin.huggingface.Inference
    model: google/vit-base-patch16-224
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "{{ read('my-base64-image.txt') }}"
    parameters:
      function_to_apply: sigmoid
      top_k: 3
    waitForModel: true
    useCache: false
```
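In the public Hugging Face Inference API, `waitForModel` and `useCache` correspond to the `wait_for_model` and `use_cache` request options: the first asks the API to wait for a cold model to finish loading instead of failing immediately, and the second bypasses the shared inference cache so every execution gets a fresh prediction. This describes the Inference API's documented behavior; the plugin reference remains authoritative for how the task forwards these values.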
Properties

| Property | Type | Required | Default |
| --- | --- | --- | --- |
| apiKey | string | Yes | |
| inputs | string | Yes | |
| model | string | Yes | |
| endpoint | string | No | `https://api-inference.huggingface.co/models` |
| options | AbstractHttpTask-RequestOptions (see Definitions) | No | |
| parameters | object | No | |
| useCache | boolean \| string | No | `true` |
| waitForModel | boolean \| string | No | `false` |

Definitions

io.kestra.plugin.huggingface.AbstractHttpTask-RequestOptions

| Property | Type | Default |
| --- | --- | --- |
| connectTimeout | string | |
| connectionPoolIdleTimeout | string | `PT0S` |
| defaultCharset | string (`java.nio.charset.Charset`) | `UTF-8` |
| maxContentLength | integer \| string | `10485760` |
| readIdleTimeout | string | `PT5M` |
| readTimeout | string | `PT10S` |
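The request options above tune the underlying HTTP client and are set through the task's `options` property. The sketch below uses the property names from the Definitions table; the timeout values themselves are illustrative.

```yaml
id: huggingface_inference_with_options
namespace: company.team

tasks:
  - id: huggingface_inference
    type: io.kestra.plugin.huggingface.Inference
    model: cardiffnlp/twitter-roberta-base-sentiment-latest
    apiKey: "{{ secret('HUGGINGFACE_API_KEY') }}"
    inputs: "I want a refund"
    options:
      connectTimeout: PT10S       # ISO-8601 durations, matching the PT… defaults above
      readTimeout: PT30S          # illustrative: allow more time for slow or cold models
      maxContentLength: 10485760  # default shown in the Definitions table
```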