bl run resource-type resource-name [flags]
bl run agent my-agent --data '{"inputs": "Hello, world!"}'
bl run model my-model --data '{"inputs": "Hello, world!"}'
bl run job my-job --file myjob.json
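For instance, the request path, HTTP method, and headers can all be customized in one call; the agent name, path, and header value below are illustrative placeholders, not documented defaults:
bl run agent my-agent --method GET --path /status --header 'X-Request-Id: 123'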
-d, --data string JSON body data for the inference request
--debug Debug mode
--directory string Directory to run the command from
-e, --env-file strings Environment file to load (default [.env])
-f, --file string Input from a file
--header stringArray Request headers in 'Key: Value' format. Can be specified multiple times
-h, --help help for run
--local Run locally
--method string HTTP method for the inference request (default "POST")
--params strings Query parameters sent with the inference request
--path string Path for the inference request
-s, --secrets strings Secrets to deploy
--upload-file string Transfer the specified local file to the remote URL
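As a rough sketch, several of the request-shaping flags above can be combined in a single invocation; the header and query-parameter values here are assumptions about format (a 'key=value' string), not documented behavior:
bl run model my-model --data '{"inputs": "Hello"}' --header 'Content-Type: application/json' --params 'stream=true'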
-o, --output string Output format. One of: pretty, yaml, json, table
--skip-version-warning Skip version warning
-u, --utc Enable UTC timezone
-v, --verbose Enable verbose output
-w, --workspace string Specify the workspace name
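These global flags combine with the run-specific flags; for example, targeting a specific workspace and printing the response as JSON (the workspace name is a placeholder):
bl run job my-job --file myjob.json --workspace my-workspace --output json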