Create or update a Logstash pipeline
Generally available; Added in 7.12.0
Create a pipeline that is used for Logstash Central Management. If the specified pipeline exists, it is replaced.
Required authorization
- Cluster privileges: `manage_logstash_pipelines`
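As a minimal sketch (not part of this reference), assuming the Python client and an illustrative role name, a role carrying this cluster privilege could be created like so:

from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200", api_key="...")  # placeholder connection details

# Illustrative role name; any role that grants the
# `manage_logstash_pipelines` cluster privilege is sufficient.
client.security.put_role(
    name="logstash_pipeline_manager",
    cluster=["manage_logstash_pipelines"],
)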
Body Required
- `description` (string): A description of the pipeline. This description is not used by Elasticsearch or Logstash.
- `last_modified` (string | number, Required): A date and time, either as a string whose format can depend on the context (defaulting to ISO 8601), or a number of milliseconds since the Epoch. Elasticsearch accepts both as input, but will generally output a string representation. A short sketch of the two accepted forms follows this list.
- `pipeline` (string): The configuration for the pipeline.
- `pipeline_metadata` (object): Optional metadata about the pipeline. This metadata is not used by Elasticsearch or Logstash.
- `pipeline_settings` (object): Settings for the pipeline, supplied as flat keys in dot notation (see the request example).
- `username` (string): The user who last updated the pipeline.
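For illustration only, the snippet below derives the epoch-millisecond value that corresponds to the ISO 8601 timestamp used in the examples; either form is a valid `last_modified` value (the 1609555851250 figure is computed here, not taken from the reference):

from datetime import datetime, timezone

# The same instant expressed in both accepted forms; either value can be
# supplied as `last_modified`.
iso_form = "2021-01-02T02:50:51.250Z"
epoch_ms = int(datetime(2021, 1, 2, 2, 50, 51, 250000, tzinfo=timezone.utc).timestamp() * 1000)
print(iso_form, epoch_ms)  # 2021-01-02T02:50:51.250Z 1609555851250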
PUT /_logstash/pipeline/{id}
Console

PUT _logstash/pipeline/my_pipeline
{
  "description": "Sample pipeline for illustration purposes",
  "last_modified": "2021-01-02T02:50:51.250Z",
  "pipeline_metadata": {
    "type": "logstash_pipeline",
    "version": 1
  },
  "username": "elastic",
  "pipeline": "input {}\n filter { grok {} }\n output {}",
  "pipeline_settings": {
    "pipeline.workers": 1,
    "pipeline.batch.size": 125,
    "pipeline.batch.delay": 50,
    "queue.type": "memory",
    "queue.max_bytes": "1gb",
    "queue.checkpoint.writes": 1024
  }
}
Python

resp = client.logstash.put_pipeline(
    id="my_pipeline",
    pipeline={
        "description": "Sample pipeline for illustration purposes",
        "last_modified": "2021-01-02T02:50:51.250Z",
        "pipeline_metadata": {
            "type": "logstash_pipeline",
            "version": 1
        },
        "username": "elastic",
        "pipeline": "input {}\n filter { grok {} }\n output {}",
        "pipeline_settings": {
            "pipeline.workers": 1,
            "pipeline.batch.size": 125,
            "pipeline.batch.delay": 50,
            "queue.type": "memory",
            "queue.max_bytes": "1gb",
            "queue.checkpoint.writes": 1024
        }
    },
)
JavaScript

const response = await client.logstash.putPipeline({
  id: "my_pipeline",
  pipeline: {
    description: "Sample pipeline for illustration purposes",
    last_modified: "2021-01-02T02:50:51.250Z",
    pipeline_metadata: {
      type: "logstash_pipeline",
      version: 1,
    },
    username: "elastic",
    pipeline: "input {}\n filter { grok {} }\n output {}",
    pipeline_settings: {
      "pipeline.workers": 1,
      "pipeline.batch.size": 125,
      "pipeline.batch.delay": 50,
      "queue.type": "memory",
      "queue.max_bytes": "1gb",
      "queue.checkpoint.writes": 1024,
    },
  },
});
Ruby

response = client.logstash.put_pipeline(
  id: "my_pipeline",
  body: {
    "description": "Sample pipeline for illustration purposes",
    "last_modified": "2021-01-02T02:50:51.250Z",
    "pipeline_metadata": {
      "type": "logstash_pipeline",
      "version": 1
    },
    "username": "elastic",
    "pipeline": "input {}\n filter { grok {} }\n output {}",
    "pipeline_settings": {
      "pipeline.workers": 1,
      "pipeline.batch.size": 125,
      "pipeline.batch.delay": 50,
      "queue.type": "memory",
      "queue.max_bytes": "1gb",
      "queue.checkpoint.writes": 1024
    }
  }
)
PHP

$resp = $client->logstash()->putPipeline([
    "id" => "my_pipeline",
    "body" => [
        "description" => "Sample pipeline for illustration purposes",
        "last_modified" => "2021-01-02T02:50:51.250Z",
        "pipeline_metadata" => [
            "type" => "logstash_pipeline",
            "version" => 1,
        ],
        "username" => "elastic",
        "pipeline" => "input {}\n filter { grok {} }\n output {}",
        "pipeline_settings" => [
            "pipeline.workers" => 1,
            "pipeline.batch.size" => 125,
            "pipeline.batch.delay" => 50,
            "queue.type" => "memory",
            "queue.max_bytes" => "1gb",
            "queue.checkpoint.writes" => 1024,
        ],
    ],
]);
curl

curl -X PUT \
  -H "Authorization: ApiKey $ELASTIC_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"description":"Sample pipeline for illustration purposes","last_modified":"2021-01-02T02:50:51.250Z","pipeline_metadata":{"type":"logstash_pipeline","version":1},"username":"elastic","pipeline":"input {}\n filter { grok {} }\n output {}","pipeline_settings":{"pipeline.workers":1,"pipeline.batch.size":125,"pipeline.batch.delay":50,"queue.type":"memory","queue.max_bytes":"1gb","queue.checkpoint.writes":1024}}' \
  "$ELASTICSEARCH_URL/_logstash/pipeline/my_pipeline"
Java

client.logstash().putPipeline(p -> p
    .id("my_pipeline")
    .pipeline(pi -> pi
        .description("Sample pipeline for illustration purposes")
        .lastModified(DateTime.of("2021-01-02T02:50:51.250Z"))
        .pipeline("input {}\n filter { grok {} }\n output {}")
        .pipelineMetadata(pip -> pip
            .type("logstash_pipeline")
            .version("1")
        )
        .pipelineSettings(pip -> pip
            .pipelineWorkers(1)
            .pipelineBatchSize(125)
            .pipelineBatchDelay(50)
            .queueType("memory")
            .queueMaxBytes("1gb")
            .queueCheckpointWrites(1024)
        )
        .username("elastic")
    )
);
Request example
Run `PUT _logstash/pipeline/my_pipeline` to create a pipeline.
{ "description": "Sample pipeline for illustration purposes", "last_modified": "2021-01-02T02:50:51.250Z", "pipeline_metadata": { "type": "logstash_pipeline", "version": 1 }, "username": "elastic", "pipeline": "input {}\\n filter { grok {} }\\n output {}", "pipeline_settings": { "pipeline.workers": 1, "pipeline.batch.size": 125, "pipeline.batch.delay": 50, "queue.type": "memory", "queue.max_bytes": "1gb", "queue.checkpoint.writes": 1024 } }