Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
See the pipeline configuration page for a list of the pipelines and processors currently configured in the UI.
Additional API-related information about processors can be found in the processors documentation.
For more information about pipelines, see the pipeline documentation.
Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.
Grok parsing rules may affect JSON output and require returned data to be configured before using it in a request. For example, if you use data returned from one request in another request body, and a parsing rule uses a regex pattern such as \s for spaces, you must configure all escaped spaces as %{space} before using that data in the body.
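As a minimal illustration of that note, the sketch below (plain Python; the rule text and helper name are hypothetical) shows the kind of substitution to apply to a returned grok rule before reusing it in another request body:

# Hypothetical helper: prepare a grok match rule returned by the API for reuse
# in another request body by turning escaped \s tokens into %{space}.
def escape_spaces_for_reuse(match_rules: str) -> str:
    # Replace the literal two-character sequence \s with %{space}.
    return match_rules.replace("\\s", "%{space}")

rule = "my_rule %{word:user}\\s%{number:status}"  # example value, not from a real response
print(escape_spaces_for_reuse(rule))  # my_rule %{word:user}%{space}%{number:status}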
POST https://api.datadoghq.eu/api/v1/logs/config/pipelines
POST https://api.datadoghq.com/api/v1/logs/config/pipelines
Create a pipeline in your organization.
Definition of the new pipeline.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name [required] | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"is_enabled": false,
"name": "",
"processors": []
}
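For reference, a request body that actually parses incoming logs usually includes at least one processor. The sketch below is hypothetical: it mirrors the JSON body above as a Python dict and assumes the grok-parser processor schema described in the processors documentation; adjust the match rules to your own logs.

# Hypothetical "Create a pipeline" request body with one grok-parser processor.
pipeline_body = {
    "name": "python app logs",
    "is_enabled": True,
    "filter": {"query": "source:python"},
    "processors": [
        {
            "type": "grok-parser",
            "name": "parse access lines",
            "is_enabled": True,
            "source": "message",  # attribute the rules are applied to
            "samples": [],
            "grok": {
                "support_rules": "",
                "match_rules": "access_rule %{word:http.method} %{number:http.status_code}",
            },
        }
    ],
}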
OK
Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name [required] | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"id": "string",
"is_enabled": false,
"is_read_only": false,
"name": "",
"processors": [],
"type": "pipeline"
}
Bad Request
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Curl command
curl -X POST "https://api.datadoghq.com/api/v1/logs/config/pipelines" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}" \
-d @- << EOF
{
"name": ""
}
EOF
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
body := *datadog.NewLogsPipeline("Name_example") // LogsPipeline | Definition of the new pipeline.
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.CreateLogsPipeline(ctx).Body(body).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.CreateLogsPipeline``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `CreateLogsPipeline`: LogsPipeline
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.CreateLogsPipeline:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
LogsPipeline body = new LogsPipeline(); // LogsPipeline | Definition of the new pipeline.
try {
LogsPipeline result = apiInstance.createLogsPipeline()
.body(body)
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#createLogsPipeline");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    body = LogsPipeline(
        filter=LogsFilter(
            query="source:python",
        ),
        id="id_example",
        is_enabled=True,
        is_read_only=True,
        name="",
        processors=[
            LogsProcessor(),
        ],
        type="pipeline",
    ) # LogsPipeline | Definition of the new pipeline.
    # example passing only required values which don't have defaults set
    try:
        # Create a pipeline
        api_response = api_instance.create_logs_pipeline(body)
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->create_logs_pipeline: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
body = DatadogAPIClient::V1::LogsPipeline.new({name: 'name_example'}) # LogsPipeline | Definition of the new pipeline.
begin
# Create a pipeline
result = api_instance.create_logs_pipeline(body)
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->create_logs_pipeline: #{e}"
end
DELETE https://api.datadoghq.eu/api/v1/logs/config/pipelines/{pipeline_id}
DELETE https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
Delete a given pipeline from your organization. This endpoint takes no JSON arguments.
Name | Type | Description
pipeline_id [required] | string | ID of the pipeline to delete.
OK
Bad Request
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command
curl -X DELETE "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}"
package main
import (
"context"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
pipelineId := "pipelineId_example" // string | ID of the pipeline to delete.
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
r, err := api_client.LogsPipelinesApi.DeleteLogsPipeline(ctx, pipelineId).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.DeleteLogsPipeline``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
String pipelineId = "pipelineId_example"; // String | ID of the pipeline to delete.
try {
apiInstance.deleteLogsPipeline(pipelineId)
.execute();
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#deleteLogsPipeline");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    pipeline_id = "pipeline_id_example" # str | ID of the pipeline to delete.
    # example passing only required values which don't have defaults set
    try:
        # Delete a pipeline
        api_instance.delete_logs_pipeline(pipeline_id)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->delete_logs_pipeline: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
pipeline_id = 'pipeline_id_example' # String | ID of the pipeline to delete.
begin
# Delete a pipeline
api_instance.delete_logs_pipeline(pipeline_id)
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->delete_logs_pipeline: #{e}"
end
GET https://api.datadoghq.eu/api/v1/logs/config/pipelines/{pipeline_id}
GET https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
Get a specific pipeline from your organization. This endpoint takes no JSON arguments.
Name | Type | Description
pipeline_id [required] | string | ID of the pipeline to get.
OK
Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name [required] | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"id": "string",
"is_enabled": false,
"is_read_only": false,
"name": "",
"processors": [],
"type": "pipeline"
}
Bad Request
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}"
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
pipelineId := "pipelineId_example" // string | ID of the pipeline to get.
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.GetLogsPipeline(ctx, pipelineId).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.GetLogsPipeline``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `GetLogsPipeline`: LogsPipeline
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.GetLogsPipeline:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
String pipelineId = "pipelineId_example"; // String | ID of the pipeline to get.
try {
LogsPipeline result = apiInstance.getLogsPipeline(pipelineId)
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#getLogsPipeline");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    pipeline_id = "pipeline_id_example" # str | ID of the pipeline to get.
    # example passing only required values which don't have defaults set
    try:
        # Get a pipeline
        api_response = api_instance.get_logs_pipeline(pipeline_id)
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->get_logs_pipeline: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
pipeline_id = 'pipeline_id_example' # String | ID of the pipeline to get.
begin
# Get a pipeline
result = api_instance.get_logs_pipeline(pipeline_id)
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->get_logs_pipeline: #{e}"
end
GET https://api.datadoghq.eu/api/v1/logs/config/pipelines
GET https://api.datadoghq.com/api/v1/logs/config/pipelines
Get all pipelines from your organization. This endpoint takes no JSON arguments.
OK
Array of pipeline ID strings.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"id": "string",
"is_enabled": false,
"is_read_only": false,
"name": "",
"processors": [],
"type": "pipeline"
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Curl command
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipelines" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}"
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.ListLogsPipelines(ctx).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.ListLogsPipelines``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `ListLogsPipelines`: []LogsPipeline
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.ListLogsPipelines:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
try {
List<LogsPipeline> result = apiInstance.listLogsPipelines()
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#listLogsPipelines");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    # example, this endpoint has no required or optional parameters
    try:
        # Get all pipelines
        api_response = api_instance.list_logs_pipelines()
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->list_logs_pipelines: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
begin
# Get all pipelines
result = api_instance.list_logs_pipelines
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->list_logs_pipelines: #{e}"
end
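Pipeline IDs are generated by Datadog, so a common pattern is to list all pipelines and look one up by name before calling the get, update, or delete endpoints. The following is a minimal sketch with the same Python client used in the examples on this page; the pipeline name is a placeholder.

import os
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api

configuration = datadog_api_client.v1.Configuration()
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    # Look up the ID of a pipeline by its (placeholder) name.
    pipelines = api_instance.list_logs_pipelines()
    target = next((p for p in pipelines if p.name == "python app logs"), None)
    if target is not None:
        print(target.id)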
GET https://api.datadoghq.eu/api/v1/logs/config/pipeline-order
GET https://api.datadoghq.com/api/v1/logs/config/pipeline-order
Get the current order of your pipelines. This endpoint takes no JSON arguments.
OK
Object containing the ordered list of pipeline IDs.
{
"pipeline_ids": [
"tags",
"org_ids",
"products"
]
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Curl command
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipeline-order" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}"
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.GetLogsPipelineOrder(ctx).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.GetLogsPipelineOrder``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `GetLogsPipelineOrder`: LogsPipelinesOrder
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.GetLogsPipelineOrder:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
try {
LogsPipelinesOrder result = apiInstance.getLogsPipelineOrder()
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#getLogsPipelineOrder");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    # example, this endpoint has no required or optional parameters
    try:
        # Get pipeline order
        api_response = api_instance.get_logs_pipeline_order()
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->get_logs_pipeline_order: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
begin
# Get pipeline order
result = api_instance.get_logs_pipeline_order
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->get_logs_pipeline_order: #{e}"
end
PUT https://api.datadoghq.eu/api/v1/logs/config/pipelines/{pipeline_id}
PUT https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
Update a given pipeline configuration to change its processors or their order.
Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.
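Because the update replaces the existing configuration, a safe pattern is to fetch the current pipeline, change only what you need, and send the full object back. The following is a minimal sketch with the same Python client used in the examples on this page; the pipeline ID and new name are placeholders, and depending on validation you may need to drop read-only fields before sending.

import os
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api

configuration = datadog_api_client.v1.Configuration()
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    pipeline_id = "CHANGE_ME"                    # placeholder pipeline ID
    current = api_instance.get_logs_pipeline(pipeline_id)
    current.name = "python app logs (renamed)"  # modify only what you need
    # PUT replaces the whole configuration, so send the full object back.
    updated = api_instance.update_logs_pipeline(pipeline_id, current)
    print(updated.name)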
Name | Type | Description
pipeline_id [required] | string | ID of the pipeline to update.
New definition of the pipeline.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name [required] | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"is_enabled": false,
"name": "",
"processors": []
}
OK
Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.
Field | Type | Description
filter | object | Filter for logs.
query | string | The filter query.
id | string | ID of the pipeline.
is_enabled | boolean | Whether or not the pipeline is enabled.
is_read_only | boolean | Whether or not the pipeline can be edited.
name [required] | string | Name of the pipeline.
processors | [object] | Ordered list of processors in this pipeline.
type | string | Type of pipeline.
{
"filter": {
"query": "source:python"
},
"id": "string",
"is_enabled": false,
"is_read_only": false,
"name": "",
"processors": [],
"type": "pipeline"
}
Bad Request
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command
curl -X PUT "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}" \
-d @- << EOF
{
"name": ""
}
EOF
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
pipelineId := "pipelineId_example" // string | ID of the pipeline to delete.
body := *datadog.NewLogsPipeline("Name_example") // LogsPipeline | New definition of the pipeline.
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.UpdateLogsPipeline(ctx, pipelineId).Body(body).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.UpdateLogsPipeline``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `UpdateLogsPipeline`: LogsPipeline
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.UpdateLogsPipeline:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
String pipelineId = "pipelineId_example"; // String | ID of the pipeline to delete.
LogsPipeline body = new LogsPipeline(); // LogsPipeline | New definition of the pipeline.
try {
LogsPipeline result = apiInstance.updateLogsPipeline(pipelineId)
.body(body)
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#updateLogsPipeline");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    pipeline_id = "pipeline_id_example" # str | ID of the pipeline to delete.
    body = LogsPipeline(
        filter=LogsFilter(
            query="source:python",
        ),
        id="id_example",
        is_enabled=True,
        is_read_only=True,
        name="",
        processors=[
            LogsProcessor(),
        ],
        type="pipeline",
    ) # LogsPipeline | New definition of the pipeline.
    # example passing only required values which don't have defaults set
    try:
        # Update a pipeline
        api_response = api_instance.update_logs_pipeline(pipeline_id, body)
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->update_logs_pipeline: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
pipeline_id = 'pipeline_id_example' # String | ID of the pipeline to delete.
body = DatadogAPIClient::V1::LogsPipeline.new({name: 'name_example'}) # LogsPipeline | New definition of the pipeline.
begin
# Update a pipeline
result = api_instance.update_logs_pipeline(pipeline_id, body)
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->update_logs_pipeline: #{e}"
end
PUT https://api.datadoghq.eu/api/v1/logs/config/pipeline-order
PUT https://api.datadoghq.com/api/v1/logs/config/pipeline-order
Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.
Note: Using the PUT method updates your pipeline order by replacing your current order with the new one sent to your Datadog organization.
Object containing the new ordered list of pipeline IDs.
{
"pipeline_ids": [
"tags",
"org_ids",
"products"
]
}
OK
Object containing the ordered list of pipeline IDs.
{
"pipeline_ids": [
"tags",
"org_ids",
"products"
]
}
Bad Request
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
Forbidden
Error response object.
{
"errors": [
"Bad Request"
]
}
Unprocessable Entity
Response returned by the Logs API when errors occur.
Field | Type | Description
error | object | Error returned by the Logs API
code | string | Code identifying the error
details | [object] | Additional error details
message | string | Error message
{
"error": {
"code": "string",
"details": [],
"message": "string"
}
}
# Curl command
curl -X PUT "https://api.datadoghq.com/api/v1/logs/config/pipeline-order" \
-H "Content-Type: application/json" \
-H "DD-API-KEY: ${DD_CLIENT_API_KEY}" \
-H "DD-APPLICATION-KEY: ${DD_CLIENT_APP_KEY}" \
-d @- << EOF
{
"pipeline_ids": [
"tags",
"org_ids",
"products"
]
}
EOF
package main
import (
"context"
"encoding/json"
"fmt"
"os"
datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)
func main() {
ctx := context.WithValue(
context.Background(),
datadog.ContextAPIKeys,
map[string]datadog.APIKey{
"apiKeyAuth": {
Key: os.Getenv("DD_CLIENT_API_KEY"),
},
"appKeyAuth": {
Key: os.Getenv("DD_CLIENT_APP_KEY"),
},
},
)
body := *datadog.NewLogsPipelinesOrder([]string{"PipelineIds_example"}) // LogsPipelinesOrder | Object containing the new ordered list of pipeline IDs.
configuration := datadog.NewConfiguration()
api_client := datadog.NewAPIClient(configuration)
resp, r, err := api_client.LogsPipelinesApi.UpdateLogsPipelineOrder(ctx).Body(body).Execute()
if err != nil {
fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.UpdateLogsPipelineOrder``: %v\n", err)
fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
}
// response from `UpdateLogsPipelineOrder`: LogsPipelinesOrder
response_content, _ := json.MarshalIndent(resp, "", " ")
fmt.Fprintf(os.Stdout, "Response from LogsPipelinesApi.UpdateLogsPipelineOrder:\n%s\n", response_content)
}
// Import classes:
import java.util.*;
import com.datadog.api.v1.client.ApiClient;
import com.datadog.api.v1.client.ApiException;
import com.datadog.api.v1.client.Configuration;
import com.datadog.api.v1.client.auth.*;
import com.datadog.api.v1.client.model.*;
import com.datadog.api.v1.client.api.LogsPipelinesApi;
public class Example {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
// Configure the Datadog site to send API calls to
HashMap<String, String> serverVariables = new HashMap<String, String>();
String site = System.getenv("DD_SITE");
if (site != null) {
serverVariables.put("site", site);
defaultClient.setServerVariables(serverVariables);
}
// Configure API key authorization:
HashMap<String, String> secrets = new HashMap<String, String>();
secrets.put("apiKeyAuth", System.getenv("DD_CLIENT_API_KEY"));
secrets.put("appKeyAuth", System.getenv("DD_CLIENT_APP_KEY"));
defaultClient.configureApiKeys(secrets);
LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);
LogsPipelinesOrder body = new LogsPipelinesOrder(); // LogsPipelinesOrder | Object containing the new ordered list of pipeline IDs.
try {
LogsPipelinesOrder result = apiInstance.updateLogsPipelineOrder()
.body(body)
.execute();
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling LogsPipelinesApi#updateLogsPipelineOrder");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
}
}
import os
from dateutil.parser import parse as dateutil_parser
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import *
from pprint import pprint
# Defining the host is optional and defaults to https://api.datadoghq.com
# See configuration.py for a list of all supported configuration parameters.
configuration = datadog_api_client.v1.Configuration(
    host = "https://api.datadoghq.com"
)
# The client must configure the authentication and authorization parameters
# in accordance with the API server security policy.
# Examples for each auth method are provided below, use the example that
# satisfies your auth use case.
# Configure API key authorization: apiKeyAuth
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
# configuration.api_key_prefix['appKeyAuth'] = 'Bearer'
# Enter a context with an instance of the API client
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    body = LogsPipelinesOrder(
        pipeline_ids=["tags","org_ids","products"],
    ) # LogsPipelinesOrder | Object containing the new ordered list of pipeline IDs.
    # example passing only required values which don't have defaults set
    try:
        # Update pipeline order
        api_response = api_instance.update_logs_pipeline_order(body)
        pprint(api_response)
    except datadog_api_client.v1.ApiException as e:
        print("Exception when calling LogsPipelinesApi->update_logs_pipeline_order: %s\n" % e)
require 'time'
require 'datadog_api_client/v1'
# setup authorization
DatadogAPIClient::V1.configure do |config|
# Configure API key authorization: apiKeyAuth
config.api_key['apiKeyAuth'] = ENV["DD_CLIENT_API_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['apiKeyAuth'] = 'Bearer'
# Configure API key authorization: appKeyAuth
config.api_key['appKeyAuth'] = ENV["DD_CLIENT_APP_KEY"]
# Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
# config.api_key_prefix['appKeyAuth'] = 'Bearer'
end
api_instance = DatadogAPIClient::V1::LogsPipelinesApi.new
body = DatadogAPIClient::V1::LogsPipelinesOrder.new({pipeline_ids: ['pipeline_ids_example']}) # LogsPipelinesOrder | Object containing the new ordered list of pipeline IDs.
begin
# Update pipeline order
result = api_instance.update_logs_pipeline_order(body)
p result
rescue DatadogAPIClient::V1::ApiError => e
puts "Error when calling LogsPipelinesApi->update_logs_pipeline_order: #{e}"
end
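Because the PUT method replaces the entire order, a practical pattern is to fetch the current order, move the IDs you need, and send the whole list back. The following is a minimal sketch with the same Python client used in the examples above; the pipeline ID being moved is a placeholder.

import os
import datadog_api_client.v1
from datadog_api_client.v1.api import logs_pipelines_api
from datadog_api_client.v1.models import LogsPipelinesOrder

configuration = datadog_api_client.v1.Configuration()
configuration.api_key['apiKeyAuth'] = os.getenv('DD_CLIENT_API_KEY')
configuration.api_key['appKeyAuth'] = os.getenv('DD_CLIENT_APP_KEY')
with datadog_api_client.v1.ApiClient(configuration) as api_client:
    api_instance = logs_pipelines_api.LogsPipelinesApi(api_client)
    order = api_instance.get_logs_pipeline_order()
    ids = list(order.pipeline_ids)
    # Move a (placeholder) pipeline ID to the front; logs flow through pipelines in this order.
    ids.insert(0, ids.pop(ids.index("CHANGE_ME")))
    new_order = api_instance.update_logs_pipeline_order(LogsPipelinesOrder(pipeline_ids=ids))
    print(new_order.pipeline_ids)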