Studio REST API v1.0.0
Scroll down for code samples, example requests, and responses.
DataChain Studio provides a REST API for programmatically managing datasets, jobs, and storage operations. All API endpoints require authentication and are scoped to specific teams.
Authorization:
All API endpoints require authentication via a Studio token, which must be included in the Authorization header of every request.
You can get a token by running `datachain auth token` after logging in with `datachain auth login`, or from the Tokens page in the Studio UI Settings.
Once you have a token, attach it to the Authorization header as shown in the example below.
- Base URL: https://studio.datachain.ai/api
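For example, a minimal authenticated request against the base URL above (a sketch only; it assumes the raw token value is sent as the Authorization header, matching the API_KEY placeholder used in the code samples below, and uses a placeholder team name):

```python
import http.client
import json

STUDIO_TOKEN = "<your-studio-token>"  # e.g. the output of `datachain auth token`
TEAM_NAME = "TeamName"                # placeholder team name

conn = http.client.HTTPSConnection("studio.datachain.ai")
conn.request(
    "GET",
    f"/api/datachain/jobs/?team_name={TEAM_NAME}",
    headers={
        "Accept": "application/json",
        # Assumption: the token is passed verbatim as the header value.
        "Authorization": STUDIO_TOKEN,
    },
)
res = conn.getresponse()
jobs = json.loads(res.read().decode("utf-8"))
print(res.status, len(jobs), "jobs")
```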
Default
Get Jobs
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
headers = {
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("GET", "/api/datachain/jobs/?team_name=string", headers=headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request GET \
--url 'https://studio.datachain.ai/api/datachain/jobs/?team_name=string' \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY'
GET /api/datachain/jobs/
Retrieve a list of jobs with optional status filtering.
Requires a token with read access to JOB scope.
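The query parameter used for status filtering is not listed here, so the sketch below simply filters the returned list client-side on the status field shown in the example response (token and team name are placeholders):

```python
import http.client
import json

conn = http.client.HTTPSConnection("studio.datachain.ai")
conn.request(
    "GET",
    "/api/datachain/jobs/?team_name=TeamName",
    headers={"Accept": "application/json", "Authorization": "API_KEY"},
)
jobs = json.loads(conn.getresponse().read().decode("utf-8"))

# "CREATED" is the status value shown in the example response; other values are not listed here.
created = [job for job in jobs if job["status"] == "CREATED"]
for job in created:
    print(job["id"], job["status"], job["name"])
```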
Example responses
200 Response
[
{
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"url": "https://studio.datachain.ai/team/team_name/datasets/jobs/0502eef6-a32e-45fa-8e3b-d20ecpabbcf0",
"status": "CREATED",
"created_at": "2021-01-01T00:00:00Z",
"created_by": "username",
"finished_at": "2021-01-01T00:00:00Z",
"query": "print('Hello, World!')",
"query_type": "PYTHON",
"team": "TeamName",
"name": "QueryName",
"workers": 1,
"python_version": "3.12",
"requirements": "numpy==1.24.0",
"repository": "https://github.com/user/repo",
"environment": {
"ENV_NAME": "ENV_VALUE"
},
"exit_code": 0,
"error_message": "Error message"
}
]
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | Inline |
Response Schema
Status Code 200
Create Job
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"query\":\"print('Hello, World!')\",\"query_type\":\"PYTHON\",\"team_name\":\"TeamName\",\"environment\":\"ENV_NAME=ENV_VALUE\",\"workers\":1,\"query_name\":\"QueryName\",\"files\":[\"2\",\"3\"],\"python_version\":\"3.12\",\"requirements\":\"numpy==1.24.0\",\"repository\":\"https://github.com/user/repo\",\"priority\":1,\"compute_cluster_name\":\"ComputeClusterName\",\"compute_cluster_id\":1,\"start_after\":\"2021-01-01T00:00:00Z\",\"cron_expression\":\"0 0 * * *\",\"credentials_name\":\"CredentialsName\"}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/jobs/", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/jobs/ \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"query":"print('\''Hello, World!'\'')","query_type":"PYTHON","team_name":"TeamName","environment":"ENV_NAME=ENV_VALUE","workers":1,"query_name":"QueryName","files":["2","3"],"python_version":"3.12","requirements":"numpy==1.24.0","repository":"https://github.com/user/repo","priority":1,"compute_cluster_name":"ComputeClusterName","compute_cluster_id":1,"start_after":"2021-01-01T00:00:00Z","cron_expression":"0 0 * * *","credentials_name":"CredentialsName"}'
POST /api/datachain/jobs/
Creates a job and returns the job metadata.
Note that compute_cluster_name and compute_cluster_id are mutually exclusive. Requires a token with write access to JOB scope.
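As a sketch, the environment field is a plain string of NAME=VALUE pairs, so it can be built from a dict before posting. This assumes multiple variables are newline-separated (the example body only shows a single "ENV_NAME=ENV_VALUE" pair) and that the omitted body fields are optional:

```python
import http.client
import json

env = {"ENV_NAME": "ENV_VALUE", "OTHER": "1"}
body = {
    "query": "print('Hello, World!')",
    "query_type": "PYTHON",
    "team_name": "TeamName",
    "query_name": "QueryName",
    "workers": 1,
    # Assumption: multiple variables are newline-separated NAME=VALUE pairs.
    "environment": "\n".join(f"{k}={v}" for k, v in env.items()),
}

conn = http.client.HTTPSConnection("studio.datachain.ai")
conn.request(
    "POST",
    "/api/datachain/jobs/",
    body=json.dumps(body),
    headers={
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Authorization": "API_KEY",
    },
)
job = json.loads(conn.getresponse().read().decode("utf-8"))
print(job["id"], job["status"])
```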
Body parameter
{
"query": "print('Hello, World!')",
"query_type": "PYTHON",
"team_name": "TeamName",
"environment": "ENV_NAME=ENV_VALUE",
"workers": 1,
"query_name": "QueryName",
"files": [
"2",
"3"
],
"python_version": "3.12",
"requirements": "numpy==1.24.0",
"repository": "https://github.com/user/repo",
"priority": 1,
"compute_cluster_name": "ComputeClusterName",
"compute_cluster_id": 1,
"start_after": "2021-01-01T00:00:00Z",
"cron_expression": "0 0 * * *",
"credentials_name": "CredentialsName"
}
Example responses
200 Response
{
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"url": "https://studio.datachain.ai/team/team_name/datasets/jobs/0502eef6-a32e-45fa-8e3b-d20ecpabbcf0",
"status": "CREATED",
"created_at": "2021-01-01T00:00:00Z",
"created_by": "username",
"finished_at": "2021-01-01T00:00:00Z",
"query": "print('Hello, World!')",
"query_type": "PYTHON",
"team": "TeamName",
"name": "QueryName",
"workers": 1,
"python_version": "3.12",
"requirements": "numpy==1.24.0",
"repository": "https://github.com/user/repo",
"environment": {
"ENV_NAME": "ENV_VALUE"
},
"exit_code": 0,
"error_message": "Error message"
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | JobOutput |
Get Job Logs
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
headers = { 'Authorization': "API_KEY" }
conn.request("GET", "/api/datachain/jobs/497f6eca-6276-4993-bfeb-53cbbbba6f08/logs", headers=headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request GET \
--url https://studio.datachain.ai/api/datachain/jobs/497f6eca-6276-4993-bfeb-53cbbbba6f08/logs \
--header 'Authorization: API_KEY'
GET /api/datachain/jobs/{job_id}/logs
Retrieve the logs for an active job.
Requires a token with read access to JOB scope.
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | None |
Cancel Job
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"team_name\":\"TeamName\"}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/jobs/497f6eca-6276-4993-bfeb-53cbbbba6f08/cancel", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/jobs/497f6eca-6276-4993-bfeb-53cbbbba6f08/cancel \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"team_name":"TeamName"}'
POST /api/datachain/jobs/{job_id}/cancel
Cancel a running or queued job.
Requires a token with write access to JOB scope.
Body parameter
Example responses
200 Response
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | ActionFeedback |
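Get Jobs can be combined with this endpoint to cancel every job in a given state. A sketch only: the cancel body mirrors the {"team_name": ...} payload from the curl sample above, and the status value to match is illustrative.

```python
import http.client
import json

TEAM = "TeamName"
HEADERS = {
    "Content-Type": "application/json",
    "Accept": "application/json",
    "Authorization": "API_KEY",
}

conn = http.client.HTTPSConnection("studio.datachain.ai")

# List jobs for the team.
conn.request("GET", f"/api/datachain/jobs/?team_name={TEAM}", headers=HEADERS)
jobs = json.loads(conn.getresponse().read().decode("utf-8"))

# Cancel every job still in the illustrative "CREATED" state.
for job in jobs:
    if job["status"] != "CREATED":
        continue
    conn.request(
        "POST",
        f"/api/datachain/jobs/{job['id']}/cancel",
        body=json.dumps({"team_name": TEAM}),
        headers=HEADERS,
    )
    res = conn.getresponse()
    res.read()  # drain the body so the connection can be reused
    print(job["id"], res.status)
```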
Upload File
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "-----011000010111000001101001\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\nstring\r\n-----011000010111000001101001--\r\n"
headers = {
'Content-Type': "multipart/form-data; boundary=---011000010111000001101001",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/jobs/files?team_name=string", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url 'https://studio.datachain.ai/api/datachain/jobs/files?team_name=string' \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: multipart/form-data; boundary=---011000010111000001101001' \
--form file=string
POST /api/datachain/jobs/files
Upload a file to use with a job.
Use the file id returned by this endpoint in the files field of the job input.
Requires a token with write access to JOB scope.
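A typical flow is to upload the file first and then reference its id when creating the job. The sketch below uses the third-party requests library for the multipart upload, a hypothetical local file helper.py, and assumes the UploadFileOutput body (not shown above) exposes the file id as an "id" field:

```python
import requests  # third-party; simplifies the multipart upload

BASE = "https://studio.datachain.ai/api/datachain"
HEADERS = {"Authorization": "API_KEY"}
TEAM = "TeamName"

# 1. Upload the file.
with open("helper.py", "rb") as f:  # hypothetical file
    upload = requests.post(
        f"{BASE}/jobs/files",
        params={"team_name": TEAM},
        headers=HEADERS,
        files={"file": f},  # form field name taken from the sample above
    )
upload.raise_for_status()
file_id = upload.json()["id"]  # assumption: UploadFileOutput exposes the id as "id"

# 2. Reference the uploaded file in the job input.
job = requests.post(
    f"{BASE}/jobs/",
    headers=HEADERS,
    json={
        "query": "print('Hello, World!')",
        "query_type": "PYTHON",
        "team_name": TEAM,
        "files": [str(file_id)],  # the example body passes ids as strings
    },
)
job.raise_for_status()
print(job.json()["id"], job.json()["status"])
```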
Body parameter
Example responses
200 Response
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | UploadFileOutput |
Get Clusters
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
headers = {
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("GET", "/api/datachain/clusters/", headers=headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request GET \
--url https://studio.datachain.ai/api/datachain/clusters/ \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY'
GET /api/datachain/clusters/
Retrieve the list of compute clusters.
Example responses
200 Response
[
{
"id": 1,
"name": "ComputeClusterName",
"status": "ACTIVE",
"cloud_provider": "AWS",
"cloud_credentials": "CredentialsName",
"is_active": true,
"default": true,
"max_workers": 1
}
]
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | Inline |
Response Schema
Status Code 200
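The cluster names returned here can be passed as compute_cluster_name in the job input. A sketch that picks the default cluster, based on the default flag shown in the example response:

```python
import http.client
import json

HEADERS = {"Accept": "application/json", "Authorization": "API_KEY"}

conn = http.client.HTTPSConnection("studio.datachain.ai")
conn.request("GET", "/api/datachain/clusters/", headers=HEADERS)
clusters = json.loads(conn.getresponse().read().decode("utf-8"))

# Prefer the cluster flagged as default, falling back to the first one.
default = next((c for c in clusters if c.get("default")), clusters[0])
print("using cluster:", default["name"], "max workers:", default["max_workers"])

# The name can then be used as compute_cluster_name when creating a job
# (compute_cluster_name and compute_cluster_id are mutually exclusive).
```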
Trigger Pipeline
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"datasets\":[\"@[email protected]\",\"@amritghimire.project_name.dataset_name\",\"dataset_name\",\"[email protected]\"],\"team_name\":\"team_name\",\"environment\":\"ENV_NAME=ENV_VALUE\",\"review\":false}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/pipeline/trigger", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/pipeline/trigger \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"datasets":["@[email protected]","@amritghimire.project_name.dataset_name","dataset_name","[email protected]"],"team_name":"team_name","environment":"ENV_NAME=ENV_VALUE","review":false}'
POST /api/datachain/pipeline/trigger
Triggers a pipeline based on the specified datasets' dependencies.
This endpoint accepts an array of datasets. A single pipeline is created for all of them combined, and the jobs needed to rebuild the dependencies of every specified dataset are run. Each dataset can be fully qualified by providing the namespace, project, dataset name, and version in the format namespace.project.dataset_name@version. If no version is provided, the latest version is used.
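After a successful call, the pipeline_id and the completed/total counters in the response can be used to track progress. A minimal sketch (token, team, and dataset name are placeholders):

```python
import http.client
import json

body = {
    "datasets": ["dataset_name"],  # unqualified name; the latest version is used
    "team_name": "team_name",
}

conn = http.client.HTTPSConnection("studio.datachain.ai")
conn.request(
    "POST",
    "/api/datachain/pipeline/trigger",
    body=json.dumps(body),
    headers={
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Authorization": "API_KEY",
    },
)
result = json.loads(conn.getresponse().read().decode("utf-8"))

pipeline = result["pipeline"]
print(pipeline["pipeline_id"], pipeline["status"])
print(f'{pipeline["completed"]}/{pipeline["total"]} jobs completed')
for run in pipeline["job_runs"]:
    print(" ", run["name"], run["status"])
```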
Body parameter
{
"datasets": [
"@[email protected]",
"@amritghimire.project_name.dataset_name",
"dataset_name",
"[email protected]"
],
"team_name": "team_name",
"environment": "ENV_NAME=ENV_VALUE",
"review": false
}
Example responses
200 Response
{
"ok": true,
"pipeline": {
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | PipelineOutput |
Get Pipeline
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
headers = {
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("GET", "/api/datachain/pipeline/status?team_name=string", headers=headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request GET \
--url 'https://studio.datachain.ai/api/datachain/pipeline/status?team_name=string' \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY'
GET /api/datachain/pipeline/status
Retrieve the status of a pipeline.
Example responses
200 Response
{
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | JobPipelineOutput |
Pause Pipeline
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"team_name\":\"team_name\",\"pipeline_id\":\"ec036e81-7903-4e4d-bbfa-ac8516341cf0\",\"name\":\"string\"}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/pipeline/pause", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/pipeline/pause \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"team_name":"team_name","pipeline_id":"ec036e81-7903-4e4d-bbfa-ac8516341cf0","name":"string"}'
POST /api/datachain/pipeline/pause
Pause a pipeline.
Body parameter
{
"team_name": "team_name",
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"name": "string"
}
Example responses
200 Response
{
"ok": true,
"pipeline": {
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | PipelineOutput |
Resume Pipeline
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"team_name\":\"team_name\",\"pipeline_id\":\"ec036e81-7903-4e4d-bbfa-ac8516341cf0\",\"name\":\"string\"}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/pipeline/resume", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/pipeline/resume \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"team_name":"team_name","pipeline_id":"ec036e81-7903-4e4d-bbfa-ac8516341cf0","name":"string"}'
POST /api/datachain/pipeline/resume
Resume a paused pipeline.
Body parameter
{
"team_name": "team_name",
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"name": "string"
}
Example responses
200 Response
{
"ok": true,
"pipeline": {
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | PipelineOutput |
Remove Job From Pipeline
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "{\"team_name\":\"team_name\",\"pipeline_id\":\"ec036e81-7903-4e4d-bbfa-ac8516341cf0\",\"name\":\"string\",\"job_id\":\"job_id\"}"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("POST", "/api/datachain/pipeline/remove-job", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request POST \
--url https://studio.datachain.ai/api/datachain/pipeline/remove-job \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '{"team_name":"team_name","pipeline_id":"ec036e81-7903-4e4d-bbfa-ac8516341cf0","name":"string","job_id":"job_id"}'
POST /api/datachain/pipeline/remove-job
Remove a job from a pipeline.
Body parameter
{
"team_name": "team_name",
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"name": "string",
"job_id": "job_id"
}
Example responses
200 Response
{
"ok": true,
"pipeline": {
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
}
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | PipelineOutput |
List Pipelines
Code samples
import http.client
conn = http.client.HTTPSConnection("studio.datachain.ai")
payload = "[\"string\"]"
headers = {
'Content-Type': "application/json",
'Accept': "application/json",
'Authorization': "API_KEY"
}
conn.request("GET", "/api/datachain/pipeline/list?team_name=string", payload, headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
curl --request GET \
--url 'https://studio.datachain.ai/api/datachain/pipeline/list?team_name=string' \
--header 'Accept: application/json' \
--header 'Authorization: API_KEY' \
--header 'Content-Type: application/json' \
--data '["string"]'
GET /api/datachain/pipeline/list
Retrieve the list of pipelines for a team.
Body parameter
Example responses
200 Response
[
{
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
]
Responses
| Status | Meaning | Description | Schema |
|---|---|---|---|
| 200 | OK | OK | Inline |
Response Schema
Status Code 200
Schemas
JobOutput
{
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"url": "https://studio.datachain.ai/team/team_name/datasets/jobs/0502eef6-a32e-45fa-8e3b-d20ecpabbcf0",
"status": "CREATED",
"created_at": "2021-01-01T00:00:00Z",
"created_by": "username",
"finished_at": "2021-01-01T00:00:00Z",
"query": "print('Hello, World!')",
"query_type": "PYTHON",
"team": "TeamName",
"name": "QueryName",
"workers": 1,
"python_version": "3.12",
"requirements": "numpy==1.24.0",
"repository": "https://github.com/user/repo",
"environment": {
"ENV_NAME": "ENV_VALUE"
},
"exit_code": 0,
"error_message": "Error message"
}
JobInput
{
"query": "print('Hello, World!')",
"query_type": "PYTHON",
"team_name": "TeamName",
"environment": "ENV_NAME=ENV_VALUE",
"workers": 1,
"query_name": "QueryName",
"files": [
"2",
"3"
],
"python_version": "3.12",
"requirements": "numpy==1.24.0",
"repository": "https://github.com/user/repo",
"priority": 1,
"compute_cluster_name": "ComputeClusterName",
"compute_cluster_id": 1,
"start_after": "2021-01-01T00:00:00Z",
"cron_expression": "0 0 * * *",
"credentials_name": "CredentialsName"
}
ActionFeedback
JobCancelInput
UploadFileOutput
ComputeClusterOutput
{
"id": 1,
"name": "ComputeClusterName",
"status": "ACTIVE",
"cloud_provider": "AWS",
"cloud_credentials": "CredentialsName",
"is_active": true,
"default": true,
"max_workers": 1
}
JobPipelineOutput
{
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
JobPipelineRunOutput
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
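parent_job_ids links each run to the runs it depends on, so job_runs describes a small dependency graph. A purely illustrative sketch, using only the fields shown above, that yields the runs in dependency order:

```python
from collections import deque

def dependency_order(job_runs):
    """Yield job runs so that every run appears after its parents (Kahn's algorithm)."""
    runs = {run["job_id"]: run for run in job_runs}
    remaining = {
        job_id: {p for p in run["parent_job_ids"] if p in runs}
        for job_id, run in runs.items()
    }
    ready = deque(job_id for job_id, parents in remaining.items() if not parents)
    while ready:
        job_id = ready.popleft()
        yield runs[job_id]
        for other_id, parents in remaining.items():
            if job_id in parents:
                parents.discard(job_id)
                if not parents:
                    ready.append(other_id)

# Example with a JobPipelineOutput-shaped dict:
# for run in dependency_order(pipeline["job_runs"]):
#     print(run["name"], run["status"])
```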
PipelineOutput
{
"ok": true,
"pipeline": {
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"status": "RUNNING",
"job_runs": [
{
"job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
"status": "string",
"parent_job_ids": [
"497f6eca-6276-4993-bfeb-53cbbbba6f08"
],
"name": "string",
"created_job_id": "31f5fe21-b847-420d-8689-ef5a1d3104a4"
}
],
"completed": 10,
"total": 16,
"error_message": "Error message",
"error_stack": "Error stack",
"triggered_from": "@amritghimire.project.dataset",
"name": "rathe-kyat",
"created_at": "2021-01-01T00:00:00Z",
"updated_at": "2021-01-01T00:00:00Z"
}
}
TriggerPipelineInput
{
"datasets": [
"@[email protected]",
"@amritghimire.project_name.dataset_name",
"dataset_name",
"[email protected]"
],
"team_name": "team_name",
"environment": "ENV_NAME=ENV_VALUE",
"review": false
}
PipelineInput
{
"team_name": "team_name",
"pipeline_id": "ec036e81-7903-4e4d-bbfa-ac8516341cf0",
"name": "string"
}