Remove AWS SDKv2
Ic3w0lf committed Nov 13, 2024
commit ada1fc2b59d7afde1a5fd55c8b30f96629ee5421
2 changes: 1 addition & 1 deletion README.md
@@ -107,7 +107,7 @@ Will pass the following command to the container on the AWS ECS Fargate task:
| tail-logs | If set to true, will try to extract the logConfiguration for the first container in the task definition. If `override-container` is passed, it will extract the logConfiguration from that container. Tailing logs is only possible if the provided container uses the `awslogs` logDriver. | `false` | true |
| task-wait-until-stopped | Whether to wait for the task to stop before finishing the action. If set to false, the action will finish immediately after the task reaches the `RUNNING` state (fire and forget). | `false` | true |
| task-start-max-wait-time | How long to wait for the task to start (i.e. reach the `RUNNING` state) in seconds. If the task does not start within this time, the pipeline will fail. | `false` | 120 |
| task-stopped-max-wait-time | How long to wait for the task to stop (i.e. reach the `STOPPED` state) in seconds. The task will not be canceled after this time, the pipeline will just be marked as failed. | `false` | 300 |
| task-stop-max-wait-time | How long to wait for the task to stop (i.e. reach the `STOPPED` state) in seconds. The task will not be canceled after this time, the pipeline will just be marked as failed. | `false` | 300 |
| task-check-state-delay | How long to wait between each AWS API call to check the current state of the task, in seconds. This is useful to avoid running into AWS rate limits. **However**, setting this too high might cause the action to miss the time window in which your task is in the `RUNNING` state (if your task is very short-lived) and can cause the action to fail. | `false` | 6 |
<!-- action-docs-inputs -->
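To illustrate how these inputs fit together, here is a minimal sketch of a workflow step. The `uses:` reference is a placeholder, and other required inputs (such as the cluster and task definition) are omitted; see the usage section above for the action's real inputs.

```yaml
# Minimal sketch — the `uses:` path is a placeholder, and required inputs such
# as the cluster and task definition are omitted for brevity.
- name: Run ECS Fargate task
  uses: your-org/your-ecs-run-task-action@v1  # placeholder, not the real action path
  with:
    tail-logs: true                # stream `awslogs` output while the task runs
    task-wait-until-stopped: true  # block until the task reaches `STOPPED`
    task-start-max-wait-time: 120  # seconds to wait for the `RUNNING` state
    task-stop-max-wait-time: 300   # seconds to wait for the `STOPPED` state (renamed in this commit)
    task-check-state-delay: 6      # seconds between ECS state checks
```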

2 changes: 1 addition & 1 deletion action.yml
@@ -74,7 +74,7 @@ inputs:
required: false
default: 120

task-stopped-max-wait-time:
task-stop-max-wait-time:
description: >-
How long to wait for the task to stop (i.e. reach the `STOPPED` state) in seconds. The task will not be canceled
after this time, the pipeline will just be marked as failed.
61 changes: 31 additions & 30 deletions dist/index.js
@@ -56480,7 +56480,7 @@ const main = async () => {
// Inputs: Waiters
const taskWaitUntilStopped = core.getBooleanInput('task-wait-until-stopped', {required: false});
const taskStartMaxWaitTime = parseInt(core.getInput('task-start-max-wait-time', {required: false}));
const taskStoppedMaxWaitTime = parseInt(core.getInput('task-stopped-max-wait-time', {required: false}));
const taskStopMaxWaitTime = parseInt(core.getInput('task-stop-max-wait-time', {required: false}));
const taskCheckStateDelay = parseInt(core.getInput('task-check-state-delay', {required: false}));

// Build Task parameters
@@ -56558,31 +56558,11 @@ const main = async () => {
core.setOutput('task-id', taskId);
core.info(`Starting Task with ARN: ${taskArn}\n`);

try {
core.debug(`Waiting for task to be in running state. Waiting for ${taskStartMaxWaitTime} seconds.`);
await waitUntilTasksRunning({
client: ecs,
maxWaitTime: taskStartMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {cluster, tasks: [taskArn]});
} catch (error) {
core.setFailed(`Task did not start successfully. Error: ${error.name}. State: ${error.state}.`);
return;
}

// If taskWaitUntilStopped is false, we can bail out here because we can not tail logs or have any
// information on the exitCodes or status of the task
if (!taskWaitUntilStopped) {
core.info(`Task is running. Exiting without waiting for task to stop.`);
return;
}

// Get CWLogsClient
// Create CWLogsClient
let CWLogClient = new CloudWatchLogsClient();

// Only create logFilterStream if tailLogs is enabled, and we wait for the task to stop in the pipeline
if (tailLogs) {
// Only create StartLiveTailCommand if tailLogs is enabled, and we wait for the task to stop in the pipeline
if (tailLogs && taskWaitUntilStopped) {
core.debug(`Logging enabled. Getting logConfiguration from TaskDefinition.`)
let taskDef = await ecs.describeTaskDefinition({taskDefinition: taskDefinition});
taskDef = taskDef.taskDefinition
@@ -56611,13 +56591,15 @@ const main = async () => {
const logGroupIdentifier = `arn:aws:logs:${logRegion}:${accountId}:log-group:${logGroup}`;
core.debug(`LogGroupARN for '${container.name}' is: '${logGroupIdentifier}'.`);

// We will use the full logStreamName as a prefix filter. This way the SDK will not crash
// if the logStream does not exist yet.
const logStreamName = [container.logConfiguration.options['awslogs-stream-prefix'], container.name, taskId].join('/')

// Start Live Tail
try {
const response = await CWLogClient.send(new StartLiveTailCommand({
logGroupIdentifiers: [logGroupIdentifier],
logStreamNames: [logStreamName]
logStreamNamePrefixes: [logStreamName]
}));

await handleCWResponseAsync(response);
@@ -56632,18 +56614,38 @@ const main = async () => {
}

try {
core.debug(`Waiting for task to finish. Waiting for ${taskStoppedMaxWaitTime} seconds.`);
core.debug(`Waiting for task to be in running state. Waiting for ${taskStartMaxWaitTime} seconds.`);
await waitUntilTasksRunning({
client: ecs,
maxWaitTime: taskStartMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {cluster, tasks: [taskArn]});
} catch (error) {
core.setFailed(`Task did not start successfully. Error: ${error.message}.`);
process.exit(1);
}

// If taskWaitUntilStopped is false, we can bail out here because we can not tail logs or have any
// information on the exitCodes or status of the task
if (!taskWaitUntilStopped) {
core.info(`Task is running. Exiting without waiting for task to stop.`);
process.exit(0);
}

try {
core.debug(`Waiting for task to finish. Waiting for ${taskStopMaxWaitTime} seconds.`);
await waitUntilTasksStopped({
client: ecs,
maxWaitTime: taskStoppedMaxWaitTime,
maxWaitTime: taskStopMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {
cluster,
tasks: [taskArn],
});
} catch (error) {
core.setFailed(`Task did not stop successfully. Error: ${error.name}. State: ${error.state}.`);
core.setFailed(`Task did not stop successfully. Error: ${error.message}.`);
}

// Close LogStream and store output
@@ -56688,10 +56690,9 @@ async function handleCWResponseAsync(response) {
}
} catch (err) {
// If we close the connection, we will get an error with message 'aborted' which we can ignore as it will
// just show as an error in the logs.
// just show as an error in the GHA logs.
if (err.message === 'aborted') {
core.debug("CWLiveTailSession aborted.");

return;
}

61 changes: 31 additions & 30 deletions index.js
@@ -32,7 +32,7 @@ const main = async () => {
// Inputs: Waiters
const taskWaitUntilStopped = core.getBooleanInput('task-wait-until-stopped', {required: false});
const taskStartMaxWaitTime = parseInt(core.getInput('task-start-max-wait-time', {required: false}));
const taskStoppedMaxWaitTime = parseInt(core.getInput('task-stopped-max-wait-time', {required: false}));
const taskStopMaxWaitTime = parseInt(core.getInput('task-stop-max-wait-time', {required: false}));
const taskCheckStateDelay = parseInt(core.getInput('task-check-state-delay', {required: false}));

// Build Task parameters
@@ -110,31 +110,11 @@ const main = async () => {
core.setOutput('task-id', taskId);
core.info(`Starting Task with ARN: ${taskArn}\n`);

try {
core.debug(`Waiting for task to be in running state. Waiting for ${taskStartMaxWaitTime} seconds.`);
await waitUntilTasksRunning({
client: ecs,
maxWaitTime: taskStartMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {cluster, tasks: [taskArn]});
} catch (error) {
core.setFailed(`Task did not start successfully. Error: ${error.name}. State: ${error.state}.`);
return;
}

// If taskWaitUntilStopped is false, we can bail out here because we can not tail logs or have any
// information on the exitCodes or status of the task
if (!taskWaitUntilStopped) {
core.info(`Task is running. Exiting without waiting for task to stop.`);
return;
}

// Get CWLogsClient
// Create CWLogsClient
let CWLogClient = new CloudWatchLogsClient();

// Only create logFilterStream if tailLogs is enabled, and we wait for the task to stop in the pipeline
if (tailLogs) {
// Only create StartLiveTailCommand if tailLogs is enabled, and we wait for the task to stop in the pipeline
if (tailLogs && taskWaitUntilStopped) {
core.debug(`Logging enabled. Getting logConfiguration from TaskDefinition.`)
let taskDef = await ecs.describeTaskDefinition({taskDefinition: taskDefinition});
taskDef = taskDef.taskDefinition
@@ -163,13 +143,15 @@ const main = async () => {
const logGroupIdentifier = `arn:aws:logs:${logRegion}:${accountId}:log-group:${logGroup}`;
core.debug(`LogGroupARN for '${container.name}' is: '${logGroupIdentifier}'.`);

// We will use the full logStreamName as a prefix filter. This way the SDK will not crash
// if the logStream does not exist yet.
const logStreamName = [container.logConfiguration.options['awslogs-stream-prefix'], container.name, taskId].join('/')

// Start Live Tail
try {
const response = await CWLogClient.send(new StartLiveTailCommand({
logGroupIdentifiers: [logGroupIdentifier],
logStreamNames: [logStreamName]
logStreamNamePrefixes: [logStreamName]
}));

await handleCWResponseAsync(response);
@@ -184,18 +166,38 @@ const main = async () => {
}

try {
core.debug(`Waiting for task to finish. Waiting for ${taskStoppedMaxWaitTime} seconds.`);
core.debug(`Waiting for task to be in running state. Waiting for ${taskStartMaxWaitTime} seconds.`);
await waitUntilTasksRunning({
client: ecs,
maxWaitTime: taskStartMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {cluster, tasks: [taskArn]});
} catch (error) {
core.setFailed(`Task did not start successfully. Error: ${error.message}.`);
process.exit(1);
}

// If taskWaitUntilStopped is false, we can bail out here because we can not tail logs or have any
// information on the exitCodes or status of the task
if (!taskWaitUntilStopped) {
core.info(`Task is running. Exiting without waiting for task to stop.`);
process.exit(0);
}

try {
core.debug(`Waiting for task to finish. Waiting for ${taskStopMaxWaitTime} seconds.`);
await waitUntilTasksStopped({
client: ecs,
maxWaitTime: taskStoppedMaxWaitTime,
maxWaitTime: taskStopMaxWaitTime,
maxDelay: taskCheckStateDelay,
minDelay: taskCheckStateDelay,
}, {
cluster,
tasks: [taskArn],
});
} catch (error) {
core.setFailed(`Task did not stop successfully. Error: ${error.name}. State: ${error.state}.`);
core.setFailed(`Task did not stop successfully. Error: ${error.message}.`);
}

// Close LogStream and store output
@@ -240,10 +242,9 @@ async function handleCWResponseAsync(response) {
}
} catch (err) {
// If we close the connection, we will get an error with message 'aborted' which we can ignore as it will
// just show as an error in the logs.
// just show as an error in the GHA logs.
if (err.message === 'aborted') {
core.debug("CWLiveTailSession aborted.");

return;
}

19 changes: 17 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.