Compare commits


3 Commits

Sawjan Gurung
98b6c53270 ci: fix unwanted workflow skip in the cron pipelines (#2117)
* ci: run workflow if not found in the list

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: suffix event in cache key to separate cron and push pipelines

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: prefix test workflow with test

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: create empty cache dir for unit tests

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

---------

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-02-12 17:49:21 +05:45
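
The new cache-key scheme from this commit shows up in the scripts below; a minimal sketch of the resulting layout (path pattern taken from this diff):

# push and cron pipelines of the same commit now resolve to distinct S3 prefixes:
#   $PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/<sha>-push/...
#   $PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/<sha>-cron/...
CACHE_KEY="$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA-$CI_PIPELINE_EVENT"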
Sawjan Gurung
fe84d0dec4 [POC] ci: skip previously passed workflows on pipeline restart (#2099)
* ci: add pipeline info check scripts

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: prefix test pipelines with test

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: implement skip-on-pass for test workflows

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: add cache purge workflow

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

---------

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-02-12 17:49:07 +05:45
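
The check script in this diff records its decision by appending SKIP_WORKFLOW=true to .woodpecker.env; the consuming side is not part of the visible diff, so the following guard step is only a hypothetical sketch:

# hypothetical first step of a test workflow: honor the skip flag
[ -f .woodpecker.env ] && . ./.woodpecker.env
if [ "$SKIP_WORKFLOW" = "true" ]; then
    echo "[INFO] Workflow passed in the previous pipeline. Skipping."
    exit 0
fi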
Sawjan Gurung
17606da390 ci: generate api, cli and e2e pipelines in a similar way (#2095)
* ci: refactor ci config

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: refactor storage and configs

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

* ci: fix format

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>

---------

Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-02-12 17:46:59 +05:45
3 changed files with 637 additions and 330 deletions

File diff suppressed because it is too large.


@@ -0,0 +1,102 @@
// Decide whether the current workflow can be skipped because it already
// passed in the previous run of this pipeline (pipeline restart).
const fs = require("fs");

const CI_REPO_NAME = process.env.CI_REPO_NAME;
const CI_COMMIT_SHA = process.env.CI_COMMIT_SHA;
const CI_WORKFLOW_NAME = process.env.CI_WORKFLOW_NAME;
const CI_PIPELINE_EVENT = process.env.CI_PIPELINE_EVENT;

const opencloudBuildWorkflow = "build-opencloud-for-testing";
const webCacheWorkflows = ["cache-web", "cache-web-pnpm", "cache-browsers"];

// Pipeline info uploaded by the previous run of this pipeline. The event
// suffix keeps cron and push pipelines under separate cache keys.
const INFO_URL = `https://s3.ci.opencloud.eu/public/${CI_REPO_NAME}/pipelines/${CI_COMMIT_SHA}-${CI_PIPELINE_EVENT}/pipeline_info.json`;
// Names of all workflows in the given pipeline.
function getWorkflowNames(workflows) {
  return workflows.map((workflow) => workflow.name);
}

// Names of the workflows that did not finish with state "success".
function getFailedWorkflows(workflows) {
  return workflows
    .filter((workflow) => workflow.state !== "success")
    .map((workflow) => workflow.name);
}

// True if any failed workflow is a test workflow ("test-" prefix).
function hasFailingTestWorkflow(failedWorkflows) {
  return failedWorkflows.some((name) => name.startsWith("test-"));
}

// True if any failed workflow is an e2e test workflow ("test-e2e-" prefix).
function hasFailingE2eTestWorkflow(failedWorkflows) {
  return failedWorkflows.some((name) => name.startsWith("test-e2e-"));
}
async function main() {
  const infoResponse = await fetch(INFO_URL);
  if (infoResponse.status === 404) {
    // No info recorded yet for this commit and event: first run, not a restart.
    console.log("[INFO] No matching previous pipeline found. Continue...");
    process.exit(0);
  } else if (!infoResponse.ok) {
    console.error(
      "[ERROR] Failed to fetch previous pipeline info:" +
        `\n URL: ${INFO_URL}\n Status: ${infoResponse.status}`
    );
    process.exit(1);
  }

  const info = await infoResponse.json();
  // Log the fetched pipeline info for debugging.
  console.log(info);

  if (info.status === "success") {
    // A fully green pipeline was restarted deliberately: run everything again.
    console.log(
      "[INFO] All workflows passed in previous pipeline. Full restart. Continue..."
    );
    process.exit(0);
  }
  const allWorkflows = getWorkflowNames(info.workflows);
  const failedWorkflows = getFailedWorkflows(info.workflows);

  // NOTE: implemented for test workflows only for now
  // // run the build workflow if any test workflow has failed
  // if (
  //   CI_WORKFLOW_NAME === opencloudBuildWorkflow &&
  //   hasFailingTestWorkflow(failedWorkflows)
  // ) {
  //   process.exit(0);
  // }
  // // run the web cache workflows if any e2e test workflow has failed
  // if (
  //   webCacheWorkflows.includes(CI_WORKFLOW_NAME) &&
  //   hasFailingE2eTestWorkflow(failedWorkflows)
  // ) {
  //   process.exit(0);
  // }

  // Workflows that did not exist in the previous pipeline must always run.
  if (!allWorkflows.includes(CI_WORKFLOW_NAME)) {
    process.exit(0);
  }

  if (!failedWorkflows.includes(CI_WORKFLOW_NAME)) {
    // Passed last time: signal the following steps to skip this workflow.
    console.log("[INFO] Workflow passed in previous pipeline. Skip...");
    fs.appendFileSync(".woodpecker.env", "SKIP_WORKFLOW=true\n");
    process.exit(0);
  }

  console.log("[INFO] Restarting previously failed workflow. Continue...");
}

main().catch((err) => {
  console.error("[ERROR] Pipeline info check failed:", err);
  process.exit(1);
});
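
The script above reads only a few fields from the stored pipeline info: status at the top level, plus name and state per entry in workflows. A sketch of a minimal pipeline_info.json it can act on (workflow names are made up):

cat > pipeline_info.json <<'EOF'
{
  "status": "failure",
  "workflows": [
    { "name": "test-unit", "state": "success" },
    { "name": "test-e2e-chrome", "state": "failure" }
  ]
}
EOF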


@@ -0,0 +1,29 @@
#!/bin/bash
set -e

# S3 prefix for this commit's pipeline data; the event suffix keeps
# cron and push pipelines apart.
CACHE_KEY="$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA-$CI_PIPELINE_EVENT"

mc alias set s3 "$MC_HOST" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY"

# look up the number of the previous pipeline for this commit and event
URL="https://s3.ci.opencloud.eu/$CACHE_KEY/prev_pipeline"
status=$(curl -s -o prev_pipeline "$URL" -w '%{http_code}')
if [ "$status" = "200" ]; then
    # prev_pipeline sets PREV_PIPELINE_NUMBER
    source prev_pipeline
    # extract the repo id from the pipeline URL (".../repos/<id>/...")
    REPO_ID=$(printf '%s' "$CI_PIPELINE_URL" | sed 's|.*/repos/\([0-9]*\)/.*|\1|')
    p_status=$(curl -s -o pipeline_info.json "$CI_SYSTEM_URL/api/repos/$REPO_ID/pipelines/$PREV_PIPELINE_NUMBER" -w "%{http_code}")
    if [ "$p_status" != "200" ]; then
        echo -e "[ERROR] Failed to fetch previous pipeline info.\n URL: $CI_SYSTEM_URL/api/repos/$REPO_ID/pipelines/$PREV_PIPELINE_NUMBER\n Status: $p_status"
        exit 1
    fi
    # upload the previous pipeline's info for the check script to read
    mc cp -a pipeline_info.json "s3/$CACHE_KEY/"
fi

# record the current pipeline number for the next pipeline of this commit
echo "PREV_PIPELINE_NUMBER=$CI_PIPELINE_NUMBER" > prev_pipeline
mc cp -a prev_pipeline "s3/$CACHE_KEY/"
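
One of the commits above also adds a cache purge workflow whose diff is suppressed in this compare; a hypothetical cleanup in the same spirit would drop the per-commit prefix once it is no longer needed:

# hypothetical purge step: remove everything stored under this pipeline's key
mc rm --recursive --force "s3/$CACHE_KEY/"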