Compare commits

...

8 Commits

Author SHA1 Message Date
Saw-jan
d2b504caa9 ci: prefix test pipelines with test
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 12:59:59 +05:45
Saw-jan
957fa70ec6 ci: fix script command
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 12:35:31 +05:45
Saw-jan
cea9336dde test
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 12:31:28 +05:45
Saw-jan
53f1ace53d ci: use nodejs script
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 12:31:04 +05:45
Saw-jan
125ce0b406 ci: upload info
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 10:09:40 +05:45
Saw-jan
19ef9bdfbe ci: upload info
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 10:09:40 +05:45
Saw-jan
3608d56631 ci: add script to evaluate pipeline
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 10:09:40 +05:45
Saw-jan
7607be2f99 ci: check pipeline info
Signed-off-by: Saw-jan <saw.jan.grg3e@gmail.com>
2026-01-02 10:09:40 +05:45
3 changed files with 210 additions and 43 deletions

View File

@@ -103,13 +103,13 @@ config = {
"basic": {
"suites": [
"apiArchiver",
"apiContract",
"apiCors",
"apiAsyncUpload",
"apiDownloads",
"apiDepthInfinity",
"apiLocks",
"apiActivities",
# "apiContract",
# "apiCors",
# "apiAsyncUpload",
# "apiDownloads",
# "apiDepthInfinity",
# "apiLocks",
# "apiActivities",
],
"skip": False,
},
@@ -117,7 +117,7 @@ config = {
"suites": [
"apiSettings",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
"emailNeeded": True,
"extraTestEnvironment": {
@@ -135,7 +135,7 @@ config = {
},
"graph": {
"suites": [
"apiGraph",
# "apiGraph",
"apiServiceAvailability",
# skip tests for collaborativePosix. see https://github.com/opencloud-eu/opencloud/issues/2036
#"collaborativePosix",
@@ -147,38 +147,38 @@ config = {
"suites": [
"apiGraphUserGroup",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
},
"spaces": {
"suites": [
"apiSpaces",
],
"skip": False,
"skip": True,
},
"spacesShares": {
"suites": [
"apiSpacesShares",
],
"skip": False,
"skip": True,
},
"spacesDavOperation": {
"suites": [
"apiSpacesDavOperation",
],
"skip": False,
"skip": True,
},
"search1": {
"suites": [
"apiSearch1",
],
"skip": False,
"skip": True,
},
"search2": {
"suites": [
"apiSearch2",
],
"skip": False,
"skip": True,
},
"sharingNg": {
"suites": [
@@ -186,23 +186,23 @@ config = {
"apiSharingNg1",
"apiSharingNg2",
],
"skip": False,
"skip": True,
},
"sharingNgShareInvitation": {
"suites": [
"apiSharingNgShareInvitation",
],
"skip": False,
"skip": True,
},
"sharingNgLinkShare": {
"suites": [
"apiSharingNgLinkSharePermission",
"apiSharingNgLinkShareRoot",
],
"skip": False,
"skip": True,
},
"accountsHashDifficulty": {
"skip": False,
"skip": True,
"suites": [
"apiAccountsHashDifficulty",
],
@@ -212,7 +212,7 @@ config = {
"suites": [
"apiNotification",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
"emailNeeded": True,
"extraTestEnvironment": {
@@ -232,7 +232,7 @@ config = {
"suites": [
"apiAntivirus",
],
"skip": False,
"skip": True,
"antivirusNeeded": True,
"generateVirusFiles": True,
"extraServerEnvironment": {
@@ -248,14 +248,14 @@ config = {
"suites": [
"apiSearchContent",
],
"skip": False,
"skip": True,
"tikaNeeded": True,
},
"ocm": {
"suites": [
"apiOcm",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
"federationServer": True,
"emailNeeded": True,
@@ -281,7 +281,7 @@ config = {
"suites": [
"apiCollaboration",
],
"skip": False,
"skip": True,
"collaborationServiceNeeded": True,
"extraServerEnvironment": {
"GATEWAY_GRPC_ADDR": "0.0.0.0:9142",
@@ -291,14 +291,14 @@ config = {
"suites": [
"apiAuthApp",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
},
"cliCommands": {
"suites": [
"cliCommands",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
"antivirusNeeded": True,
"generateVirusFiles": True,
@@ -314,7 +314,7 @@ config = {
"suites": [
"apiTenancy",
],
"skip": False,
"skip": True,
"withRemotePhp": [True],
"ldapNeeded": True,
"extraTestEnvironment": {
@@ -348,12 +348,12 @@ config = {
"coreApiTests": {
"numberOfParts": 7,
"skip": False,
"skipExceptParts": [],
"skipExceptParts": [1],
"storages": ["posix"],
},
"e2eTests": {
"part": {
"skip": False,
"skip": True,
"totalParts": 4, # divide and run all suites in parts (divide pipelines)
# suites to skip
"xsuites": [
@@ -377,7 +377,7 @@ config = {
},
"e2eMultiService": {
"testSuites": {
"skip": False,
"skip": True,
"suites": [
"smoke",
"shares",
@@ -492,6 +492,8 @@ def main(ctx):
none
"""
return savePipelineNumber(ctx) + test()
if ctx.build.event == "cron" and ctx.build.sender == "translation-sync":
return translation_sync(ctx)
@@ -566,9 +568,58 @@ def main(ctx):
pipelineSanityChecks(pipelines)
return pipelines
def savePipelineNumber(ctx):
base_url = "https://raw.githubusercontent.com/%s" % repo_slug
script_link = "%s/%s/tests/config/woodpecker/upload_pipeline_info.sh" % (base_url, ctx.build.commit)
return [{
"name": "save-pipeline-info",
"skip_clone": True,
"steps": [{
"name": "upload-info",
"image": MINIO_MC,
"environment": MINIO_MC_ENV,
"commands": [
"curl -s -o upload_pipeline_info.sh %s" % script_link,
"bash -x upload_pipeline_info.sh",
],
}],
"when": [
{
"event": ["push", "manual"],
"branch": ["main", "stable-*"],
},
event["tag"],
event["cron"],
event["pull_request"],
],
}]
def test():
return [{
"name": "test-pipeline",
"steps": [{
"name": "get-previous-pipeline",
"image": OC_CI_NODEJS % DEFAULT_NODEJS_VERSION,
"commands": [
"node --version",
"node tests/config/woodpecker/evaluate_pipeline.js",
],
}],
"depends_on": ["save-pipeline-info"],
"when": [
{
"event": ["push", "manual"],
"branch": ["main", "stable-*"],
},
event["tag"],
event["cron"],
event["pull_request"],
],
}]
def cachePipeline(ctx, name, steps):
return {
"name": "build-%s-cache" % name,
"name": "cache-%s" % name,
"steps": steps,
"when": [
{
@@ -790,7 +841,7 @@ def testOpencloud(ctx):
]
return {
"name": "linting_and_unitTests",
"name": "test-lint-unit",
"steps": steps,
"when": [
event["base"],
@@ -837,7 +888,7 @@ def scanOpencloud(ctx):
def buildOpencloudBinaryForTesting(ctx):
return [{
"name": "build_opencloud_binary_for_testing",
"name": "build-opencloud-for-testing",
"steps": makeNodeGenerate("") +
makeGoGenerate("") +
build() +
@@ -1003,7 +1054,7 @@ def codestyle(ctx):
def cs3ApiTests(ctx, storage, accounts_hash_difficulty = 4):
return {
"name": "cs3ApiTests-%s" % storage,
"name": "test-cs3-API-%s" % storage,
"steps": restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
opencloudServer(storage, accounts_hash_difficulty, deploy_type = "cs3api_validator") +
[
@@ -1104,7 +1155,7 @@ def wopiValidatorTests(ctx, storage, wopiServerType, accounts_hash_difficulty =
})
return {
"name": "wopiValidatorTests-%s-%s" % (wopiServerType, storage),
"name": "test-wopi-validator-%s-%s" % (wopiServerType, storage),
"services": fakeOffice(),
"steps": restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
waitForServices("fake-office", ["fakeoffice:8080"]) +
@@ -1188,9 +1239,9 @@ def localApiTestPipeline(ctx):
for storage in params["storages"]:
for run_with_remote_php in params["withRemotePhp"]:
for run_with_watch_fs_enabled in params["enableWatchFs"]:
pipeline_name = "API"
pipeline_name = "test-API"
if name.startswith("cli"):
pipeline_name = "CLI"
pipeline_name = "test-CLI"
pipeline_name += "-%s" % name
if not run_with_remote_php:
pipeline_name += "-withoutRemotePhp"
@@ -1319,7 +1370,7 @@ def coreApiTestPipeline(ctx):
for run_with_remote_php in params["withRemotePhp"]:
for run_with_watch_fs_enabled in params["enableWatchFs"]:
if not debugPartsEnabled or (debugPartsEnabled and runPart in debugParts):
pipeline_name = "Core-API-%s" % runPart
pipeline_name = "test-Core-API-%s" % runPart
if not run_with_remote_php:
pipeline_name += "-withoutRemotePhp"
pipeline_name += "-%s" % storage
@@ -1501,7 +1552,7 @@ def e2eTestPipeline(ctx):
"bash run-e2e.sh %s --run-part %d" % (e2e_args, run_part),
]
pipelines.append({
"name": "e2e-tests-%s-%s-%s%s" % (name, run_part, storage, "-watchfs" if watch_fs_enabled else ""),
"name": "test-e2e-%s-%s-%s%s" % (name, run_part, storage, "-watchfs" if watch_fs_enabled else ""),
"steps": steps_before + [run_e2e] + steps_after,
"depends_on": getPipelineNames(buildOpencloudBinaryForTesting(ctx) + buildWebCache(ctx)),
"when": e2e_trigger,
@@ -1509,7 +1560,7 @@ def e2eTestPipeline(ctx):
else:
step_e2e["commands"].append("bash run-e2e.sh %s" % e2e_args)
pipelines.append({
"name": "e2e-tests-%s-%s%s" % (name, storage, "-watchfs" if watch_fs_enabled else ""),
"name": "test-e2e-%s-%s%s" % (name, storage, "-watchfs" if watch_fs_enabled else ""),
"steps": steps_before + [step_e2e] + steps_after,
"depends_on": getPipelineNames(buildOpencloudBinaryForTesting(ctx) + buildWebCache(ctx)),
"when": e2e_trigger,
@@ -2285,9 +2336,9 @@ def opencloudServer(storage = "decomposed", accounts_hash_difficulty = 4, depend
"%s/bin/ocwrapper serve --bin %s --url %s --admin-username admin --admin-password admin" % (dirs["ocWrapper"], dirs["opencloudBin"], environment["OC_URL"]),
]
else:
server_commands += [
server_commands.append(
"%s server" % dirs["opencloudBin"],
]
)
wait_for_opencloud = {
"name": "wait-for-%s" % container_name,
@@ -2674,7 +2725,7 @@ def litmus(ctx, storage):
litmusCommand = "/usr/local/bin/litmus-wrapper"
result = {
"name": "litmus",
"name": "test-litmus",
"steps": restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
opencloudServer(storage) +
setupForLitmus() +

View File

@@ -0,0 +1,89 @@
// const INFO_URL="https://s3.ci.opencloud.eu/public/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/pipeline_info.json"
// CI metadata injected into the step environment by the CI system.
const CI_REPO_NAME = process.env.CI_REPO_NAME;
const CI_COMMIT_SHA = process.env.CI_COMMIT_SHA;
const CI_WORKFLOW_NAME = process.env.CI_WORKFLOW_NAME;
// Workflow names that get special re-run rules in main(): the build
// workflow reruns when any "test-" workflow failed, the web cache
// workflows rerun when any "test-e2e-" workflow failed.
const opencloudBuildWorkflow = "build-opencloud-for-testing";
const webCacheWorkflows = ["cache-web", "cache-web-pnpm", "cache-browsers"];
// NOTE(review): this unconditional exit makes everything below dead code.
// Exit code 78 is used throughout this file to mean "skip this workflow"
// (see the "Skip..." branch in main()) — presumably the CI system's
// skip exit code; confirm. Looks like temporary test scaffolding that
// should be removed before this script goes live.
console.log("[INFO] skip...");
process.exit(78);
// const INFO_URL = `https://s3.ci.opencloud.eu/public/${CI_REPO_NAME}/pipelines/${CI_COMMIT_SHA}/pipeline_info.json`;
// NOTE(review): hard-coded pin to one specific commit's pipeline info —
// presumably for testing; the commented-out template above appears to be
// the intended production URL. Confirm before merging.
const INFO_URL =
"https://s3.ci.opencloud.eu/public/opencloud/pipelines/fcbcb3d9cc73fcd1cab49774632a92b531718a76/pipeline_info.json";
// Return the names of all workflows whose state is anything other
// than "success".
function getFailedWorkflows(workflows) {
  return workflows
    .filter((workflow) => workflow.state !== "success")
    .map((workflow) => workflow.name);
}
// True if any of the given failed-workflow names belongs to a test
// workflow (name prefixed with "test-").
function hasFailingTestWorkflow(failedWorkflows) {
  return failedWorkflows.some((name) => name.startsWith("test-"));
}
// True if any of the given failed-workflow names belongs to an e2e test
// workflow (name prefixed with "test-e2e-").
function hasFailingE2eTestWorkflow(failedWorkflows) {
  return failedWorkflows.some((name) => name.startsWith("test-e2e-"));
}
// Decide whether the current workflow should run, based on the outcome of
// the previous pipeline for this commit (fetched from the public S3 bucket).
// Exit codes: 0 = run the workflow, 78 = skip it, 1 = hard error.
async function main() {
  const infoResponse = await fetch(INFO_URL);
  if (infoResponse.status === 404) {
    // No earlier pipeline uploaded its info for this commit — run everything.
    console.log("[INFO] No matching previous pipeline found. Continue...");
    process.exit(0);
  } else if (!infoResponse.ok) {
    console.error(
      "[ERROR] Failed to fetch previous pipeline info:" +
        `\n URL: ${INFO_URL}\n Status: ${infoResponse.status}`
    );
    process.exit(1);
  }
  const info = await infoResponse.json();
  // Previous pipeline was fully green: run this workflow normally.
  if (info.status === "success") {
    process.exit(0);
  }
  const failedWorkflows = getFailedWorkflows(info.workflows);
  // run the build workflow if any test workflow has failed
  if (
    CI_WORKFLOW_NAME === opencloudBuildWorkflow &&
    hasFailingTestWorkflow(failedWorkflows)
  ) {
    process.exit(0);
  }
  // run the web cache workflows if any e2e test workflow has failed
  if (
    webCacheWorkflows.includes(CI_WORKFLOW_NAME) &&
    hasFailingE2eTestWorkflow(failedWorkflows)
  ) {
    process.exit(0);
  }
  if (!failedWorkflows.includes(CI_WORKFLOW_NAME)) {
    console.log("[INFO] Workflow passed in previous pipeline. Skip...");
    process.exit(78);
  }
  console.log("[INFO] Restarting previously failed workflow. Continue...");
}

// A bare `main()` would leave a rejected promise (e.g. a network failure in
// fetch) as an unhandled rejection; fail the step explicitly instead.
main().catch((err) => {
  console.error("[ERROR]", err);
  process.exit(1);
});

View File

@@ -0,0 +1,27 @@
#!/bin/bash
set -e

# Publish pipeline bookkeeping to S3 so the NEXT pipeline for the same
# commit can look up which workflows failed in this one:
#  1. if a previous pipeline number exists for this commit, fetch that
#     pipeline's full info from the Woodpecker API and upload it as
#     pipeline_info.json;
#  2. record the current pipeline number as prev_pipeline for the next run.

# NOTE(review): this hard-coded SHA overrides the CI-provided
# $CI_COMMIT_SHA — looks like temporary test scaffolding; confirm it is
# removed before production use.
CI_COMMIT_SHA=fcbcb3d9cc73fcd1cab49774632a92b531718a76

# Register the S3 endpoint with the MinIO client using step credentials.
mc alias set s3 $MC_HOST $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY

# check previous pipeline
# prev_pipeline (if present) defines PREV_PIPELINE_NUMBER=<n>, written by
# the previous run of this script (see the last two lines below).
URL="https://s3.ci.opencloud.eu/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/prev_pipeline"
status=$(curl -s -o prev_pipeline "$URL" -w '%{http_code}')
if [ "$status" == "200" ];
then
source prev_pipeline
# Extract the numeric repo id from the pipeline URL (".../repos/<id>/...")
# for the Woodpecker API request below.
REPO_ID=$(printf '%s' "$CI_PIPELINE_URL" | sed 's|.*/repos/\([0-9]*\)/.*|\1|')
p_status=$(curl -s -o pipeline_info.json "$CI_SYSTEM_URL/api/repos/$REPO_ID/pipelines/$PREV_PIPELINE_NUMBER" -w "%{http_code}")
if [ "$p_status" != "200" ];
then
echo -e "[ERROR] Failed to fetch previous pipeline info.\n URL: $CI_SYSTEM_URL/api/repos/$REPO_ID/pipelines/$PREV_PIPELINE_NUMBER\n Status: $p_status"
exit 1
fi
# update previous pipeline info
mc cp -a pipeline_info.json "s3/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/"
fi

# upload current pipeline number for the next pipeline
echo "PREV_PIPELINE_NUMBER=$CI_PIPELINE_NUMBER" > prev_pipeline
mc cp -a prev_pipeline "s3/$PUBLIC_BUCKET/$CI_REPO_NAME/pipelines/$CI_COMMIT_SHA/"