diff --git a/dockerfiles/pipeline/Dockerfile b/dockerfiles/pipeline/Dockerfile
index 7496fa73479e9e6ee19c6c7dbd2442a4535b4b75..66274deae0d4e7cde8d870744277066382c6fd7e 100644
--- a/dockerfiles/pipeline/Dockerfile
+++ b/dockerfiles/pipeline/Dockerfile
@@ -73,8 +73,7 @@ RUN curl https://install.meteor.com/ | sh && \
 
 RUN yarn global add ts-node typescript
 
-RUN mkdir p /scripts
-COPY ./scripts/* /scripts/
+COPY ./scripts /scripts
 RUN chmod +x /scripts/*
 
 COPY package.json /scripts/
diff --git a/dockerfiles/pipeline/package.json b/dockerfiles/pipeline/package.json
index 09308137887f9197f5eff36b4c528789abdcc51b..63423d00fdc68f7f767472f0f403142d3358adf8 100644
--- a/dockerfiles/pipeline/package.json
+++ b/dockerfiles/pipeline/package.json
@@ -10,6 +10,7 @@
     "@gitbeaker/node": "^32.1.2",
     "lodash": "^4.17.21",
     "yaml": "^1.10.2",
-    "yargs": "^17.2.1"
+    "yargs": "^17.2.1",
+    "slugify": "^1.6.3"
   }
 }
diff --git a/dockerfiles/pipeline/scripts/extractValuesFromEnvVars.ts b/dockerfiles/pipeline/scripts/extractValuesFromEnvVars.ts
deleted file mode 100755
index 5bf2fc14054f936421b3b9464c372c57b2d49702..0000000000000000000000000000000000000000
--- a/dockerfiles/pipeline/scripts/extractValuesFromEnvVars.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env ts-node
-import { set } from "lodash";
-const values = {};
-
-const PREFIX = "DEFAULT_VALUE_";
-for (const [key, value] of Object.entries(process.env).filter(([key]) =>
-  key.startsWith(PREFIX)
-)) {
-  const path = key.replace(PREFIX, "").split("_");
-  let theValue;
-  try {
-    theValue = JSON.parse(value ?? "");
-  } catch (e) {
-    theValue = value;
-  }
-  set(values, path, theValue);
-}
-
-console.log(JSON.stringify(values, null, 2) ?? "");
diff --git a/dockerfiles/pipeline/scripts/getCommitInfo b/dockerfiles/pipeline/scripts/getCommitInfo
new file mode 100755
index 0000000000000000000000000000000000000000..f906ba919277a1084db4821d8a5e02f2395f0b7e
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/getCommitInfo
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+export BUILD_TAG="$(git describe --tags || git rev-parse HEAD)"
+export BUILD_ID="$BUILD_TAG"
+export BUILD_COMMIT="$(git rev-parse HEAD)"
+export BUILD_TIME="$(date)"
+echo "BUILD_TAG=$BUILD_TAG" >>build.env
+echo "BUILD_ID=$BUILD_ID" >>build.env
+echo "BUILD_COMMIT=$BUILD_COMMIT" >>build.env
+echo "BUILD_TIME=$BUILD_TIME" >>build.env
+echo '{"id":"'$BUILD_ID'","commit":"'$BUILD_COMMIT'","tag":"'$BUILD_TAG'","time":"'$BUILD_TIME'"}' >__build_info.json
diff --git a/dockerfiles/pipeline/scripts/kubernetesDeploy b/dockerfiles/pipeline/scripts/kubernetesDeploy
index 5a5782818f68718a6c38df584ac33fbaaa9f4bc7..f8f517a046b3c913dbccbdf8369953a4b1f3fab0 100755
--- a/dockerfiles/pipeline/scripts/kubernetesDeploy
+++ b/dockerfiles/pipeline/scripts/kubernetesDeploy
@@ -16,20 +16,10 @@ readarray -t helmArgsArray <<<"$helmArgsEvaluated"
 #Log the time until selfdestruct in case of review environment
 if [[ ${ENV_SHORT} == "review" ]]; then echo "NOTE - This deployment will stop itself after 2 weeks"; fi
 
-# find all values files and generate an array out of it
-extractValuesFromEnvVars.ts >$VALUES_PATH/default-values-from-env.yml
+echo "reading values files from $VALUES_PATH for $ENV_SHORT"
 
-if [[ ! -z "$CI_MERGE_REQUEST_IID" ]]; then
-  echo "extra values from mr:"
-  extractValuesFromMr.ts
-  extractValuesFromMr.ts >$VALUES_PATH/values-from-mr.yml
-  echo "----------------------"
-fi
-
-files=(default-values-from-env.yml values.yml values-$ENV_SHORT.yml values-$CI_ENVIRONMENT_SLUG.yml values-from-mr.yml)
-values=()
-for i in "${files[@]}"; do if [ -f $VALUES_PATH/$i ]; then values+=(--values $VALUES_PATH/$i); fi; done
-echo "reading values files from $VALUES_PATH. found ${values[@]}"
+printAllValues.ts -d $VALUES_PATH -e $ENV_SHORT
+printAllValues.ts -d $VALUES_PATH -e $ENV_SHORT >__all_values.yml
 
 echo "doing helm upgrade"
 helm3 upgrade --install "$RELEASE_NAME" /tmp/$HELM_GITLAB_CHART_NAME \
@@ -63,5 +53,5 @@ helm3 upgrade --install "$RELEASE_NAME" /tmp/$HELM_GITLAB_CHART_NAME \
   --set namespace="$NAMESPACE" \
   --namespace="$NAMESPACE" \
   ${helmArgsArray[@]} \
-  ${values[@]}
+  --values __all_values.yml
 echo "Deployment successful 😻"
diff --git a/dockerfiles/pipeline/scripts/monorepoGenerate.ts b/dockerfiles/pipeline/scripts/monorepoGenerate.ts
index 28f5439424b933f7af758339dad3ff0e2883409b..8a2c533bb8961ba4fd28fb6213c0754b6fab45e3 100755
--- a/dockerfiles/pipeline/scripts/monorepoGenerate.ts
+++ b/dockerfiles/pipeline/scripts/monorepoGenerate.ts
@@ -6,8 +6,13 @@ import { parse } from "yaml";
 import yargs from "yargs";
 import { hideBin } from "yargs/helpers";
+import { extractBuildEnv } from "./utils/extractBuildEnv";
 
 type Type = "test" | "deploy";
-const args = yargs(hideBin(process.argv)).argv as Record<string, unknown>;
+const args = yargs(hideBin(process.argv)).array("env").argv as Record<
+  string,
+  unknown
+>;
+
 const thisGitlabCi = parse(
   readFileSync(".gitlab-ci.yml", { encoding: "utf-8" })
 );
@@ -20,51 +25,79 @@ const subcis = readdirSync(".")
   }))
   .filter(({ ci }) => existsSync(ci));
 
-const generateSubPipelein = (type: Type) => {
-  return subcis.reduce((acc, { dirname, ci }) => {
-    const changes = [dirname + "/**/*"];
-    const rules = [
-      // same as rules "always", but with `changes` to only trigger changed branches
-      { if: "$CI_COMMIT_TAG" },
-      {
-        if: "$CI_COMMIT_MESSAGE =~ /^chore(release).*/",
-        when: "never",
-      },
-      { if: "$CI_COMMIT_BRANCH =~ /^[0-9]+.([0-9]+|x).x$/" },
-      {
-        if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH",
-        changes,
-      },
-      // add it manually when no changes
-      {
-        if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH",
-        when: "manual",
-      },
-      { if: "$CI_MERGE_REQUEST_ID", changes },
-      // add it manually when no changes
-      { if: "$CI_MERGE_REQUEST_ID", when: "manual" },
-    ];
+const makeConfig = async ({
+  subapp,
+  type,
+  ci,
+  env,
+}: {
+  subapp: string;
+  type: Type;
+  env: string;
+  ci: string;
+}) => {
+  const changes = [subapp + "/**/*"];
+  const rules = [
+    // same as rules "always", but with `changes` to only trigger changed branches
+    { if: "$CI_COMMIT_TAG" },
+    {
+      if: "$CI_COMMIT_MESSAGE =~ /^chore(release).*/",
+      when: "never",
+    },
+    { if: "$CI_COMMIT_BRANCH =~ /^[0-9]+.([0-9]+|x).x$/" },
+    {
+      if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH",
+      changes,
+    },
+    // add it manually when no changes
+    {
+      if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH",
+      when: "manual",
+    },
+    { if: "$CI_MERGE_REQUEST_ID", changes },
+    // add it manually when no changes
+    { if: "$CI_MERGE_REQUEST_ID", when: "manual" },
+  ];
+  return {
+    variables: {
+      // merge
+      ...thisGitlabCi.variables,
+      APP_DIR: subapp,
+      VALUES_DIR: subapp,
+      COMPONENT_NAME: subapp,
+      PARENT_PIPELINE_ID: "$CI_PIPELINE_ID",
+      [type.toUpperCase() + "_ONLY"]: "1",
+      ...(await extractBuildEnv(subapp, env)),
+    },
+    stage: type,
+    rules: rules,
+    trigger: {
+      include: ci,
+      strategy: "depend",
+    },
+  };
+};
+const generateSubPipeline = async (type: Type, envs: string[]) => {
+  return await envs.reduce(async (acc, env) => {
     return {
-      ...acc,
-      [type + " " + dirname]: {
-        variables: {
-          // merge
-          ...thisGitlabCi.variables,
-          APP_DIR: dirname,
-          VALUES_DIR: dirname,
-          COMPONENT_NAME: dirname,
-          [type.toUpperCase() + "_ONLY"]: "1",
-        },
-        stage: type,
-        rules: rules,
-        trigger: {
-          include: ci,
-          strategy: "depend",
-        },
-      },
+      ...(await acc),
+      ...(await subcis.reduce(
+        async (inneracc, { dirname, ci }) => ({
+          ...(await inneracc),
+          [`${env}-${dirname} ${type}`]: await makeConfig({
+            subapp: dirname,
+            ci,
+            type,
+            env,
+          }),
+        }),
+        {} as Promise<Record<string, unknown>>
+      )),
     };
-  }, {});
+  }, {} as Promise<Record<string, unknown>>);
 };
 
-console.log(JSON.stringify(generateSubPipelein(args.t as Type), null, 2));
+generateSubPipeline(args.t as Type, args.env as string[]).then((p) =>
+  console.log(JSON.stringify(p, null, 2))
+);
 
diff --git a/dockerfiles/pipeline/scripts/printAllValues.ts b/dockerfiles/pipeline/scripts/printAllValues.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a06a2def1b6e89a2cf7225f8184c2706d9291bfc
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/printAllValues.ts
@@ -0,0 +1,17 @@
+#!/usr/bin/env ts-node
+
+import yargs from "yargs";
+import { hideBin } from "yargs/helpers";
+import { extractAllValues } from "./utils/extractAllValues";
+
+const args = yargs(hideBin(process.argv)).argv as Record<string, unknown>;
+
+extractAllValues(args.d as string, args.e as string)
+  .then(async (values) => {
+    console.log(JSON.stringify(values, null, 2) ?? "");
+  })
+  .catch((e) => {
+    console.error("could not extract values", {
+      error: e,
+    });
+  });
diff --git a/dockerfiles/pipeline/scripts/printBuildEnv.ts b/dockerfiles/pipeline/scripts/printBuildEnv.ts
new file mode 100644
index 0000000000000000000000000000000000000000..209527a849a8d02d737f6f303c13cbcc6050011b
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/printBuildEnv.ts
@@ -0,0 +1,22 @@
+#!/usr/bin/env ts-node
+
+import { isObject } from "lodash";
+import yargs from "yargs";
+import { hideBin } from "yargs/helpers";
+import { extractBuildEnv } from "./utils/extractBuildEnv";
+
+const args = yargs(hideBin(process.argv)).argv as Record<string, unknown>;
+
+extractBuildEnv(args.d as string, args.e as string)
+  .then(async (values) => {
+    Object.entries(values).forEach(([key, value]) =>
+      console.log(
+        `${key}: "${isObject(value) ? JSON.stringify(value) : value}"`
+      )
+    );
+  })
+  .catch((e) => {
+    console.error("could not extract values", {
+      error: e,
+    });
+  });
diff --git a/dockerfiles/pipeline/scripts/utils/extractAllValues.ts b/dockerfiles/pipeline/scripts/utils/extractAllValues.ts
new file mode 100644
index 0000000000000000000000000000000000000000..36f7d4bdd29a607556cd5a7f26a2fd0bd94611f4
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/utils/extractAllValues.ts
@@ -0,0 +1,32 @@
+import { readFileSync } from "fs";
+import { merge } from "lodash";
+import { parse } from "yaml";
+import { extractValuesFromEnvVars } from "./extractValuesFromEnvVars";
+import { extractValuesFromMr } from "./extractValuesFromMr";
+
+const { CI_PROJECT_ID, CI_MERGE_REQUEST_IID } = process.env;
+
+const readYaml = (path: string) => {
+  try {
+    return parse(readFileSync(path, { encoding: "utf-8" }));
+  } catch (e) {
+    return {};
+  }
+};
+
+export const extractAllValues = async (
+  appDirectory: string,
+  env: string | null
+) => {
+  const valuesFromEnvVars = extractValuesFromEnvVars();
+  const valuesDefault = readYaml(appDirectory + "/values.yml");
+  const valuesEnv = env
+    ? readYaml(appDirectory + "/values-" + env + ".yml")
+    : {};
+  const valuesFromMR =
+    CI_PROJECT_ID && CI_MERGE_REQUEST_IID
+      ? await extractValuesFromMr(CI_PROJECT_ID, Number(CI_MERGE_REQUEST_IID))
+      : {};
+
+  return merge({}, valuesFromEnvVars, valuesDefault, valuesEnv, valuesFromMR);
+};
diff --git a/dockerfiles/pipeline/scripts/utils/extractBuildEnv.ts b/dockerfiles/pipeline/scripts/utils/extractBuildEnv.ts
new file mode 100644
index 0000000000000000000000000000000000000000..37fd4c5224a56d0ab25fb2b02d1296e0977a60ca
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/utils/extractBuildEnv.ts
@@ -0,0 +1,92 @@
+import { extractAllValues } from "./extractAllValues";
+import slugify from "slugify";
+
+type EnvVars = Record<string, string>;
+export const extractBuildEnv = async (
+  componentName: string,
+  env: string | null,
+  visitedReferencedApps: { [app: string]: EnvVars } = {} // this is just to avoid infinite recursion
+): Promise<EnvVars> => {
+  const values = await extractAllValues(componentName, env);
+  const {
+    APP_NAME,
+    CUSTOMER_NAME,
+    CI_COMMIT_REF_SLUG,
+    CI_MERGE_REQUEST_IID,
+    CI_ENVIRONMENT_SLUG,
+  } = process.env;
+
+  const isNotMonorepo = componentName === ".";
+
+  const KUBE_APP_NAME = CI_MERGE_REQUEST_IID
+    ? `${componentName}-${CI_COMMIT_REF_SLUG}`
+    : componentName;
+  const KUBE_APP_SLUG = isNotMonorepo
+    ? CI_ENVIRONMENT_SLUG
+    : slugify(KUBE_APP_NAME);
+  const CANONICAL_URL = `https://${APP_NAME}-${KUBE_APP_SLUG}.${CUSTOMER_NAME}.panter.cloud`;
+  const defaultBuildEnv = {
+    KUBE_APP_NAME,
+    CANONICAL_URL,
+    IMAGE_PULL_SECRET: `gitlab-registry-${componentName}`,
+    ROOT_URL: values?.application?.host || CANONICAL_URL,
+  };
+  const publicBuildEnv = values?.buildEnv?.public ?? {};
+  const merged = {
+    ...defaultBuildEnv,
+    ...publicBuildEnv,
+  };
+  if (process.env.DEBUG) {
+    console.log({ values, defaultBuildEnv, publicBuildEnv });
+  }
+  // extract referenced build env (only works in monorepo)
+  // like this
+  /*
+  fromComponents:
+    vendure:
+      API_URL: ROOT_URL
+  */
+  const fromComponents: {
+    [otherApp: string]: {
+      [ourKey: string]: string;
+    };
+  } = values?.buildEnv?.fromComponents ?? {};
+  const referencedBuildEnv = await Object.entries(fromComponents).reduce(
+    async (acc, [otherApp, mapping]) => {
+      if (process.env.DEBUG) {
+        console.log("extract reference build env", otherApp, mapping);
+      }
+      if (!mapping) {
+        return await acc;
+      }
+
+      // prevent infinite loop
+      const otherBuildEnv =
+        visitedReferencedApps[otherApp] ??
+        (await extractBuildEnv(otherApp, env, {
+          ...visitedReferencedApps,
+          [componentName]: merged,
+        }));
+      if (process.env.DEBUG) {
+        console.log({ otherBuildEnv });
+      }
+      // resolve mapping
+      return {
+        ...(await acc),
+        ...Object.entries(mapping).reduce(
+          (inneracc, [ourKey, otherKey]) => ({
+            ...inneracc,
+            [ourKey]: otherBuildEnv[otherKey],
+          }),
+          {} as EnvVars
+        ),
+      };
+    },
+    {} as Promise<EnvVars>
+  );
+
+  return {
+    ...merged,
+    ...referencedBuildEnv,
+  };
+};
diff --git a/dockerfiles/pipeline/scripts/utils/extractValuesFromEnvVars.ts b/dockerfiles/pipeline/scripts/utils/extractValuesFromEnvVars.ts
new file mode 100755
index 0000000000000000000000000000000000000000..2d65e65fed9ebfbb1ca92a260be9571b22cbde2c
--- /dev/null
+++ b/dockerfiles/pipeline/scripts/utils/extractValuesFromEnvVars.ts
@@ -0,0 +1,21 @@
+#!/usr/bin/env ts-node
+import { set } from "lodash";
+
+export const extractValuesFromEnvVars = () => {
+  const values = {};
+
+  const PREFIX = "DEFAULT_VALUE_";
+  for (const [key, value] of Object.entries(process.env).filter(([key]) =>
+    key.startsWith(PREFIX)
+  )) {
+    const path = key.replace(PREFIX, "").split("_");
+    let theValue;
+    try {
+      theValue = JSON.parse(value ?? "");
+    } catch (e) {
+      theValue = value;
+    }
+    set(values, path, theValue);
+  }
+  return values;
+};
diff --git a/dockerfiles/pipeline/scripts/extractValuesFromMr.ts b/dockerfiles/pipeline/scripts/utils/extractValuesFromMr.ts
similarity index 60%
rename from dockerfiles/pipeline/scripts/extractValuesFromMr.ts
rename to dockerfiles/pipeline/scripts/utils/extractValuesFromMr.ts
index 38347e4071129e4d3f20b4bc6c559c20bc7fb898..8d4e73b34e1e029b0875056e67e73a7489623b6c 100755
--- a/dockerfiles/pipeline/scripts/extractValuesFromMr.ts
+++ b/dockerfiles/pipeline/scripts/utils/extractValuesFromMr.ts
@@ -1,16 +1,8 @@
-#!/usr/bin/env ts-node
-
 import { Gitlab } from "@gitbeaker/node"; // Just the Project Resource
 import { parse } from "yaml";
 import { merge } from "lodash";
 
-const {
-  NODE_ENV,
-  GL_TOKEN,
-  CI_SERVER_URL,
-  CI_PROJECT_ID,
-  CI_MERGE_REQUEST_IID,
-} = process.env;
+const { GL_TOKEN, CI_SERVER_URL } = process.env;
 
 const gitlabApi = new Gitlab({
   token: GL_TOKEN,
@@ -38,17 +30,10 @@ export const extractValuesFromMRMessage = (mrMessage: string) => {
   }, {});
 };
 
-if (NODE_ENV !== "test" && CI_PROJECT_ID && CI_MERGE_REQUEST_IID) {
-  getMRInfo(CI_PROJECT_ID, Number(CI_MERGE_REQUEST_IID))
-    .then(async (r) => {
-      const values = extractValuesFromMRMessage(r.description);
-      console.log(JSON.stringify(values, null, 2) ?? "");
-    })
-    .catch((e) => {
-      console.error("could not extract values from mr", {
-        CI_PROJECT_ID,
-        CI_MERGE_REQUEST_IID,
-        error: e,
-      });
-    });
-}
+export const extractValuesFromMr = async (
+  projectId: string,
+  mergeRequestId: number
+) => {
+  const mrInfo = await getMRInfo(projectId, mergeRequestId);
+  return extractValuesFromMRMessage(mrInfo.description);
+};
diff --git a/dockerfiles/pipeline/tests/extractValuesFromMr.test.ts b/dockerfiles/pipeline/tests/extractValuesFromMr.test.ts
index 735be21bf9f88eaa30af90f0835de99e9c914ed3..561c7bfeeeb763898461feb551a5c120d7022cbc 100644
--- a/dockerfiles/pipeline/tests/extractValuesFromMr.test.ts
+++ b/dockerfiles/pipeline/tests/extractValuesFromMr.test.ts
@@ -1,4 +1,4 @@
-import { extractValuesFromMRMessage } from "../scripts/extractValuesFromMr";
+import { extractValuesFromMRMessage } from "../scripts/utils/extractValuesFromMr";
 
 describe("extractValuesFromMr", () => {
   describe("extractValuesFromMRMessage", () => {
diff --git a/helm-chart.yml b/helm-chart.yml
index 43d2d4e5e02077d2bc57c3dd89f0cbd4ad96665d..ef4733d8943e3b08f511a5610b3415f47d2aad76 100644
--- a/helm-chart.yml
+++ b/helm-chart.yml
@@ -11,7 +11,7 @@ image:
 variables:
   HELM_EXPERIMENTAL_OCI: 1
   AUTO_RELEASE: "true"
-  PIPELINE_IMAGE_TAG: dcda17fdc37f0e368ed2ef4482cfdd71e2cb3d77
+  PIPELINE_IMAGE_TAG: c88ce3adb43135b527482bf24a9420bc4fbd0d62
 
 stages:
   - lint
diff --git a/includes/rules.yml b/includes/rules.yml
index d762c9d30fe0f478a08042d7d14946d1a7a6082d..2a0344ad23bbfbc9d3f4f97b67a9c58de8474482 100644
--- a/includes/rules.yml
+++ b/includes/rules.yml
@@ -38,6 +38,8 @@
 
 .rules-dev-deploy:
   rules:
+    - if: $CUSTOM_BUILD_ENV
+      when: never
    - if: $TEST_ONLY
       when: never
     - if: $CI_COMMIT_TAG # after releases
@@ -51,6 +53,8 @@
 
 .rules-stage-deploy:
   rules:
+    - if: $CUSTOM_BUILD_ENV
+      when: never
     - if: $TEST_ONLY
       when: never
     - if: $CI_COMMIT_TAG && $CI_COMMIT_MESSAGE =~ /^chore\(maintenance\).*/ # d
@@ -60,6 +64,8 @@
 
 .rules-prod-deploy:
   rules:
+    - if: $CUSTOM_BUILD_ENV
+      when: never
     - if: $TEST_ONLY
       when: never
     - if: $CI_COMMIT_TAG && $CI_COMMIT_MESSAGE =~ /^chore\(maintenance\).*/ # d
diff --git a/includes/setup.yml b/includes/setup.yml
index bd3d7b819141002a20a12284482e23a2bbfaef02..25ccef6b7bc270d3504f0a19ef24cb45feee285c 100644
--- a/includes/setup.yml
+++ b/includes/setup.yml
@@ -5,15 +5,7 @@
     - .retry-default
     - .rules-build-and-deploy
   script:
-    - echo "triggered by $CI_PIPELINE_SOURCE"
-    - BUILD_TAG="$(git describe --tags || git rev-parse HEAD)"
-    - BUILD_ID="$COMPONENT_NAME-$BUILD_TAG"
-    - BUILD_COMMIT="$(git rev-parse HEAD)"
-    - BUILD_TIME="$(date)"
-    - echo "BUILD_TAG=$BUILD_TAG" >> build.env
-    - echo "BUILD_ID=$BUILD_ID" >> build.env
-    - echo "BUILD_COMMIT=$BUILD_COMMIT" >> build.env
-    - echo "BUILD_TIME=$BUILD_TIME" >> build.env
+    - . getCommitInfo
     - echo '{"id":"'$BUILD_ID'","commit":"'$BUILD_COMMIT'","tag":"'$BUILD_TAG'","time":"'$BUILD_TIME'"}' > $APP_DIR/__build_info.json
   artifacts:
     paths:
diff --git a/includes/verify.yml b/includes/verify.yml
index 86cd865f11db19de9ac6e377b48aec0d723f0290..681aa3c42ca1d5bc6d97603979dc345d294c2a97 100644
--- a/includes/verify.yml
+++ b/includes/verify.yml
@@ -32,6 +32,7 @@
 
 .verify-base:
   before_script:
+    - . getCommitInfo
     - set +e # Disable exit on error
     - *checkBuildId
     - set -e # Enable exit on error
@@ -50,14 +51,14 @@
     - if: $TEST_ONLY
       when: never
     - if: $CI_MERGE_REQUEST_ID
-  needs: ["review-deploy", "setup-build-info"]
+  needs: ["review-deploy"]
 
 .verify-dev:
   stage: verify
   extends:
     - .verify-base
     - .env-dev
-  needs: ["dev-deploy", "setup-build-info"]
+  needs: ["dev-deploy"]
   rules:
     - if: $TEST_ONLY
       when: never
@@ -71,7 +72,7 @@
   extends:
     - .verify-base
     - .env-stage
-  needs: ["stage-deploy", "setup-build-info"]
+  needs: ["stage-deploy"]
   rules:
     - if: $TEST_ONLY
       when: never
@@ -82,7 +83,7 @@
   extends:
     - .verify-base
     - .env-prod
-  needs: ["prod-deploy", "setup-build-info"]
+  needs: ["prod-deploy"]
   rules:
     - if: $TEST_ONLY
       when: never
diff --git a/monorepo.yml b/monorepo.yml
index 34b91adfeb74bebb01c5419f09aed769079467ff..0b522dee627d841184023559665e8a03d50a487f 100644
--- a/monorepo.yml
+++ b/monorepo.yml
@@ -12,7 +12,8 @@ include:
   - /includes/open-mr.yml
 
 variables:
-  PIPELINE_IMAGE_TAG: dcda17fdc37f0e368ed2ef4482cfdd71e2cb3d77
+  IS_MONOREPO: 1
+  PIPELINE_IMAGE_TAG: c88ce3adb43135b527482bf24a9420bc4fbd0d62
 
 image: git.panter.ch:5001/catladder/gitlab-ci/pipeline:$PIPELINE_IMAGE_TAG
 
@@ -23,15 +24,15 @@ stages:
   - verify
   - create-release
 
-setup:
+create-pipeline:
   stage: setup
+  interruptible: true
   extends:
     - .rules-build-and-deploy
   script:
-    - monorepoGenerate.ts -t=test
-    - monorepoGenerate.ts -t=test > generated-test-config.yml
-    - monorepoGenerate.ts -t=deploy
-    - monorepoGenerate.ts -t=deploy > generated-deploy-config.yml
+    - monorepoGenerate.ts -t=test --env dev > generated-test-config.yml
+    - DEBUG=1 monorepoGenerate.ts -t=deploy --env dev stage prod
+    - monorepoGenerate.ts -t=deploy --env dev stage prod > generated-deploy-config.yml
   artifacts:
     paths:
       - generated-test-config.yml
@@ -39,25 +40,25 @@ setup:
 
 test:
   stage: test
-  needs: [setup]
+  needs: [create-pipeline]
   extends:
     - .rules-always
   trigger:
     strategy: depend
     include:
       - artifact: generated-test-config.yml
-        job: setup
+        job: create-pipeline
 
 deploy:
   stage: deploy
-  needs: [setup]
+  needs: [create-pipeline]
   extends:
     - .rules-always
   trigger:
     strategy: depend
     include:
       - artifact: generated-deploy-config.yml
-        job: setup
+        job: create-pipeline
 
 create-release:
   extends: .create-release
diff --git a/panter-kubernetes-base.yml b/panter-kubernetes-base.yml
index ad8e61ca94b0cdc3b88273966e44892fad400950..25a14b16b8113dff71792b645e5ca72da6ad8ccf 100644
--- a/panter-kubernetes-base.yml
+++ b/panter-kubernetes-base.yml
@@ -12,7 +12,7 @@ include:
   - /includes/open-mr.yml
 
 variables:
-  PIPELINE_IMAGE_TAG: dcda17fdc37f0e368ed2ef4482cfdd71e2cb3d77
+  PIPELINE_IMAGE_TAG: c88ce3adb43135b527482bf24a9420bc4fbd0d62
   CUSTOMER_NAME: panter
   APP_NAME: demo
   COMPONENT_NAME: web
diff --git a/yarn.lock b/yarn.lock
index 3f45637befd88858fe63fd27a4c14ab47b2b390e..9204b9eb906cd13d4c2bfdb2fdffe69dcb0af177 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2465,6 +2465,11 @@ slash@^3.0.0:
   resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
   integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
 
+slugify@^1.6.3:
+  version "1.6.3"
+  resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.6.3.tgz#325aec50871acfb17976f2d3cb09ee1e7ab563be"
+  integrity sha512-1MPyqnIhgiq+/0iDJyqSJHENdnH5MMIlgJIBxmkRMzTNKlS/QsN5dXsB+MdDq4E6w0g9jFA4XOTRkVDjDae/2w==
+
 source-map-support@^0.5.6:
   version "0.5.19"
   resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"