diff --git a/.github/workflows/docker-jupyterlab.yml b/.github/workflows/docker-jupyterlab.yml
index 56a8ab49c..103f698be 100644
--- a/.github/workflows/docker-jupyterlab.yml
+++ b/.github/workflows/docker-jupyterlab.yml
@@ -1,4 +1,4 @@
-name: Docker Image Build Jupyterlab
+name: Docker Image Build JupyterLab
on:
push:
@@ -13,12 +13,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Build the Docker image
run: |
cd applications/jupyterlab
- docker build -t myjlab -f Dockerfile --no-cache .
+ ./build_local.sh
- name: Info on Docker image sizes
run: |
@@ -26,4 +26,5 @@ jobs:
- name: Run the Docker container and list python installs
run: |
- docker run -t --rm --entrypoint /bin/bash myjlab -c "pip3 list"
+ cd applications/jupyterlab
+ ./pip_omv_info.sh
diff --git a/.gitignore b/.gitignore
index 082c4e5b0..9df469a54 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,4 @@ skaffold-overrides.yaml
/libraries/client/netpyne-web/
/.venv
/libraries/client/*.pyc
+/applications/netpyne/shared
diff --git a/applications/accounts/deploy/resources/realm.json b/applications/accounts/deploy/resources/realm.json
index 80888301c..8a3f11593 100644
--- a/applications/accounts/deploy/resources/realm.json
+++ b/applications/accounts/deploy/resources/realm.json
@@ -158,63 +158,6 @@
]
},
"clients": [
- {
- "id": "18893fbb-8252-4aaa-bc9b-60799ceb9932",
- "clientId": "account-console",
- "name": "${client_account-console}",
- "rootUrl": "${authBaseUrl}",
- "baseUrl": {{ printf "/realms/%s/account" .Values.namespace | quote }},
- "surrogateAuthRequired": false,
- "enabled": true,
- "alwaysDisplayInConsole": false,
- "clientAuthenticatorType": "client-secret",
- "redirectUris": [
- {{ printf "/realms/%s/account/*" .Values.namespace | quote }}
- ],
- "webOrigins": [],
- "notBefore": 0,
- "bearerOnly": false,
- "consentRequired": false,
- "standardFlowEnabled": true,
- "implicitFlowEnabled": false,
- "directAccessGrantsEnabled": false,
- "serviceAccountsEnabled": false,
- "publicClient": true,
- "frontchannelLogout": false,
- "protocol": "openid-connect",
- "attributes": {
- "realm_client": "false",
- "post.logout.redirect.uris": "+",
- "pkce.code.challenge.method": "S256"
- },
- "authenticationFlowBindingOverrides": {},
- "fullScopeAllowed": false,
- "nodeReRegistrationTimeout": 0,
- "protocolMappers": [
- {
- "id": "9a68ec2d-943d-49cb-9fdd-cd821d606210",
- "name": "audience resolve",
- "protocol": "openid-connect",
- "protocolMapper": "oidc-audience-resolve-mapper",
- "consentRequired": false,
- "config": {}
- }
- ],
- "defaultClientScopes": [
- "service_account",
- "web-origins",
- "acr",
- "address",
- "administrator-scope",
- "phone",
- "profile",
- "roles",
- "microprofile-jwt",
- "basic",
- "email"
- ],
- "optionalClientScopes": []
- },
{
"id": "9a6a2560-c6be-4493-8bd5-3fdc4522d82b",
"clientId": {{ .Values.apps.accounts.client.id | quote }},
diff --git a/applications/jupyterlab/build_local.sh b/applications/jupyterlab/build_local.sh
new file mode 100755
index 000000000..4eebcecc9
--- /dev/null
+++ b/applications/jupyterlab/build_local.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -ex
+
+# Set the platform flag if we're on ARM
+arch=$(uname -m)
+if [[ "$arch" == "arm64" || "$arch" == "aarch64" ]]; then
+ platform_flag="--platform linux/amd64"
+else
+ platform_flag=""
+fi
+
+time DOCKER_BUILDKIT=1 docker build $platform_flag -t myjlab -f Dockerfile .
+
diff --git a/applications/jupyterlab/pip_omv_info.sh b/applications/jupyterlab/pip_omv_info.sh
new file mode 100755
index 000000000..c3b56a48c
--- /dev/null
+++ b/applications/jupyterlab/pip_omv_info.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+# A script to print info versions of packages in the JupyterLab container
+
+docker run -t --rm --entrypoint /bin/bash myjlab -c "pip3 list"
+echo "--------------------------------------------------------"
+docker run -t --rm --entrypoint /bin/bash myjlab -c "omv list -V"
diff --git a/applications/jupyterlab/rebuild_local.sh b/applications/jupyterlab/rebuild_local.sh
new file mode 100755
index 000000000..15eca6be3
--- /dev/null
+++ b/applications/jupyterlab/rebuild_local.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -ex
+
+# Set the platform flag if we're on ARM
+arch=$(uname -m)
+if [[ "$arch" == "arm64" || "$arch" == "aarch64" ]]; then
+ platform_flag="--platform linux/amd64"
+else
+ platform_flag=""
+fi
+
+time DOCKER_BUILDKIT=1 docker build $platform_flag -t myjlab -f Dockerfile --no-cache .
+
diff --git a/applications/jupyterlab/requirements.txt b/applications/jupyterlab/requirements.txt
index cce0760db..9874151aa 100644
--- a/applications/jupyterlab/requirements.txt
+++ b/applications/jupyterlab/requirements.txt
@@ -6,7 +6,7 @@ neurotune
neuron
# Install specific version of NetPyNE
-git+https://github.com/Neurosim-lab/netpyne.git@osbv2#egg=netpyne
+git+https://github.com/Neurosim-lab/netpyne.git@osbv2-dev#egg=netpyne
#### Other simulators
diff --git a/applications/jupyterlab/run_local.sh b/applications/jupyterlab/run_local.sh
new file mode 100755
index 000000000..269054b73
--- /dev/null
+++ b/applications/jupyterlab/run_local.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+# A script to run the JupyterLab container locally (build it first with ./build_local.sh)
+
+docker run --network host -it --rm --name myjupyterlab myjlab
+
+
diff --git a/applications/netpyne/Dockerfile b/applications/netpyne/Dockerfile
index eca06eea7..eb88bf469 100644
--- a/applications/netpyne/Dockerfile
+++ b/applications/netpyne/Dockerfile
@@ -16,7 +16,7 @@ RUN yarn build-dev
### Download on a separate stage to run in parallel with buildkit
FROM quay.io/jupyter/base-notebook:python-3.12 AS downloads
USER root
-RUN wget --no-check-certificate -O /nyhead.mat https://www.parralab.org/nyhead/sa_nyhead.mat
+RUN apt-get update -qq && apt-get install aria2 -y && aria2c -j10 -x 10 https://www.parralab.org/nyhead/sa_nyhead.mat && mv sa_nyhead.mat /nyhead.mat
###
FROM quay.io/jupyter/base-notebook:python-3.12
@@ -27,10 +27,13 @@ ENV NP_LFPYKIT_HEAD_FILE=/home/jovyan/nyhead.mat
USER root
+### Some helpful aliases
+RUN echo -e '\n\nalias cd..="cd .." \nalias h=history \nalias ll="ls -alt" \n' >> ~/.bashrc
+
RUN rm -rf /var/lib/apt/lists
RUN apt-get update -qq &&\
apt-get install python3-tk vim nano unzip git make libtool g++ -qq pkg-config libfreetype6-dev libpng-dev libopenmpi-dev -y
-RUN apt-get install openjdk-11-jre-headless -y
+RUN apt-get install openjdk-11-jre-headless htop ncdu -y
# RUN conda install python=3.7 -y
@@ -87,7 +90,7 @@ RUN ln -s /opt/workspace workspace
# RUN jupyter labextension disable @jupyterlab/hub-extension
-COPY --from=downloads --chown=1000:1000 /nyhead.mat $NP_LFPYKIT_HEAD_FILE
+COPY --from=downloads --chown=1000:1000 nyhead.mat $NP_LFPYKIT_HEAD_FILE
COPY --from=jsbuild --chown=1000:1000 $FOLDER/webapp/build webapp/build
RUN chown -R $NB_UID /home/jovyan/.jupyter
diff --git a/applications/netpyne/deploy/values.yaml b/applications/netpyne/deploy/values.yaml
index 542737608..4ecf10686 100644
--- a/applications/netpyne/deploy/values.yaml
+++ b/applications/netpyne/deploy/values.yaml
@@ -1,6 +1,6 @@
harness:
subdomain: netpyne
- secured: true
+ secured: false
uri_role_mapping: []
service:
auto: false
@@ -21,8 +21,8 @@ harness:
hard:
- jupyterhub
git:
- - url: https://github.com/MetaCell/NetPyNE-UI.git
- branch_tag: 1.1.0
+ - url: https://github.com/OpenSourceBrain/NetPyNE-UI.git
+ branch_tag: osbv2_tests
singleuser:
cpu:
limit: 1
diff --git a/applications/netpyne/overrides/requirements.txt b/applications/netpyne/overrides/requirements.txt
index 5326f342c..7640ea03c 100644
--- a/applications/netpyne/overrides/requirements.txt
+++ b/applications/netpyne/overrides/requirements.txt
@@ -71,7 +71,7 @@ jupyterthemes==0.20.0
kiwisolver==1.4.8
lesscpy==0.15.1
LFPykit==0.5.1
-libNeuroML==0.5.1
+libNeuroML==0.6.7
lxml==5.3.1
MarkupSafe==1.1.1
matplotlib==3.10.0
@@ -88,9 +88,9 @@ nbconvert==5.6.1
nbformat==5.2.0
ndindex==1.9.2
nest-asyncio==1.6.0
-netpyne @ git+https://github.com/Neurosim-lab/netpyne.git@3d633bcda9a3ab3fe4a90b7c705cd3692a729185
+netpyne @ git+https://github.com/Neurosim-lab/netpyne.git@osbv2-dev#egg=netpyne
networkx==3.4.2
-neuromllite==0.5.4
+neuromllite==0.6.1
NEURON==8.2.6
notebook==6.4.5
notebook_shim==0.2.3
@@ -117,9 +117,9 @@ pycparser==2.22
pyecore==0.15.2
pygeppetto==0.9.0
Pygments==2.19.1
-PyLEMS==0.5.9
+PyLEMS==0.6.8
pymongo==4.11.1
-pyNeuroML==1.0.10
+pyNeuroML==1.3.21
pyparsing==3.2.1
pytest==6.2.5
python-dateutil==2.9.0.post0
diff --git a/applications/netpyne/run_local.sh b/applications/netpyne/run_local.sh
index 595ccb418..88fce41a1 100755
--- a/applications/netpyne/run_local.sh
+++ b/applications/netpyne/run_local.sh
@@ -3,6 +3,6 @@ set -e
# A script to run the NetPyNE container locally (build it first with ./build_local.sh)
-docker run --network host -it --rm --name mynp mynetpyneosb
+docker run --network host -v "$PWD/shared":/opt/workspace/local:rw -it --rm --name mynp mynetpyneosb
diff --git a/applications/osb-portal/src/components/MainDrawer/MainDrawer.tsx b/applications/osb-portal/src/components/MainDrawer/MainDrawer.tsx
index 41c18be0c..6b0c7d408 100644
--- a/applications/osb-portal/src/components/MainDrawer/MainDrawer.tsx
+++ b/applications/osb-portal/src/components/MainDrawer/MainDrawer.tsx
@@ -338,7 +338,7 @@ export const MainDrawer = (props: {
-
+
{
- const logoMetaCell = "/images/metacell.png";
- const logoWellcome = "/images/wellcome.png";
+ const logoMetaCell = "https://raw.githubusercontent.com/OpenSourceBrain/OSBv2/refs/heads/master/applications/osb-portal/src/assets/images/metacell.png";
+ const logoWellcome = "https://raw.githubusercontent.com/OpenSourceBrain/OSBv2/refs/heads/master/applications/osb-portal/src/assets/images/wellcome.png";
return (
@@ -60,9 +60,16 @@ export const AboutContent = (props: any) => {
OSBv2 is being developed by the{" "}
- Silver Lab at University College London
+ Silver Lab
{" "}
- in collaboration with{" "}
+ and the{" "}
+
+ Gleeson Lab
+ {" "}
+ at {" "}
+
+ University College London
+ , in collaboration with{" "}
MetaCell
diff --git a/applications/workspaces/api/openapi.yaml b/applications/workspaces/api/openapi.yaml
index 8b2037ce0..83f37a7e1 100644
--- a/applications/workspaces/api/openapi.yaml
+++ b/applications/workspaces/api/openapi.yaml
@@ -271,6 +271,8 @@ paths:
security:
-
bearerAuth: []
+ -
+ cookieAuth: []
operationId: workspaces.controllers.workspace_controller.setthumbnail
summary: Sets the thumbnail of the workspace.
parameters:
@@ -302,6 +304,8 @@ paths:
security:
-
bearerAuth: []
+ -
+ cookieAuth: []
operationId: workspaces.controllers.workspace_controller.addimage
summary: Adds and image to the workspace.
parameters:
@@ -324,6 +328,8 @@ paths:
security:
-
bearerAuth: []
+ -
+ cookieAuth: []
operationId: delimage
summary: Delete a Workspace Image from the workspace.
x-openapi-router-controller: workspaces.controllers.workspace_controller
@@ -365,6 +371,8 @@ paths:
security:
-
bearerAuth: []
+ -
+ cookieAuth: []
summary: Used to save a WorkspaceResource to the repository.
'/workspaceresource/{id}':
get:
@@ -382,6 +390,8 @@ paths:
security:
-
bearerAuth: []
+ -
+ cookieAuth: []
summary: Used to retrieve a WorkspaceResource.
put:
requestBody:
@@ -1517,6 +1527,11 @@ components:
bearerFormat: JWT
type: http
x-bearerInfoFunc: cloudharness.auth.AuthClient.decode_token
+ cookieAuth:
+ type: apiKey
+ name: kc-access
+ in: cookie
+ x-apikeyInfoFunc: cloudharness.auth.decode_token
tags:
-
name: rest
diff --git a/deployment/codefresh-dev.yaml b/deployment/codefresh-dev.yaml
index 78433979b..0dab9ed13 100644
--- a/deployment/codefresh-dev.yaml
+++ b/deployment/codefresh-dev.yaml
@@ -27,14 +27,14 @@ steps:
revision: '${{CLOUDHARNESS_BRANCH}}'
working_directory: .
git: github
- clone_NetPyNE-UI_git_netpyne:
+ clone_NetPyNE-UI_git_osbv2_tests_netpyne:
title: Cloning NetPyNE-UI.git repository...
type: git-clone
- repo: https://github.com/MetaCell/NetPyNE-UI.git
- revision: dev_netpyne_updates
+ repo: https://github.com/OpenSourceBrain/NetPyNE-UI.git
+ revision: osbv2_tests
working_directory: applications/netpyne/dependencies/
git: github
- clone_nwb-explorer_git_nwb-explorer:
+ clone_nwb-explorer_git_development_nwb-explorer:
title: Cloning nwb-explorer.git repository...
type: git-clone
repo: https://github.com/MetaCell/nwb-explorer.git
@@ -65,29 +65,29 @@ steps:
type: parallel
stage: build
steps:
- cloudharness-frontend-build:
+ netpyne:
type: build
stage: build
- dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile
+ dockerfile: Dockerfile
registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: cloud-harness/cloudharness-frontend-build
- title: Cloudharness frontend build
- working_directory: ./cloud-harness
+ image_name: osb/netpyne
+ title: Netpyne
+ working_directory: ./applications/netpyne
tags:
- - '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}'
+ - '${{NETPYNE_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}',
- '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true
- forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}',
- '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false
- workspaces-dandi-copy:
+ buildDoesNotExist: includes('${{NETPYNE_TAG_EXISTS}}', '{{NETPYNE_TAG_EXISTS}}')
+ == true
+ forceNoCache: includes('${{NETPYNE_TAG_FORCE_BUILD}}', '{{NETPYNE_TAG_FORCE_BUILD}}')
+ == false
+ osb-portal:
type: build
stage: build
dockerfile: Dockerfile
@@ -95,21 +95,21 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/workspaces-dandi-copy
- title: Workspaces dandi copy
- working_directory: ./applications/workspaces/tasks/dandi-copy
+ image_name: osb/osb-portal
+ title: Osb portal
+ working_directory: ./applications/osb-portal
tags:
- - '${{WORKSPACES_DANDI_COPY_TAG}}'
+ - '${{OSB_PORTAL_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKSPACES_DANDI_COPY_TAG_EXISTS}}',
- '{{WORKSPACES_DANDI_COPY_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKSPACES_DANDI_COPY_TAG_FORCE_BUILD}}',
- '{{WORKSPACES_DANDI_COPY_TAG_FORCE_BUILD}}') == false
- backoffice:
+ buildDoesNotExist: includes('${{OSB_PORTAL_TAG_EXISTS}}', '{{OSB_PORTAL_TAG_EXISTS}}')
+ == true
+ forceNoCache: includes('${{OSB_PORTAL_TAG_FORCE_BUILD}}', '{{OSB_PORTAL_TAG_FORCE_BUILD}}')
+ == false
+ test-e2e:
type: build
stage: build
dockerfile: Dockerfile
@@ -117,21 +117,22 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/backoffice
- title: Backoffice
- working_directory: ./applications/backoffice
+ image_name: osb/test-e2e
+ title: Test e2e
+ working_directory: ./cloud-harness/test/test-e2e
tags:
- - '${{BACKOFFICE_TAG}}'
+ - '${{TEST_E2E_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ - latest
when:
condition:
any:
- buildDoesNotExist: includes('${{BACKOFFICE_TAG_EXISTS}}', '{{BACKOFFICE_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{BACKOFFICE_TAG_FORCE_BUILD}}', '{{BACKOFFICE_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}')
== false
- test-e2e:
+ jupyterhub-jupyterhub-examples-postgres-hub:
type: build
stage: build
dockerfile: Dockerfile
@@ -139,22 +140,44 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: cloud-harness/test-e2e
- title: Test e2e
- working_directory: ./cloud-harness/test/test-e2e
+ image_name: osb/jupyterhub-jupyterhub-examples-postgres-hub
+ title: Jupyterhub jupyterhub examples postgres hub
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/examples/postgres/hub
tags:
- - '${{TEST_E2E_TAG}}'
+ - '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
- - latest
when:
condition:
any:
- buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_FORCE_BUILD}}')
+ == false
+ cloudharness-base:
+ type: build
+ stage: build
+ dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ image_name: osb/cloudharness-base
+ title: Cloudharness base
+ working_directory: ./cloud-harness
+ tags:
+ - '${{CLOUDHARNESS_BASE_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}')
== false
- accounts:
+ nwb-explorer:
type: build
stage: build
dockerfile: Dockerfile
@@ -162,21 +185,21 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/accounts
- title: Accounts
- working_directory: ./.overrides/applications/accounts
+ image_name: osb/nwb-explorer
+ title: Nwb explorer
+ working_directory: ./applications/nwb-explorer
tags:
- - '${{ACCOUNTS_TAG}}'
+ - '${{NWB_EXPLORER_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{NWB_EXPLORER_TAG_EXISTS}}', '{{NWB_EXPLORER_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{NWB_EXPLORER_TAG_FORCE_BUILD}}', '{{NWB_EXPLORER_TAG_FORCE_BUILD}}')
== false
- osb-portal:
+ jupyterhub-jupyterhub-examples-postgres-db:
type: build
stage: build
dockerfile: Dockerfile
@@ -184,43 +207,110 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/osb-portal
- title: Osb portal
- working_directory: ./applications/osb-portal
+ image_name: osb/jupyterhub-jupyterhub-examples-postgres-db
+ title: Jupyterhub jupyterhub examples postgres db
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/examples/postgres/db
tags:
- - '${{OSB_PORTAL_TAG}}'
+ - '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{OSB_PORTAL_TAG_EXISTS}}', '{{OSB_PORTAL_TAG_EXISTS}}')
- == true
- forceNoCache: includes('${{OSB_PORTAL_TAG_FORCE_BUILD}}', '{{OSB_PORTAL_TAG_FORCE_BUILD}}')
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_FORCE_BUILD}}')
== false
- cloudharness-base:
+ workspaces-dandi-copy:
type: build
stage: build
- dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile
+ dockerfile: Dockerfile
registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: cloud-harness/cloudharness-base
- title: Cloudharness base
+ image_name: osb/workspaces-dandi-copy
+ title: Workspaces dandi copy
+ working_directory: ./applications/workspaces/tasks/dandi-copy
+ tags:
+ - '${{WORKSPACES_DANDI_COPY_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{WORKSPACES_DANDI_COPY_TAG_EXISTS}}',
+ '{{WORKSPACES_DANDI_COPY_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKSPACES_DANDI_COPY_TAG_FORCE_BUILD}}',
+ '{{WORKSPACES_DANDI_COPY_TAG_FORCE_BUILD}}') == false
+ jupyterhub-jupyterhub-demo-image:
+ type: build
+ stage: build
+ dockerfile: Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ image_name: osb/jupyterhub-jupyterhub-demo-image
+ title: Jupyterhub jupyterhub demo image
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/demo-image
+ tags:
+ - '${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_FORCE_BUILD}}') == false
+ jupyterhub-jupyterhub-singleuser:
+ type: build
+ stage: build
+ dockerfile: Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ image_name: osb/jupyterhub-jupyterhub-singleuser
+ title: Jupyterhub jupyterhub singleuser
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/singleuser
+ tags:
+ - '${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_FORCE_BUILD}}') == false
+ cloudharness-frontend-build:
+ type: build
+ stage: build
+ dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ image_name: osb/cloudharness-frontend-build
+ title: Cloudharness frontend build
working_directory: ./cloud-harness
tags:
- - '${{CLOUDHARNESS_BASE_TAG}}'
+ - '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}')
- == true
- forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}')
- == false
- nwb-explorer:
+ buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}',
+ '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}',
+ '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false
+ jupyterhub-jupyterhub-onbuild:
type: build
stage: build
dockerfile: Dockerfile
@@ -228,20 +318,20 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/nwb-explorer
- title: Nwb explorer
- working_directory: ./applications/nwb-explorer
+ image_name: osb/jupyterhub-jupyterhub-onbuild
+ title: Jupyterhub jupyterhub onbuild
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/onbuild
tags:
- - '${{NWB_EXPLORER_TAG}}'
+ - '${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{NWB_EXPLORER_TAG_EXISTS}}', '{{NWB_EXPLORER_TAG_EXISTS}}')
- == true
- forceNoCache: includes('${{NWB_EXPLORER_TAG_FORCE_BUILD}}', '{{NWB_EXPLORER_TAG_FORCE_BUILD}}')
- == false
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_FORCE_BUILD}}') == false
jupyterlab:
type: build
stage: build
@@ -264,7 +354,7 @@ steps:
== true
forceNoCache: includes('${{JUPYTERLAB_TAG_FORCE_BUILD}}', '{{JUPYTERLAB_TAG_FORCE_BUILD}}')
== false
- netpyne:
+ backoffice:
type: build
stage: build
dockerfile: Dockerfile
@@ -272,26 +362,21 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- image_name: osb/netpyne
- title: Netpyne
- working_directory: ./applications/netpyne
+ image_name: osb/backoffice
+ title: Backoffice
+ working_directory: ./applications/backoffice
tags:
- - '${{NETPYNE_TAG}}'
+ - '${{BACKOFFICE_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{NETPYNE_TAG_EXISTS}}', '{{NETPYNE_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{BACKOFFICE_TAG_EXISTS}}', '{{BACKOFFICE_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{NETPYNE_TAG_FORCE_BUILD}}', '{{NETPYNE_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{BACKOFFICE_TAG_FORCE_BUILD}}', '{{BACKOFFICE_TAG_FORCE_BUILD}}')
== false
- title: Build parallel step 1
- build_application_images_1:
- type: parallel
- stage: build
- steps:
- notifications:
+ jupyterhub-jupyterhub-examples-service-fastapi:
type: build
stage: build
dockerfile: Dockerfile
@@ -299,22 +384,23 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/notifications
- title: Notifications
- working_directory: ./.overrides/applications/notifications/server
+ image_name: osb/jupyterhub-jupyterhub-examples-service-fastapi
+ title: Jupyterhub jupyterhub examples service fastapi
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub/examples/service-fastapi
tags:
- - '${{NOTIFICATIONS_TAG}}'
+ - '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{NOTIFICATIONS_TAG_EXISTS}}', '{{NOTIFICATIONS_TAG_EXISTS}}')
- == true
- forceNoCache: includes('${{NOTIFICATIONS_TAG_FORCE_BUILD}}', '{{NOTIFICATIONS_TAG_FORCE_BUILD}}')
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_EXISTS}}') ==
+ true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_FORCE_BUILD}}')
== false
- workflows-extract-download:
+ jupyterhub-jupyterhub:
type: build
stage: build
dockerfile: Dockerfile
@@ -322,22 +408,21 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/workflows-extract-download
- title: Workflows extract download
- working_directory: ./cloud-harness/applications/workflows/tasks/extract-download
+ image_name: osb/jupyterhub-jupyterhub
+ title: Jupyterhub jupyterhub
+ working_directory: ./.overrides/applications/jupyterhub/jupyterhub
tags:
- - '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}'
+ - '${{JUPYTERHUB_JUPYTERHUB_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}',
- '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}',
- '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false
- workflows-send-result-event:
+ buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_TAG_EXISTS}}',
+ '{{JUPYTERHUB_JUPYTERHUB_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_TAG_FORCE_BUILD}}',
+ '{{JUPYTERHUB_JUPYTERHUB_TAG_FORCE_BUILD}}') == false
+ accounts:
type: build
stage: build
dockerfile: Dockerfile
@@ -345,21 +430,25 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/workflows-send-result-event
- title: Workflows send result event
- working_directory: ./cloud-harness/applications/workflows/tasks/send-result-event
+ image_name: osb/accounts
+ title: Accounts
+ working_directory: ./.overrides/applications/accounts
tags:
- - '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}'
+ - '${{ACCOUNTS_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}',
- '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}',
- '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false
+ buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}')
+ == true
+ forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}')
+ == false
+ title: Build parallel step 1
+ build_application_images_1:
+ type: parallel
+ stage: build
+ steps:
workspaces-figshare-copy:
type: build
stage: build
@@ -368,7 +457,7 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: osb/workspaces-figshare-copy
title: Workspaces figshare copy
working_directory: ./applications/workspaces/tasks/figshare-copy
@@ -383,29 +472,28 @@ steps:
'{{WORKSPACES_FIGSHARE_COPY_TAG_EXISTS}}') == true
forceNoCache: includes('${{WORKSPACES_FIGSHARE_COPY_TAG_FORCE_BUILD}}',
'{{WORKSPACES_FIGSHARE_COPY_TAG_FORCE_BUILD}}') == false
- test-api:
+ cloudharness-flask:
type: build
stage: build
- dockerfile: test/test-api/Dockerfile
+ dockerfile: Dockerfile
registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/test-api
- title: Test api
- working_directory: ./cloud-harness
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/cloudharness-flask
+ title: Cloudharness flask
+ working_directory: ./cloud-harness/infrastructure/common-images/cloudharness-flask
tags:
- - '${{TEST_API_TAG}}'
+ - '${{CLOUDHARNESS_FLASK_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
- - latest
when:
condition:
any:
- buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}')
== false
workspaces-biomodels-copy:
type: build
@@ -415,7 +503,7 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: osb/workspaces-biomodels-copy
title: Workspaces biomodels copy
working_directory: ./applications/workspaces/tasks/biomodels-copy
@@ -430,30 +518,31 @@ steps:
'{{WORKSPACES_BIOMODELS_COPY_TAG_EXISTS}}') == true
forceNoCache: includes('${{WORKSPACES_BIOMODELS_COPY_TAG_FORCE_BUILD}}',
'{{WORKSPACES_BIOMODELS_COPY_TAG_FORCE_BUILD}}') == false
- workspaces-github-copy:
+ test-api:
type: build
stage: build
- dockerfile: Dockerfile
+ dockerfile: test/test-api/Dockerfile
registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: osb/workspaces-github-copy
- title: Workspaces github copy
- working_directory: ./applications/workspaces/tasks/github-copy
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/test-api
+ title: Test api
+ working_directory: ./cloud-harness
tags:
- - '${{WORKSPACES_GITHUB_COPY_TAG}}'
+ - '${{TEST_API_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ - latest
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKSPACES_GITHUB_COPY_TAG_EXISTS}}',
- '{{WORKSPACES_GITHUB_COPY_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKSPACES_GITHUB_COPY_TAG_FORCE_BUILD}}',
- '{{WORKSPACES_GITHUB_COPY_TAG_FORCE_BUILD}}') == false
- cloudharness-flask:
+ buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}')
+ == true
+ forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}')
+ == false
+ notifications:
type: build
stage: build
dockerfile: Dockerfile
@@ -461,21 +550,90 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/cloudharness-flask
- title: Cloudharness flask
- working_directory: ./cloud-harness/infrastructure/common-images/cloudharness-flask
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/notifications
+ title: Notifications
+ working_directory: ./.overrides/applications/notifications/server
tags:
- - '${{CLOUDHARNESS_FLASK_TAG}}'
+ - '${{NOTIFICATIONS_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{NOTIFICATIONS_TAG_EXISTS}}', '{{NOTIFICATIONS_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{NOTIFICATIONS_TAG_FORCE_BUILD}}', '{{NOTIFICATIONS_TAG_FORCE_BUILD}}')
== false
+ workspaces-scan-workspace:
+ type: build
+ stage: build
+ dockerfile: Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/workspaces-scan-workspace
+ title: Workspaces scan workspace
+ working_directory: ./applications/workspaces/tasks/scan-workspace
+ tags:
+ - '${{WORKSPACES_SCAN_WORKSPACE_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{WORKSPACES_SCAN_WORKSPACE_TAG_EXISTS}}',
+ '{{WORKSPACES_SCAN_WORKSPACE_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKSPACES_SCAN_WORKSPACE_TAG_FORCE_BUILD}}',
+ '{{WORKSPACES_SCAN_WORKSPACE_TAG_FORCE_BUILD}}') == false
+ workflows-send-result-event:
+ type: build
+ stage: build
+ dockerfile: Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/workflows-send-result-event
+ title: Workflows send result event
+ working_directory: ./cloud-harness/applications/workflows/tasks/send-result-event
+ tags:
+ - '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}',
+ '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}',
+ '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false
+ workflows-notify-queue:
+ type: build
+ stage: build
+ dockerfile: Dockerfile
+ registry: '${{CODEFRESH_REGISTRY}}'
+ buildkit: true
+ build_arguments:
+ - NOCACHE=${{CF_BUILD_ID}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/workflows-notify-queue
+ title: Workflows notify queue
+ working_directory: ./cloud-harness/applications/workflows/tasks/notify-queue
+ tags:
+ - '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}'
+ - '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
+ - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
+ when:
+ condition:
+ any:
+ buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}',
+ '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}',
+ '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false
jupyterhub:
type: build
stage: build
@@ -484,7 +642,7 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: osb/jupyterhub
title: Jupyterhub
working_directory: ./.overrides/applications/jupyterhub
@@ -499,7 +657,7 @@ steps:
== true
forceNoCache: includes('${{JUPYTERHUB_TAG_FORCE_BUILD}}', '{{JUPYTERHUB_TAG_FORCE_BUILD}}')
== false
- workspaces-scan-workspace:
+ workspaces-github-copy:
type: build
stage: build
dockerfile: Dockerfile
@@ -507,22 +665,22 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: osb/workspaces-scan-workspace
- title: Workspaces scan workspace
- working_directory: ./applications/workspaces/tasks/scan-workspace
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/workspaces-github-copy
+ title: Workspaces github copy
+ working_directory: ./applications/workspaces/tasks/github-copy
tags:
- - '${{WORKSPACES_SCAN_WORKSPACE_TAG}}'
+ - '${{WORKSPACES_GITHUB_COPY_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKSPACES_SCAN_WORKSPACE_TAG_EXISTS}}',
- '{{WORKSPACES_SCAN_WORKSPACE_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKSPACES_SCAN_WORKSPACE_TAG_FORCE_BUILD}}',
- '{{WORKSPACES_SCAN_WORKSPACE_TAG_FORCE_BUILD}}') == false
- workflows-notify-queue:
+ buildDoesNotExist: includes('${{WORKSPACES_GITHUB_COPY_TAG_EXISTS}}',
+ '{{WORKSPACES_GITHUB_COPY_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKSPACES_GITHUB_COPY_TAG_FORCE_BUILD}}',
+ '{{WORKSPACES_GITHUB_COPY_TAG_FORCE_BUILD}}') == false
+ workflows-extract-download:
type: build
stage: build
dockerfile: Dockerfile
@@ -530,27 +688,27 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
- image_name: cloud-harness/workflows-notify-queue
- title: Workflows notify queue
- working_directory: ./cloud-harness/applications/workflows/tasks/notify-queue
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ image_name: osb/workflows-extract-download
+ title: Workflows extract download
+ working_directory: ./cloud-harness/applications/workflows/tasks/extract-download
tags:
- - '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}'
+ - '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}',
- '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true
- forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}',
- '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false
+ buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}',
+ '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true
+ forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}',
+ '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false
title: Build parallel step 2
build_application_images_2:
type: parallel
stage: build
steps:
- common:
+ workflows:
type: build
stage: build
dockerfile: Dockerfile
@@ -558,20 +716,20 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
- image_name: cloud-harness/common
- title: Common
- working_directory: ./.overrides/applications/common/server
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ image_name: osb/workflows
+ title: Workflows
+ working_directory: ./cloud-harness/applications/workflows/server
tags:
- - '${{COMMON_TAG}}'
+ - '${{WORKFLOWS_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}')
== false
volumemanager:
type: build
@@ -581,8 +739,8 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
- image_name: cloud-harness/volumemanager
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ image_name: osb/volumemanager
title: Volumemanager
working_directory: ./cloud-harness/applications/volumemanager/server
tags:
@@ -604,7 +762,7 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
image_name: osb/workspaces
title: Workspaces
working_directory: ./applications/workspaces/server
@@ -627,7 +785,7 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
image_name: osb/accounts-api
title: Accounts api
working_directory: ./applications/accounts-api
@@ -642,7 +800,7 @@ steps:
== true
forceNoCache: includes('${{ACCOUNTS_API_TAG_FORCE_BUILD}}', '{{ACCOUNTS_API_TAG_FORCE_BUILD}}')
== false
- workflows:
+ common:
type: build
stage: build
dockerfile: Dockerfile
@@ -650,20 +808,20 @@ steps:
buildkit: true
build_arguments:
- NOCACHE=${{CF_BUILD_ID}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
- image_name: cloud-harness/workflows
- title: Workflows
- working_directory: ./cloud-harness/applications/workflows/server
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ image_name: osb/common
+ title: Common
+ working_directory: ./.overrides/applications/common/server
tags:
- - '${{WORKFLOWS_TAG}}'
+ - '${{COMMON_TAG}}'
- '${{DEPLOYMENT_PUBLISH_TAG}}-dev'
- '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}'
when:
condition:
any:
- buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}')
== true
- forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}')
== false
title: Build parallel step 3
tests_unit:
@@ -692,10 +850,10 @@ steps:
custom_value_files:
- ./deployment/helm/values.yaml
custom_values:
- - apps_notifications_harness_secrets_email-user=${{EMAIL-USER}}
- - apps_notifications_harness_secrets_email-password=${{EMAIL-PASSWORD}}
- apps_workspaces_harness_secrets_github-user=${{GITHUB-USER}}
- apps_workspaces_harness_secrets_github-token=${{GITHUB-TOKEN}}
+ - apps_notifications_harness_secrets_email-user=${{EMAIL-USER}}
+ - apps_notifications_harness_secrets_email-password=${{EMAIL-PASSWORD}}
image: codefresh/cfstep-helm:3.6.2
environment:
- CHART_REF=./deployment/helm
@@ -714,20 +872,19 @@ steps:
commands:
- kubectl config use-context ${{CLUSTER_NAME}}
- kubectl config set-context --current --namespace=${{NAMESPACE}}
+ - kubectl rollout status deployment/events-gk
- kubectl rollout status deployment/volumemanager
- - kubectl rollout status deployment/accounts-api
- kubectl rollout status deployment/osb-portal
- kubectl rollout status deployment/www-gk
- - kubectl rollout status deployment/accounts
- - kubectl rollout status deployment/events-gk
- - kubectl rollout status deployment/workflows
- - kubectl rollout status deployment/common
- - kubectl rollout status deployment/notifications
- kubectl rollout status deployment/backoffice
- kubectl rollout status deployment/admin-gk
- - kubectl rollout status deployment/argo-gk
+ - kubectl rollout status deployment/workflows
+ - kubectl rollout status deployment/accounts-api
- kubectl rollout status deployment/workspaces
- - kubectl rollout status deployment/netpyne-gk
+ - kubectl rollout status deployment/notifications
+ - kubectl rollout status deployment/common
+ - kubectl rollout status deployment/accounts
+ - kubectl rollout status deployment/argo-gk
- sleep 60
when:
condition:
@@ -737,21 +894,11 @@ steps:
stage: qa
title: Api tests
working_directory: /home/test
- image: '${{REGISTRY}}/cloud-harness/test-api:${{TEST_API_TAG}}'
+ image: '${{REGISTRY}}/osb/test-api:${{TEST_API_TAG}}'
fail_fast: false
commands:
- echo $APP_NAME
scale:
- volumemanager_api_test:
- title: volumemanager api test
- volumes:
- - '${{CF_REPO_NAME}}/cloud-harness/applications/volumemanager:/home/test'
- - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml'
- environment:
- - APP_URL=https://volumemanager.${{DOMAIN}}/api
- commands:
- - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
- https://volumemanager.${{DOMAIN}}/api -c all
common_api_test:
title: common api test
volumes:
@@ -780,7 +927,7 @@ steps:
stage: qa
title: End to end tests
working_directory: /home/test
- image: '${{REGISTRY}}/cloud-harness/test-e2e:${{TEST_E2E_TAG}}'
+ image: '${{REGISTRY}}/osb/test-e2e:${{TEST_E2E_TAG}}'
fail_fast: false
commands:
- npx puppeteer browsers install chrome
@@ -818,7 +965,7 @@ steps:
stage: publish
type: push
title: Cloudharness base
- candidate: '${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}'
+ candidate: '${{REGISTRY}}/osb/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -827,7 +974,7 @@ steps:
stage: publish
type: push
title: Cloudharness frontend build
- candidate: '${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}'
+ candidate: '${{REGISTRY}}/osb/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -836,7 +983,7 @@ steps:
stage: publish
type: push
title: Cloudharness flask
- candidate: '${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}'
+ candidate: '${{REGISTRY}}/osb/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -845,7 +992,7 @@ steps:
stage: publish
type: push
title: Notifications
- candidate: '${{REGISTRY}}/cloud-harness/notifications:${{NOTIFICATIONS_TAG}}'
+ candidate: '${{REGISTRY}}/osb/notifications:${{NOTIFICATIONS_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -863,7 +1010,7 @@ steps:
stage: publish
type: push
title: Volumemanager
- candidate: '${{REGISTRY}}/cloud-harness/volumemanager:${{VOLUMEMANAGER_TAG}}'
+ candidate: '${{REGISTRY}}/osb/volumemanager:${{VOLUMEMANAGER_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -881,7 +1028,7 @@ steps:
stage: publish
type: push
title: Common
- candidate: '${{REGISTRY}}/cloud-harness/common:${{COMMON_TAG}}'
+ candidate: '${{REGISTRY}}/osb/common:${{COMMON_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -890,7 +1037,7 @@ steps:
stage: publish
type: push
title: Workflows send result event
- candidate: '${{REGISTRY}}/cloud-harness/workflows-send-result-event:${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}'
+ candidate: '${{REGISTRY}}/osb/workflows-send-result-event:${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -899,7 +1046,7 @@ steps:
stage: publish
type: push
title: Workflows extract download
- candidate: '${{REGISTRY}}/cloud-harness/workflows-extract-download:${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}'
+ candidate: '${{REGISTRY}}/osb/workflows-extract-download:${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -908,7 +1055,7 @@ steps:
stage: publish
type: push
title: Workflows notify queue
- candidate: '${{REGISTRY}}/cloud-harness/workflows-notify-queue:${{WORKFLOWS_NOTIFY_QUEUE_TAG}}'
+ candidate: '${{REGISTRY}}/osb/workflows-notify-queue:${{WORKFLOWS_NOTIFY_QUEUE_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -917,7 +1064,7 @@ steps:
stage: publish
type: push
title: Workflows
- candidate: '${{REGISTRY}}/cloud-harness/workflows:${{WORKFLOWS_TAG}}'
+ candidate: '${{REGISTRY}}/osb/workflows:${{WORKFLOWS_TAG}}'
tags:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
@@ -1030,6 +1177,69 @@ steps:
- '${{DEPLOYMENT_PUBLISH_TAG}}'
- latest
registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub:${{JUPYTERHUB_JUPYTERHUB_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-singleuser:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub singleuser
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-singleuser:${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-examples-service-fastapi:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub examples service fastapi
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-examples-service-fastapi:${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-examples-postgres-db:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub examples postgres db
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-examples-postgres-db:${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-examples-postgres-hub:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub examples postgres hub
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-examples-postgres-hub:${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-demo-image:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub demo image
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-demo-image:${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
+ publish_jupyterhub-jupyterhub-onbuild:
+ stage: publish
+ type: push
+ title: Jupyterhub jupyterhub onbuild
+ candidate: '${{REGISTRY}}/osb/jupyterhub-jupyterhub-onbuild:${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG}}'
+ tags:
+ - '${{DEPLOYMENT_PUBLISH_TAG}}'
+ - latest
+ registry: '${{REGISTRY_PUBLISH_URL}}'
when:
condition:
all:
diff --git a/local-scripts/osbv2-local.sh b/local-scripts/osbv2-local.sh
index c5d3c1908..57d59b5d8 100755
--- a/local-scripts/osbv2-local.sh
+++ b/local-scripts/osbv2-local.sh
@@ -153,8 +153,8 @@ deploy () {
harness_deployment
echo "-> running skaffold"
- $SKAFFOLD dev --cleanup=false || { notify_fail "Failed: skaffold" ; minikube stop; }
- #$SKAFFOLD dev || notify_fail "Failed: skaffold"
+ $SKAFFOLD dev --cleanup=false || { notify_fail "Failed: skaffold" ; }
+ minikube status
popd
}
@@ -193,10 +193,18 @@ harness_deployment() {
if [ "YES" == "$LIVE" ]
then
echo "-> harnessing live configuration deployment, and deploying"
+ set -x
+ set -v
harness-deployment ../cloud-harness . -l -n ${OSB_NAMESPACE} -d osb.local -r gcr.io/metacellllc -e "local" -t "$LIVE_TAG" || notify_fail "Failed: harness-deployment (live)"
+ set +x
+ set +v
else
echo "-> harnessing development deployment"
+ set -x
+ set -v
harness-deployment ../cloud-harness . -l -n ${OSB_NAMESPACE} -d osb.local -dtls -e "local" ${DEPLOYMENT_APP:+-i $DEPLOYMENT_APP} || notify_fail "Failed: harness-deployment (dev)"
+ set +x
+ set +v
fi
popd
}
@@ -242,10 +250,12 @@ deactivate_venv() {
print_versions() {
echo "** docker **"
docker version
+ echo -e "\n** kubernetes **"
+ kubectl version
echo -e "\n** minikube **"
minikube version
echo -e "\n** cloud harness **"
- pushd "${CLOUD_HARNESS_DIR}" && git log --oneline | head -1 && popd
+ pushd "${CLOUD_HARNESS_DIR}" > /dev/null 2>&1 && git log --oneline | head -1 && popd > /dev/null 2>&1
echo -e "\n** helm **"
helm version
echo -e "\n** skaffold **"