@@ -1,40 +1,30 @@
= NooBaa Workload

This workload deploys and configures the NooBaa operator on an OpenShift 4 cluster.

== Deploy the workload
[source, bash]
----
ansible-playbook -i "bastion.${GUID}.${BASE_DOMAIN}", ./ansible/configs/ocp-workloads/ocp-workload.yml \
-e"ansible_ssh_private_key_file=${ANSIBLE_USER_KEY_FILE}" \
-e"ansible_user=${ANSIBLE_USER}" \
-e"ocp_workload=ocp4-workload-operator-mssql" \
-e"ocp_workload=ocp-workload-ocs-operator" \
-e"silent=False" \
-e"ACTION=create" \
-e @./secret.yaml \ <1>
-e @./workload_vars.yaml <2>
----
<1> This is the same file you used while deploying the OCP cluster using agnosticd; your AWS credentials go in this file. Optionally, include sensitive workload variables here.
<2> This file contains all the variables required by the workload.
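
For reference, `workload_vars.yaml` usually just overrides a subset of the role defaults added in this change. A minimal sketch (the keys come from the role defaults; the values shown are the defaults themselves and purely illustrative):

[source, yaml]
----
ocs_namespace: openshift-storage
ocs_channel: stable-4.2
ocs_release: v4.2.2
ocs_install_mcg: true
ocs_mcg_pv_pool_pv_size: 50Gi
ocs_mcg_pv_pool_pv_quantity: 3
----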

== Delete the workload

[source, bash]
----
ansible-playbook -i "bastion.${GUID}.${BASE_DOMAIN}", ./ansible/configs/ocp-workloads/ocp-workload.yml \
-e"ansible_ssh_private_key_file=${ANSIBLE_USER_KEY_FILE}" \
-e"ansible_user=${ANSIBLE_USER}" \
-e"ocp_workload=ocp-workload-operator-mssql" \
-e"ansible_user=${ANSIBLE_USER}" \
-e"ocp_workload=ocp-workload-ocs-operator" \
-e"silent=False" \
-e"ACTION=remove" \
-e @./secret.yaml \ <1>
-e @./workload_vars.yaml <2>
----
<1> This is the same file you used while deploying the OCP cluster using agnosticd; your AWS credentials go in this file.
<2> This file contains all the variables required by the workload.
@@ -0,0 +1,84 @@
ocs_expected_crds:
- backingstores.noobaa.io
- bucketclasses.noobaa.io
- noobaas.noobaa.io
- objectbucketclaims.objectbucket.io
ocs_channel: stable-4.2
ocs_install_mcg: true
ocs_mcg_core_cpu: 0.5
ocs_mcg_core_mem: 500Mi
ocs_mcg_db_cpu: 0.5
ocs_mcg_db_mem: 1Gi
ocs_ceph_mds_cpu: 0.5
ocs_ceph_mds_mem: 1Gi
ocs_ceph_mon_cpu: 0.5
ocs_ceph_mon_mem: 1Gi
ocs_ceph_mgr_cpu: 0.5
ocs_ceph_mgr_mem: 1Gi
ocs_ceph_osd_cpu: 0.5
ocs_ceph_osd_mem: 1Gi
ocs_mcg_pv_pool: true
ocs_mcg_pv_pool_bucket_name: mcg
ocs_mcg_pv_pool_pv_size: 50Gi
ocs_mcg_pv_pool_pv_quantity: 3
ocs_mcg_pv_pool_pv_storageclass: gp2
ocs_namespace: openshift-storage
ocs_release: v4.2.2
ocs_operator_workload_destroy: "{{ False if (ACTION=='create' or ACTION=='provision') else True }}"
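# Any ACTION other than create/provision (for example ACTION=remove) therefore triggers workload teardown.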
silent: false

# Comma-separated list of git repo URLs to preload on the notebook pod when it spawns
workshop_preload_repos: "https://github.com/willb/fraud-notebooks"

och_project: open-data-hub
user_project: user
workload_destroy: false

och_expected_crds:
- opendatahubs.opendatahub.io

knative_expected_crds:
- knativeservings.operator.knative.dev

install_storage: false
install_open_data_hub: true
install_postgres: false
install_codeready: false
install_knative: true
install_pipelines: true

# The first user number to start with when creating projects
user_count_start: 1
# The upper bound on user numbers when creating projects (user_count_start + num_users)
user_count_end: "{{user_count_start|int + num_users|int}}"
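# For example, with user_count_start=1 and num_users=3, user_count_end evaluates to 4.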

# Amount of memory for the JupyterHub server
jupyterhub_memory: "1Gi"
# Amount of memory for the spawned Jupyter Notebook pods
jupyter_notebook_memory: "4Gi"

# Image to use for the Spark Cluster
spark_node_image: "quay.io/llasmith/openshift-spark:spark-2.3.2_hadoop-2.8.5"
# Number of Spark master nodes
spark_master_count: 1
# Number of Spark worker nodes
spark_worker_count: 2
# Amount of memory to allocate to each Spark node. This amount will be used for master AND worker nodes
spark_node_memory: "4Gi"
# Amount of cpu to allocate to each Spark node. This amount will be used for master AND worker nodes
spark_node_cpu: 1

# Path to append to env var PYTHONPATH for pyspark module
spark_pythonpath: "/opt/app-root/lib/python3.6/site-packages/pyspark/python/:/opt/app-root/lib/python3.6/site-packages/pyspark/python/lib/py4j-0.10.7-src.zip"
# PySpark submit args to be set as the env var SPARK_SUBMIT_ARGS
spark_submit_args: "--conf spark.cores.max=1 --conf spark.executor.instances=1 --conf spark.executor.memory=4G --conf spark.executor.cores=1 --conf spark.driver.memory=2G --packages com.amazonaws:aws-java-sdk:1.8.0,org.apache.hadoop:hadoop-aws:2.8.5 pyspark-shell"

jupyterhub_image_registry: 'quay.io'
jupyterhub_image_repository: 'odh-jupyterhub'

# Custom notebook image source that will be used for the workshop
workshop_jupyter_notebook_imagestream_image: "quay.io/willbenton/jh-ml-workflows-notebook:devconf-us-2019"
# Imagestream name for the custom workshop image
workshop_jupyter_notebook_imagestream_name: "ml-workflows-notebook"
# Imagestream tag for the custom workshop image
workshop_jupyter_notebook_imagestream_tag: "latest"
@@ -0,0 +1,14 @@
---
- name: "Create Subscription"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'codeready/subscription.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: "Create Cluster"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'codeready/cluster.yml.j2') }}"
vars:
namespace: "{{ user_project }}"
@@ -0,0 +1,33 @@
---
- name: "Create Subscription"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'serverless/subscription.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: "Wait for Knative CRD's to exist"
k8s_facts:
api_version: "apiextensions.k8s.io/v1beta1"
kind: CustomResourceDefinition
name: "{{ item }}"
loop: "{{ knative_expected_crds }}"
register: crds
until: crds.resources|length > 0
retries: 30
delay: 10


- name: "Create Namespace"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'general/namespace.yml.j2') }}"
vars:
namespace: knative-serving

- name: "Create Knative"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'serverless/knative.yml.j2') }}"
vars:
namespace: knative-serving
@@ -0,0 +1,61 @@
---
- name: "Create Open Data Hub subscription in project {{ user_project }}"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/subscription.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: "Create ODH Config Map configuration in {{ user_project }}"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/odh-configmap.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: "Create Jupyter Single User Profile Config Map for Student User {{ user_name }} in project {{ user_name }}"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/jupyterhub-single-user-profile-user.configmap.yml.j2') }}"
vars:
namespace: "{{ user_project }}"
suffix: "{{ user_name }}"

- name: "Create Jupyter Single User Profile Config Map for Admin in project {{ user_project }}"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/jupyterhub-single-user-profile-user.configmap.yml.j2') }}"
vars:
namespace: "{{ user_project }}"
suffix: opentlc-mgr

- name: "Wait for Open Data Hub CRD's to exist"
k8s_facts:
api_version: "apiextensions.k8s.io/v1beta1"
kind: CustomResourceDefinition
name: "{{ item }}"
loop: "{{ och_expected_crds }}"
register: crds
until: crds.resources|length > 0
retries: 30
delay: 10


- name: "Create the ImageStream for the notebook used in this workshop"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/workshop-notebook.imagestream.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: "Create Open Data Hub"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'open-data-hub/opendatahub.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

- name: Open Data Hub tasks complete
debug:
msg: "Open Data Hub Tasks completed successfully."
when: not silent|bool
@@ -0,0 +1,27 @@
---
- set_fact:
    user_name: "user{{ user_num }}"

- set_fact:
    user_project: "opendatahub-{{user_name}}"

- name: Setting up user project
  import_tasks: ./user_project.yml
  become: false
  when: ACTION == "create" or ACTION == "provision"

- name: Installing Open Data Hub
  import_tasks: ./open_data_hub.yml
  become: false
  when: (ACTION == "create" or ACTION == "provision") and install_open_data_hub

- name: Installing Postgresql
  import_tasks: ./postgresql.yml
  become: false
  when: (ACTION == "create" or ACTION == "provision") and install_postgres

- name: Installing CodeReady Workspaces
  import_tasks: ./codeready.yml
  become: false
  when: (ACTION == "create" or ACTION == "provision") and install_codeready

@@ -0,0 +1,5 @@
---
- name: "Create Pipelines Subscription"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'pipelines/subscription.yml.j2') }}"
@@ -1,8 +1,6 @@
---
# Implement your Post Workload deployment tasks here

# Leave this as the last task in the playbook.
- name: post_workload tasks complete
  debug:
    msg: "Post-Workload tasks completed successfully."
  when: not silent|bool
@@ -0,0 +1,15 @@
---

- name: "Create Postgresql Subscription"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'postgresql/subscription.yml.j2') }}"
vars:
namespace: "{{ user_project }}"

# TODO: set up Postgresql

- name: Postgres tasks complete
debug:
msg: "Open Data Hub Tasks completed successfully."
when: not silent|bool
@@ -0,0 +1,23 @@
---
- name: Set state
  set_fact:
    state: present
  when: not workload_destroy|bool

- name: Set state
  set_fact:
    state: absent
  when: workload_destroy|bool

- name: Ensure python-openshift is installed
  yum:
    name: python-openshift
    state: present
  become: yes
  become_method: sudo

# Leave this as the last task in the playbook.
- name: pre_workload tasks complete
  debug:
    msg: "Pre-Workload tasks completed successfully."
  when: not silent|bool
@@ -6,4 +6,4 @@
- "./workload.yml"
- "./post_workload.yml"
vars:
  ocs_operator_workload_destroy: yes
@@ -0,0 +1,30 @@
---
- name: "Create Knative Subscription"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'serverless/subscription.yml.j2') }}"

- name: "Create 'knative-serving' Project"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'general/namespace.yml.j2') }}"
vars:
namespace: knative-serving

- name: "Wait for Knative CRD's to exist"
k8s_facts:
api_version: "apiextensions.k8s.io/v1beta1"
kind: CustomResourceDefinition
name: "{{ item }}"
loop: "{{ och_expected_crds }}"
register: crds
until: crds.resources|length > 0
retries: 30
delay: 10

- name: "Create Knative instance"
k8s:
state: "{{ state }}"
definition: "{{ lookup('template', 'serverless/knative.yml.j2') }}"
vars:
namespace: knative-serving