diff --git a/deployments/aglais/bin/aglais-test.sh b/deployments/aglais/bin/aglais-test.sh
new file mode 100755
index 00000000..8b74bee5
--- /dev/null
+++ b/deployments/aglais/bin/aglais-test.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+
+echo "---- ----"
+echo "date [$(date)]"
+echo "host [$(hostname)]"
+echo "PATH [${PATH}]"
+echo "---- ----"
+
+
diff --git a/deployments/aglais/bin/create-user-tools.sh b/deployments/aglais/bin/create-user-tools.sh
new file mode 100755
index 00000000..5f259405
--- /dev/null
+++ b/deployments/aglais/bin/create-user-tools.sh
@@ -0,0 +1,63 @@
+#!/bin/sh
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# -----------------------------------------------------
+# Settings ...
+
+# set -eu
+# set -o pipefail
+#
+# binfile="$(basename ${0})"
+# binpath="$(dirname $(readlink -f ${0}))"
+# treetop="$(dirname $(dirname ${binpath}))"
+#
+# echo ""
+# echo "---- ---- ----"
+# echo "File [${binfile}]"
+# echo "Path [${binpath}]"
+# echo "Tree [${treetop}]"
+# echo "---- ---- ----"
+#
+
+
+ # get the next available uid
+ # https://www.commandlinefu.com/commands/view/5684/determine-next-available-uid
+ getnextuid()
+ {
+ getent passwd | awk -F: '($3>600) && ($3<60000) && ($3>maxuid) { maxuid=$3; } END { print maxuid+1; }'
+ }
+
+
+ # Generate a new password hash.
+ newpasshash()
+ {
+ local password="${1:?}"
+ java \
+ -jar "${HOME}/lib/shiro-tools-hasher.jar" \
+ -i 500000 \
+ -f shiro1 \
+ -a SHA-256 \
+ -gss 128 \
+            "${password:?}"
+ }
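+
+    # Example usage - a sketch only, assuming the hasher jar has been installed
+    # at ${HOME}/lib/shiro-tools-hasher.jar as above (the password value here is
+    # purely illustrative):
+    #
+    #   uid="$(getnextuid)"
+    #   hash="$(newpasshash 'example-password')"
+    #   echo "uid  [${uid}]"
+    #   echo "hash [${hash}]"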
+
diff --git a/deployments/common/users/test-users.yml b/deployments/common/users/test-users.yml
new file mode 100644
index 00000000..d7aacdcb
--- /dev/null
+++ b/deployments/common/users/test-users.yml
@@ -0,0 +1,51 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+test-users:
+
+ - name: "Nelia"
+ uuid: "5cf0cf95-157e-4a40-b95e-b163f22c2d92"
+ uid: 2050
+ gid: 2050
+ home: "/home/Nelia"
+ data:
+ path: "/user/Nelia"
+ size: "20"
+
+ - name: "Ghoria"
+ uuid: "237983b5-a21f-47c8-8fb3-80cbbc70ba56"
+ uid: 2051
+ home: "/home/Ghoria"
+ data:
+ path: "/user/Ghoria"
+
+ - name: "Nalla"
+ uuid: "65c7aeb1-3c2a-43b7-acc0-8c4497997c70"
+ uid: 2052
+ home: "/home/Nalla"
+
+ - name: "Wenia"
+ uid: 2053
+
+ - name: "Ava"
+
+
diff --git a/deployments/common/zeppelin/sql/auth-test.sql b/deployments/common/zeppelin/sql/auth-test.sql
index 6ac4ae3c..502d0e79 100644
--- a/deployments/common/zeppelin/sql/auth-test.sql
+++ b/deployments/common/zeppelin/sql/auth-test.sql
@@ -1,10 +1,10 @@
USE zeppelin;
-CREATE TABLE users (username TEXT, password TEXT, password_salt TEXT);
-CREATE TABLE user_roles (username TEXT, role_name TEXT);
-CREATE TABLE user_permissions (username TEXT, permission TEXT);
-GRANT ALL PRIVILEGES ON zeppelin.users TO 'zeppelin'@'localhost';
-GRANT ALL PRIVILEGES ON zeppelin.user_roles TO 'zeppelin'@'localhost';
-GRANT ALL PRIVILEGES ON zeppelin.user_permissions TO 'zeppelin'@'localhost';
+--CREATE TABLE users (username TEXT, password TEXT, password_salt TEXT);
+--CREATE TABLE user_roles (username TEXT, role_name TEXT);
+--CREATE TABLE user_permissions (username TEXT, permission TEXT);
+--GRANT ALL PRIVILEGES ON zeppelin.users TO 'zeppelin'@'localhost';
+--GRANT ALL PRIVILEGES ON zeppelin.user_roles TO 'zeppelin'@'localhost';
+--GRANT ALL PRIVILEGES ON zeppelin.user_permissions TO 'zeppelin'@'localhost';
# Create test users
diff --git a/deployments/common/zeppelin/sql/auth.sql b/deployments/common/zeppelin/sql/auth.sql
index da9b948a..10898ff6 100644
--- a/deployments/common/zeppelin/sql/auth.sql
+++ b/deployments/common/zeppelin/sql/auth.sql
@@ -1,10 +1,10 @@
USE zeppelin;
-CREATE TABLE users (username TEXT, password TEXT, password_salt TEXT);
-CREATE TABLE user_roles (username TEXT, role_name TEXT);
-CREATE TABLE user_permissions (username TEXT, permission TEXT);
-GRANT ALL PRIVILEGES ON zeppelin.users TO 'zeppelin'@'localhost';
-GRANT ALL PRIVILEGES ON zeppelin.user_roles TO 'zeppelin'@'localhost';
-GRANT ALL PRIVILEGES ON zeppelin.user_permissions TO 'zeppelin'@'localhost';
+--CREATE TABLE users (username TEXT, password TEXT, password_salt TEXT);
+--CREATE TABLE user_roles (username TEXT, role_name TEXT);
+--CREATE TABLE user_permissions (username TEXT, permission TEXT);
+--GRANT ALL PRIVILEGES ON zeppelin.users TO 'zeppelin'@'localhost';
+--GRANT ALL PRIVILEGES ON zeppelin.user_roles TO 'zeppelin'@'localhost';
+--GRANT ALL PRIVILEGES ON zeppelin.user_permissions TO 'zeppelin'@'localhost';
INSERT INTO users (username, password) VALUES ('gaiauser', '$shiro1$SHA-256$500...........R0GxWVAH028tjMyIkbKmMDW2E0=');
diff --git a/deployments/hadoop-yarn/ansible/10-install-aglais.yml b/deployments/hadoop-yarn/ansible/10-install-aglais.yml
new file mode 100644
index 00000000..09604b0b
--- /dev/null
+++ b/deployments/hadoop-yarn/ansible/10-install-aglais.yml
@@ -0,0 +1,93 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+
+---
+- name: "Install Aglais tools"
+ hosts: zeppelin
+ gather_facts: false
+ vars_files:
+ - config/aglais.yml
+ - config/ansible.yml
+ - config/zeppelin.yml
+ - /tmp/ansible-vars.yml
+ tasks:
+
+ - name: "Create Aglais directory [{{aghome}}]"
+ become: true
+ ansible.builtin.file:
+ path: '{{aghome}}'
+ owner: 'root'
+ group: 'root'
+ state: directory
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Create Aglais bin directory [{{aghome}}/bin]"
+ become: true
+ ansible.builtin.file:
+ path: '{{aghome}}/bin'
+ owner: 'root'
+ group: 'root'
+ state: directory
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Create Aglais lib directory [{{aghome}}/lib]"
+ become: true
+ ansible.builtin.file:
+ path: '{{aghome}}/lib'
+ owner: 'root'
+ group: 'root'
+ state: directory
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Create Aglais tmp directory [{{aghome}}/tmp]"
+ become: true
+ ansible.builtin.file:
+ path: '{{aghome}}/tmp'
+ owner: "{{zepuser}}"
+ group: "{{zepuser}}"
+ state: directory
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Copy Aglais tools to [{{aghome}}/bin]"
+ become: true
+ ansible.builtin.copy:
+ src: '{{playbook_dir | dirname | dirname }}/aglais/bin/'
+ dest: '{{aghome}}/bin'
+ owner: 'root'
+ group: 'root'
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Create [/etc/profile.d/aglais.sh]"
+ become: true
+ ansible.builtin.blockinfile:
+ dest: '/etc/profile.d/aglais.sh'
+ state: present
+ owner: 'root'
+ group: 'root'
+ mode: 'u=rw,g=r,o=r'
+ create: true
+ insertafter: 'EOF'
+ marker: '# {mark} Ansible managed configuration'
+ block: |
+ export PATH=${PATH}:{{aghome}}/bin
+
diff --git a/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml b/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
index 04aa3da8..fe751aaa 100644
--- a/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+++ b/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
@@ -37,7 +37,7 @@
         <name>zeppelin.server.addr</name>
-        <value>{{ hostvars['zeppelin'].ansible_default_ipv4.address }}</value>
+        <value>0.0.0.0</value>
         <description>Server binding address</description>
@@ -218,12 +218,18 @@
tasks:
- - name: "Install Zeppelin"
+
+ - name: "Download and install Zeppelin"
unarchive:
- src: "https://downloads.apache.org/zeppelin/{{zepname}}/{{zepname}}-bin-all.tgz"
+ src: "https://downloads.apache.org/zeppelin/zeppelin-{{zepvers}}/{{zepname}}.tgz"
dest: "{{zepbase}}"
remote_src: true
+ - name: "Symlink Zeppelin"
+ ansible.builtin.file:
+ src: "{{zepbase}}/{{zepname}}"
+ dest: "{{zepbase}}/zeppelin"
+ state: link
- name: "Create [/etc/profile.d/zeppelin.sh]"
become: true
@@ -274,6 +280,20 @@
group: "{{zepuser}}"
mode: 0775
+ - name: "Create [/etc/profile.d/zeppelin.sh]"
+ become: true
+ ansible.builtin.blockinfile:
+ dest: '/etc/profile.d/zeppelin.sh'
+ state: present
+ owner: 'root'
+ group: 'root'
+ mode: 'u=rw,g=r,o=r'
+ create: true
+ insertafter: 'EOF'
+ marker: '# {mark} Ansible managed configuration'
+ block: |
+ export PATH=${PATH}:{{zephome}}/bin
+
- name: "Add group for Zeppelin Users"
become: true
command: groupadd {{ zepusersgroup }}
diff --git a/deployments/hadoop-yarn/ansible/38-install-user-db.yml b/deployments/hadoop-yarn/ansible/38-install-user-db.yml
index 57c38c05..2b17463c 100644
--- a/deployments/hadoop-yarn/ansible/38-install-user-db.yml
+++ b/deployments/hadoop-yarn/ansible/38-install-user-db.yml
@@ -20,45 +20,29 @@
#
#
-- name: "Get Zeppelin IP Address"
- hosts: localhost
- vars_files:
- - config/ansible.yml
- - /tmp/ansible-vars.yml
- - config/openstack.yml
-
- tasks:
-
- - name: "Discover our Zeppelin node and store IP address in temp file"
- os_server_info:
- cloud: "{{ cloudname }}"
- server: "{{ deployname }}-zeppelin"
- register:
- zeppelinnode
-
- - local_action: copy content={{ zeppelinnode.openstack_servers[0].accessIPv4 }} dest=/tmp/zeppelin_ip.txt
-
-
+---
- name: "Install MySQL Database for Zeppelin/Shiro"
hosts: zeppelin
become: true
- gather_facts: true
+
vars_files:
+ - config/shiro.yml
+ - config/aglais.yml
- config/ansible.yml
- - config/hadoop.yml
- - config/spark.yml
- - /tmp/ansible-vars.yml
- config/zeppelin.yml
- /tmp/ansible-vars.yml
+
vars:
- zepipaddress: "{{ lookup('file', '/tmp/zeppelin_ip.txt') | trim }}"
- zeppelinshiro: |
+
+ mariadbjavaversion: "3.0.4"
+ shirohasherversion: "1.9.0"
+
+ shirodbconfig: |
[main]
- ds = com.mysql.cj.jdbc.MysqlDataSource
- ds.serverName = localhost
- ds.databaseName = zeppelin
- ds.user = zeppelin
- ds.password = {{ mysql_zeppelin_password }}
+ ds = org.mariadb.jdbc.MariaDbDataSource
+ ds.url = jdbc:mariadb://localhost:3306/{{shirodbname}}
+ ds.user = {{shirodbuser}}
+ ds.password = {{shirodbpass}}
jdbcRealm = org.apache.shiro.realm.jdbc.JdbcRealm
ps = org.apache.shiro.authc.credential.DefaultPasswordService
@@ -93,97 +77,89 @@
tasks:
- set_fact:
- mysql_root_password: "{{ lookup('password','/dev/null chars=ascii_letters,digits,hexdigits length=20') }}!"
- mysql_zeppelin_password: "{{ lookup('password','/dev/null chars=ascii_letters,digits,hexdigits length=20') }}!"
+ shirodbpass: "{{ lookup('password','/dev/null chars=ascii_letters,digits,hexdigits length=20') }}!"
- - name: "Enable MySQL Community release repo"
- yum:
- disable_gpg_check: True
- name: https://repo.mysql.com//mysql80-community-release-fc31-1.noarch.rpm
- state: present
+ - name: "Install MariaDB connector"
+ get_url:
+ url: https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/{{mariadbjavaversion}}/mariadb-java-client-{{mariadbjavaversion}}.jar
+ dest: "{{zephome}}/lib/"
- - name: "Get MySQL Connector jar"
+ - name: "Download Shiro password hasher"
get_url:
- url: https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.28/mysql-connector-java-8.0.28.jar
- dest: "{{zephome}}/lib/mysql-connector-java-8.0.28.jar"
+ url: "https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/{{shirohasherversion}}/shiro-tools-hasher-{{shirohasherversion}}-cli.jar"
+ dest: "{{aghome}}/lib/"
- - name: "Install MySQL Server"
- yum: name=mysql-server state=installed
+ - name: "Symlink Shiro password hasher"
+ ansible.builtin.file:
+ src: "{{aghome}}/lib//shiro-tools-hasher-{{shirohasherversion}}-cli.jar"
+ dest: "{{aghome}}/lib//shiro-tools-hasher-cli.jar"
+ state: link
- - name: "Install MySQL-devel"
- yum: name=mysql-devel state=installed
+ - name: "Install MariaDB server"
+ dnf:
+ name: "mariadb-server"
+ state: "installed"
- - name: "Make sure pymysql is present"
+ - name: "Install PyMySQL"
pip:
- name: pymysql
- state: present
-
- - name: "reload systemd"
- command: systemctl daemon-reload
-
- - name: "Start the MySQL service"
- service:
- name: mysqld
- state: started
- enabled: yes
+ name: "pymysql"
+ state: "present"
+
+ - name: "Start the MariaDB service"
+ service:
+ name: "mariadb"
+ state: "started"
+ enabled: true
daemon_reload: yes
- - name: "Find temporary password"
- shell: "echo `grep 'temporary.*root@localhost' /var/log/mysqld.log | sed
-'s/.*root@localhost: //'`"
- register: mysql_root_password_temp
- tags: register
-
- - name: "Update expired root user password"
- shell: 'mysql -e "ALTER USER ''root''@''localhost'' IDENTIFIED WITH mysql_native_password BY ''{{ mysql_root_password }}''" --connect-expired-password -uroot -p"{{ mysql_root_password_temp.stdout }}"'
+# Root password is not needed if connecting via Unix socket
+# https://mariadb.com/kb/en/authentication-from-mariadb-104/
- - name: "Create MySQL config"
+ - name: "Create MariaDB config for fedora"
become: true
blockinfile:
- dest: '/root/.my.cnf'
+ dest: '/home/fedora/.my.cnf'
state: present
- owner: 'root'
- group: 'root'
+ owner: 'fedora'
+ group: 'fedora'
mode: 'u=rw,g=r,o=r'
create: true
insertafter: 'EOF'
block: |
[client]
- user=root
- password={{ mysql_root_password }}
- [client2]
- user=root
- password={{ mysql_zeppelin_password }}
- [mysqld]
- bind-address = 0.0.0.0
-
- - name: "Create Zeppelin database"
- mysql_db: name=zeppelin state=present login_user=root login_password={{ mysql_root_password }}
-
- - name: "Create MySQL Zeppelin user with appropriate privileges"
- mysql_user:
- name: zeppelin
- password: '{{ mysql_zeppelin_password }}'
- priv: "zeppelin.*:ALL"
- state: present
+ user={{shirodbuser}}
+ password={{shirodbpass}}
+ database={{shirodbname}}
- - name: "Create a Zeppelin Shiro Configuration"
+ - name: "Create MariaDB database [{{shirodbname}}]"
+ become: true
+ mysql_db:
+ name: "{{shirodbname}}"
+ state: 'present'
+
+ - name: "Create MariaDB user [{{shirodbuser}}]"
+ become: true
+ mysql_user:
+ name: "{{shirodbuser}}"
+ password: "{{shirodbpass}}"
+ priv: "{{shirodbname}}.*:ALL"
+ state: "present"
+
+ - name: "Create Shiro Configuration"
copy:
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rw,g=r,o=r'
dest: "{{zephome}}/conf/shiro.ini"
- content: "{{ zeppelinshiro }}"
+ content: "{{ shirodbconfig }}"
tags:
- always
- - name: "Copy Create SQL file to Zeppelin"
- copy: src="{{ playbook_dir | dirname | dirname }}/common/zeppelin/sql/create.sql" dest=/tmp
-
- - name: "Create User SQL database"
- mysql_db: name=zeppelin state=import target=/tmp/create.sql
+ - name: "Create Shiro tables"
+ mysql_query:
+ login_db: "{{shirodbname}}"
+ query:
+ - CREATE TABLE users (username TEXT, password TEXT, password_salt TEXT)
+ - CREATE TABLE user_roles (username TEXT, role_name TEXT)
+ - CREATE TABLE user_permissions (username TEXT, permission TEXT)
- - name: "Download shiro hasher jar"
- get_url:
- url: https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
- dest: "{{zephome}}/lib/shiro-tools-hasher-1.9.0-cli.jar"
diff --git a/deployments/hadoop-yarn/ansible/39-create-user-scripts.yml b/deployments/hadoop-yarn/ansible/39-create-user-scripts.yml
index 3d2f3f23..16913bb3 100644
--- a/deployments/hadoop-yarn/ansible/39-create-user-scripts.yml
+++ b/deployments/hadoop-yarn/ansible/39-create-user-scripts.yml
@@ -20,46 +20,24 @@
#
#
-- name: "Get Zeppelin IP Address"
- hosts: localhost
- vars_files:
- - config/ansible.yml
- - /tmp/ansible-vars.yml
- - config/openstack.yml
-
- tasks:
-
- - name: "Discover our Zeppelin node and store IP address in temp file"
- os_server_info:
- cloud: "{{ cloudname }}"
- server: "{{ deployname }}-zeppelin"
- register:
- zeppelinnode
-
- - local_action: copy content={{ zeppelinnode.openstack_servers[0].accessIPv4 }} dest=/tmp/zeppelin_ip.txt
-
-
- name: "Install MySQL Database for Zeppelin/Shiro"
hosts: zeppelin
become: true
gather_facts: true
vars_files:
+ - config/shiro.yml
+ - config/aglais.yml
- config/ansible.yml
- config/hadoop.yml
- - config/spark.yml
- - /tmp/ansible-vars.yml
- config/zeppelin.yml
- /tmp/ansible-vars.yml
vars:
- zepipaddress: "{{ lookup('file', '/tmp/zeppelin_ip.txt') | trim }}"
export_users: |
#!/bin/bash
printf "Exporting users to file: auth.sql"
printf "\n"
- DB_USER='zeppelin'
- DB_NAME='zeppelin'
- mysqldump $DB_NAME -u root --no-create-info > {{zephome}}/auth.sql
+ mysqldump --no-create-info --databases "{{shirodbname}}" > {{aghome}}/tmp/auth.sql
create_hdfs_user: |
#!/bin/bash
@@ -76,19 +54,16 @@
NEW_PASSWORD=$2
NEW_USER_ROLE=$3
- DB_USER='zeppelin'
- DB_NAME='zeppelin'
USER_TABLE='users';
USER_ROLES_TABLE='user_roles'
-
- NEW_PASSWORD_ENCRYPTED="$(java -jar {{zephome}}/lib/shiro-tools-hasher-1.9.0-cli.jar -i 500000 -f shiro1 -a SHA-256 -gss 128 $NEW_PASSWORD)"
-
- mysql --user=$DB_USER $DB_NAME << EOF
+ NEW_PASSWORD_ENCRYPTED="$(java -jar {{aghome}}/lib/shiro-tools-hasher-cli.jar -i 500000 -f shiro1 -a SHA-256 -gss 128 $NEW_PASSWORD)"
+
+ mysql {{shirodbname}} << EOF
INSERT INTO $USER_TABLE (username, password) VALUES ("$NEW_USERNAME", "$NEW_PASSWORD_ENCRYPTED");
INSERT INTO $USER_ROLES_TABLE (username, role_name) VALUES ("$NEW_USERNAME", "$NEW_USER_ROLE");
EOF
-
+
create_unix_user: |
#!/bin/bash
# Add user
@@ -101,12 +76,12 @@
sudo su -c "cat /home/{{ zepuser }}/.ssh/id_rsa.pub >> $NEW_UNIX_DIR/.ssh/authorized_keys"
sudo usermod -a -G {{ zepusersgroup }} $NEW_USERNAME
sudo chown -R $NEW_USERNAME:$NEW_USERNAME $NEW_UNIX_DIR/.ssh
-
+
create_notebook_clone: |
#!/bin/bash
NEW_USERNAME=$1
NEW_PASSWORD=$2
- ZEPPELIN_URL=$3
+ ZEPPELIN_URL='http://localhost:8080'
# Setup Clone of notebooks
NEW_ZEPPELIN_DIR="{{zephome}}/notebook/Users/$NEW_USERNAME"
@@ -116,43 +91,48 @@
sudo chown -R {{ zepuser }}:{{ zepuser }} $NEW_ZEPPELIN_DIR
zepcookies=/tmp/${NEW_USERNAME}.cookies
-
+
curl \
--silent \
--request 'POST' \
--cookie-jar "${zepcookies:?}" \
--data "userName=${NEW_USERNAME:?}" \
--data "password=${NEW_PASSWORD:?}" \
- ${ZEPPELIN_URL:?}/api/login
+ ${ZEPPELIN_URL:?}/api/login
curl --silent --cookie "${zepcookies:?}" "${ZEPPELIN_URL:?}/api/notebook"| jq -r '.body[] | select(.path | startswith("/Public")) | [.id, .path] | @tsv' |
while IFS=$'\t' read -r id path; do
curl -L -H 'Content-Type: application/json' -d "{'name': '${path/Public Examples/Users/$NEW_USERNAME}' }" --request POST --cookie "${zepcookies:?}" $ZEPPELIN_URL/api/notebook/$id
done
-
add_user: |
#!/bin/bash
printf "Create a new Zeppelin user"
printf "\n"
printf "Username: "
read NEW_USERNAME
-
+
stty -echo
printf "Password: "
read NEW_PASSWORD
stty echo
-
+
printf "\n"
printf "User role: "
read NEW_USER_ROLE
- ZEPPELIN_URL="http://{{ zepipaddress }}:8080"
+ source {{aghome}}/bin/create_user.sh $NEW_USERNAME $NEW_PASSWORD $NEW_USER_ROLE
+
+ create_user: |
+ #!/bin/bash
+ NEW_USERNAME=$1
+ NEW_PASSWORD=$2
+ NEW_USER_ROLE=$3
- source {{zephome}}/bin/create_mysql_user.sh $NEW_USERNAME $NEW_PASSWORD $NEW_USER_ROLE
- source {{zephome}}/bin/create_unix_user.sh $NEW_USERNAME
- source {{zephome}}/bin/create_hdfs_user.sh $NEW_USERNAME
- source {{zephome}}/bin/create_notebook_clone.sh $NEW_USERNAME $NEW_PASSWORD $ZEPPELIN_URL
+ source {{aghome}}/bin/create_mysql_user.sh $NEW_USERNAME $NEW_PASSWORD $NEW_USER_ROLE
+ source {{aghome}}/bin/create_unix_user.sh $NEW_USERNAME
+ source {{aghome}}/bin/create_hdfs_user.sh $NEW_USERNAME
+ source {{aghome}}/bin/create_notebook_clone.sh $NEW_USERNAME $NEW_PASSWORD
tasks:
@@ -161,7 +141,7 @@
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/export_users.sh"
+ dest: "{{aghome}}/bin/export_users.sh"
content: "{{ export_users }}"
tags:
- always
@@ -171,17 +151,27 @@
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/add_user.sh"
+ dest: "{{aghome}}/bin/add_user.sh"
content: "{{ add_user }}"
tags:
- always
+ - name: "Script to create a new user"
+ copy:
+ owner: "{{zepuser}}"
+ group: "{{zepuser}}"
+ mode: 'u=rwx,g=rwx,o=rwx'
+ dest: "{{aghome}}/bin/create_user.sh"
+ content: "{{ create_user }}"
+ tags:
+ - always
+
- name: "Script to add a unix user"
copy:
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/create_unix_user.sh"
+ dest: "{{aghome}}/bin/create_unix_user.sh"
content: "{{ create_unix_user }}"
tags:
- always
@@ -191,7 +181,7 @@
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/create_hdfs_user.sh"
+ dest: "{{aghome}}/bin/create_hdfs_user.sh"
content: "{{ create_hdfs_user }}"
tags:
- always
@@ -201,7 +191,7 @@
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/create_mysql_user.sh"
+ dest: "{{aghome}}/bin/create_mysql_user.sh"
content: "{{ create_mysql_user }}"
tags:
- always
@@ -211,7 +201,7 @@
owner: "{{zepuser}}"
group: "{{zepuser}}"
mode: 'u=rwx,g=rwx,o=rwx'
- dest: "{{zephome}}/bin/create_notebook_clone.sh"
+ dest: "{{aghome}}/bin/create_notebook_clone.sh"
content: "{{ create_notebook_clone }}"
tags:
- always
diff --git a/deployments/hadoop-yarn/ansible/41-config-zeppelin-ssh.yml b/deployments/hadoop-yarn/ansible/41-config-zeppelin-ssh.yml
new file mode 100644
index 00000000..f6a07869
--- /dev/null
+++ b/deployments/hadoop-yarn/ansible/41-config-zeppelin-ssh.yml
@@ -0,0 +1,82 @@
+#
+#
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+- name: "Configure Zeppelin ssh "
+ hosts: zeppelin
+ gather_facts: false
+ vars_files:
+ - config/aglais.yml
+ - config/ansible.yml
+ - config/zeppelin.yml
+ - /tmp/ansible-vars.yml
+
+ tasks:
+
+ - name: "Create the Zeppelin user's SSH directory"
+ become: true
+ ansible.builtin.file:
+ path: "{{zepuserhome}}/.ssh"
+ owner: "{{zepuser}}"
+ group: "{{zepuser}}"
+ state: directory
+ mode: 'u=rwx,g=,o='
+
+ # https://docs.ansible.com/ansible/latest/collections/community/crypto/openssh_keypair_module.html
+ - name: "Generate the Zeppelin user's SSH keypair"
+ become: true
+ community.crypto.openssh_keypair:
+ path: "{{zepuserhome}}/.ssh/id_ssh_rsa"
+ owner: "{{zepuser}}"
+
+ - name: "Create our public keys directory [{{aghome}}/ssh]"
+ become: true
+ ansible.builtin.file:
+ path: "{{aghome}}/ssh"
+ owner: 'root'
+ group: 'root'
+ state: directory
+ mode: 'u=rwx,g=rwx,o=rx'
+
+ - name: "Publish the Zeppelin user's public key"
+ become: true
+ ansible.builtin.copy:
+ remote_src: true
+ src: "{{zepuserhome}}/.ssh/id_ssh_rsa.pub"
+ dest: "{{aghome}}/ssh/ssh-{{zepuser}}.pub"
+ owner: 'root'
+ group: 'root'
+ mode: 'a=r'
+
+ - name: "Add the Zeppelin user's public key to authorized_keys"
+ become: true
+ ansible.builtin.shell: |
+ cat "{{zepuserhome}}/.ssh/id_ssh_rsa.pub" \
+ >> "{{zepuserhome}}/.ssh/authorized_keys"
+
+ - name: "Add the localhost public keys to known_hosts"
+ become: true
+ ansible.builtin.shell: |
+ ssh-keyscan 'localhost' >> "{{zepuserhome}}/.ssh//known_hosts"
+
+
+
+
diff --git a/deployments/hadoop-yarn/ansible/config/aglais.yml b/deployments/hadoop-yarn/ansible/config/aglais.yml
new file mode 100644
index 00000000..3b36be21
--- /dev/null
+++ b/deployments/hadoop-yarn/ansible/config/aglais.yml
@@ -0,0 +1,28 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# Aglais vars
+agbase: "/opt"
+aghome: "/opt/aglais"
+
+
+
diff --git a/deployments/hadoop-yarn/ansible/config/shiro.yml b/deployments/hadoop-yarn/ansible/config/shiro.yml
new file mode 100644
index 00000000..9cf2b323
--- /dev/null
+++ b/deployments/hadoop-yarn/ansible/config/shiro.yml
@@ -0,0 +1,28 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# Shiro database settings
+shirodbname: "shirodata"
+shirodbuser: "shirouser"
+
+
+
diff --git a/deployments/hadoop-yarn/ansible/config/zeppelin.yml b/deployments/hadoop-yarn/ansible/config/zeppelin.yml
index aacd8772..da8d1dad 100644
--- a/deployments/hadoop-yarn/ansible/config/zeppelin.yml
+++ b/deployments/hadoop-yarn/ansible/config/zeppelin.yml
@@ -21,10 +21,14 @@
#
# Zeppelin vars
-zepname: "zeppelin-0.10.0"
+zepvers: "0.10.0"
+zepname: "zeppelin-0.10.0-bin-all"
zepbase: "/home/fedora"
-zephome: "/home/fedora/zeppelin-0.10.0-bin-all"
+zephome: "/home/fedora/zeppelin"
zephost: "zeppelin"
zepuser: "fedora"
zepmavendest: "/var/local/zeppelin/maven"
zepusersgroup: "zeppelinusers"
+# Temp fix until we move Zeppelin
+zepuserhome: "/home/fedora"
+
diff --git a/deployments/hadoop-yarn/ansible/create-all.yml b/deployments/hadoop-yarn/ansible/create-all.yml
index 1a8dccd8..2845affd 100644
--- a/deployments/hadoop-yarn/ansible/create-all.yml
+++ b/deployments/hadoop-yarn/ansible/create-all.yml
@@ -39,6 +39,7 @@
- import_playbook: 09-cinder-volumes.yml
- import_playbook: 10-install-java.yml
+- import_playbook: 10-install-aglais.yml
- import_playbook: 11-install-hadoop.yml
- import_playbook: 12-config-hadoop-core.yml
@@ -63,6 +64,7 @@
- import_playbook: 27-install-zeppelin.yml
- import_playbook: 28-install-zeppelin-requirements.yml
- import_playbook: 35-config-zeppelin-interpreters.yml
+- import_playbook: 41-config-zeppelin-ssh.yml
- import_playbook: 29-install-pip-libs.yml
- import_playbook: 30-zeppelin-security.yml
diff --git a/deployments/hadoop-yarn/bin/cephfs-mount.sh b/deployments/hadoop-yarn/bin/cephfs-mount.sh
index 35eab58d..0125ce62 100755
--- a/deployments/hadoop-yarn/bin/cephfs-mount.sh
+++ b/deployments/hadoop-yarn/bin/cephfs-mount.sh
@@ -131,14 +131,31 @@
# Get details of the access rule.
# TODO Move this to an openstack script.
- accessrule=$(
- openstack \
- --os-cloud "${sharecloud:?}" \
- share access list \
- --format json \
- "${shareid:?}" \
- | jq -r '.[] | select(.access_level == "'${mountmode:?}'") | .id'
- )
+ #
+ # Yes, some numpty thought it was a good idea to change the JSON field names.
+ # Changing 'id' to 'ID', and 'access_level' to 'Access Level'.
+ # Possibly because they thought it would be pretty ?
+ # Waste of an afternoon chasing that down.
+ if [ "$(openstack --version)" == "openstack 5.8.0" ]
+ then
+ accessrule=$(
+ openstack \
+ --os-cloud "${sharecloud:?}" \
+ share access list \
+ --format json \
+ "${shareid:?}" \
+ | jq -r '.[] | select(."Access Level" == "'${mountmode:?}'") | .ID'
+ )
+ else
+ accessrule=$(
+ openstack \
+ --os-cloud "${sharecloud:?}" \
+ share access list \
+ --format json \
+ "${shareid:?}" \
+ | jq -r '.[] | select(.access_level == "'${mountmode:?}'") | .id'
+ )
+ fi
openstack \
--os-cloud "${sharecloud:?}" \
diff --git a/deployments/hadoop-yarn/bin/create-all.sh b/deployments/hadoop-yarn/bin/create-all.sh
index fbc5706a..e1da0df5 100755
--- a/deployments/hadoop-yarn/bin/create-all.sh
+++ b/deployments/hadoop-yarn/bin/create-all.sh
@@ -173,170 +173,35 @@
# -----------------------------------------------------
-# Mount the data shares.
-# Using a hard coded cloud name to make it portable.
-
- sharelist="${treetop:?}/common/manila/datashares.yaml"
- mountmode='ro'
- mounthost='zeppelin:masters:workers'
-
- for shareid in $(
- yq eval '.datashares.[].id' "${sharelist:?}"
- )
- do
- echo ""
- echo "Share [${shareid:?}]"
-
- sharecloud=$(
- yq eval ".datashares.[] | select(.id == \"${shareid:?}\").cloudname" "${sharelist:?}"
- )
- sharename=$(
- yq eval ".datashares.[] | select(.id == \"${shareid:?}\").sharename" "${sharelist:?}"
- )
- mountpath=$(
- yq eval ".datashares.[] | select(.id == \"${shareid:?}\").mountpath" "${sharelist:?}"
- )
-
- "${treetop:?}/hadoop-yarn/bin/cephfs-mount.sh" \
- "${inventory:?}" \
- "${sharecloud:?}" \
- "${sharename:?}" \
- "${mountpath:?}" \
- "${mounthost:?}" \
- "${mountmode:?}"
-
- done
+# Create our data shares.
-# -----------------------------------------------------
-# Add the data symlinks.
-# Needs to be done after the data shares have been mounted.
-
- pushd "/deployments/hadoop-yarn/ansible"
+ "${treetop:?}/hadoop-yarn/bin/create-data-shares.sh" \
+ "${cloudname:?}" \
+ "${deployconf:?}"
- ansible-playbook \
- --verbose \
- --verbose \
- --inventory "${inventory:?}" \
- "61-data-links.yml"
- popd
+# -----------------------------------------------------
+# Create our user shares.
+ "${treetop:?}/hadoop-yarn/bin/create-user-shares.sh" \
+ "${cloudname:?}" \
+ "${deployconf:?}"
# -----------------------------------------------------
-# Check the data shares.
-# Using a hard coded cloud name to make it portable.
-
- sharelist="${treetop:?}/common/manila/datashares.yaml"
- testhost=zeppelin
-
- for shareid in $(
- yq eval '.datashares.[].id' "${sharelist}"
- )
- do
-
- checkbase=$(
- yq eval ".datashares.[] | select(.id == \"${shareid}\").mountpath" "${sharelist}"
- )
- checknum=$(
- yq eval ".datashares.[] | select(.id == \"${shareid}\").checksums | length" "${sharelist}"
- )
-
- for (( i=0; i
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+ set -eu
+ set -o pipefail
+
+ binfile="$(basename ${0})"
+ binpath="$(dirname $(readlink -f ${0}))"
+ treetop="$(dirname $(dirname ${binpath}))"
+
+ echo ""
+ echo "---- ---- ----"
+ echo "File [${binfile}]"
+ echo "Path [${binpath}]"
+ echo "Tree [${treetop}]"
+
+ cloudbase='arcus'
+ cloudname="${1:?}"
+ deployconf="${2:?}"
+
+ inventory="${treetop:?}/hadoop-yarn/ansible/config/${deployconf:?}.yml"
+
+ echo "---- ---- ----"
+ echo "Deploy conf [${deployconf}]"
+ echo "---- ---- ----"
+
+# -----------------------------------------------------
+# Create the Shiro user database.
+
+ echo ""
+ echo "---- ----"
+ echo "Creating Shiro user database"
+
+ pushd "${treetop:?}/hadoop-yarn/ansible"
+
+ ansible-playbook \
+ --inventory "${inventory:?}" \
+ "38-install-user-db.yml"
+
+ popd
+
+# -----------------------------------------------------
+# Install the create-user scripts.
+
+ echo ""
+ echo "---- ----"
+ echo "Installing create-user scripts"
+
+ pushd "${treetop:?}/hadoop-yarn/ansible"
+
+ ansible-playbook \
+ --inventory "${inventory:?}" \
+ "39-create-user-scripts.yml"
+
+ popd
+
diff --git a/deployments/hadoop-yarn/bin/create-data-shares.sh b/deployments/hadoop-yarn/bin/create-data-shares.sh
new file mode 100755
index 00000000..ca2af188
--- /dev/null
+++ b/deployments/hadoop-yarn/bin/create-data-shares.sh
@@ -0,0 +1,166 @@
+#!/bin/sh
+#
+#
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# -----------------------------------------------------
+# Settings ...
+
+ set -eu
+ set -o pipefail
+
+ binfile="$(basename ${0})"
+ binpath="$(dirname $(readlink -f ${0}))"
+ treetop="$(dirname $(dirname ${binpath}))"
+
+ echo ""
+ echo "---- ---- ----"
+ echo "File [${binfile}]"
+ echo "Path [${binpath}]"
+ echo "Tree [${treetop}]"
+
+ cloudbase='arcus'
+ cloudname=${1:?}
+
+ deployconf="${2:?}"
+ inventory="${treetop:?}/hadoop-yarn/ansible/config/${deployconf:?}.yml"
+
+ echo "---- ---- ----"
+ echo "Cloud base [${cloudbase}]"
+ echo "Cloud name [${cloudname}]"
+ echo "---- ---- ----"
+ echo "Deploy conf [${deployconf}]"
+ echo "---- ---- ----"
+
+
+# -----------------------------------------------------
+# Mount the data shares.
+# Using a hard coded cloud name to make it portable.
+
+ sharelist="${treetop:?}/common/manila/datashares.yaml"
+ mountmode='ro'
+ mounthost='zeppelin:masters:workers'
+
+ for shareid in $(
+ yq eval '.datashares.[].id' "${sharelist:?}"
+ )
+ do
+ echo ""
+ echo "Share [${shareid:?}]"
+
+ sharecloud=$(
+ yq eval ".datashares.[] | select(.id == \"${shareid:?}\").cloudname" "${sharelist:?}"
+ )
+ sharename=$(
+ yq eval ".datashares.[] | select(.id == \"${shareid:?}\").sharename" "${sharelist:?}"
+ )
+ mountpath=$(
+ yq eval ".datashares.[] | select(.id == \"${shareid:?}\").mountpath" "${sharelist:?}"
+ )
+
+ "${treetop:?}/hadoop-yarn/bin/cephfs-mount.sh" \
+ "${inventory:?}" \
+ "${sharecloud:?}" \
+ "${sharename:?}" \
+ "${mountpath:?}" \
+ "${mounthost:?}" \
+ "${mountmode:?}"
+
+ done
+
+# -----------------------------------------------------
+# Add the data symlinks.
+# Needs to be done after the data shares have been mounted.
+
+ pushd "/deployments/hadoop-yarn/ansible"
+
+ ansible-playbook \
+ --inventory "${inventory:?}" \
+ "61-data-links.yml"
+
+ popd
+
+
+# -----------------------------------------------------
+# Check the data shares.
+# Using a hard coded cloud name to make it portable.
+
+ sharelist="${treetop:?}/common/manila/datashares.yaml"
+ testhost=zeppelin
+
+ for shareid in $(
+ yq eval '.datashares.[].id' "${sharelist}"
+ )
+ do
+
+ checkbase=$(
+ yq eval ".datashares.[] | select(.id == \"${shareid}\").mountpath" "${sharelist}"
+ )
+ checknum=$(
+ yq eval ".datashares.[] | select(.id == \"${shareid}\").checksums | length" "${sharelist}"
+ )
+
+ for (( i=0; i
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# -----------------------------------------------------
+# Settings ...
+
+ set -eu
+ set -o pipefail
+
+ binfile="$(basename ${0})"
+ binpath="$(dirname $(readlink -f ${0}))"
+ treetop="$(dirname $(dirname ${binpath}))"
+
+ echo ""
+ echo "---- ---- ----"
+ echo "File [${binfile}]"
+ echo "Path [${binpath}]"
+ echo "Tree [${treetop}]"
+
+ cloudbase='arcus'
+ cloudname=${1:?}
+
+ deployconf="${2:?}"
+ inventory="${treetop:?}/hadoop-yarn/ansible/config/${deployconf:?}.yml"
+
+
+ echo "---- ---- ----"
+ echo "Cloud base [${cloudbase}]"
+ echo "Cloud name [${cloudname}]"
+ echo "---- ---- ----"
+ echo "Deploy conf [${deployconf}]"
+ echo "---- ---- ----"
+
+
+# -----------------------------------------------------
+# Mount the user shares.
+# Using a hard coded cloud name to make it portable.
+
+ sharelist="${treetop:?}/common/manila/usershares.yaml"
+ mountmode='rw'
+ mounthost='zeppelin:masters:workers'
+
+ for shareid in $(
+ yq eval ".usershares.[].id" "${sharelist:?}"
+ )
+ do
+ echo ""
+ echo "Share [${shareid:?}]"
+
+ sharecloud=$(
+ yq eval ".usershares.[] | select(.id == \"${shareid:?}\").cloudname" "${sharelist:?}"
+ )
+ sharename=$(
+ yq eval ".usershares.[] | select(.id == \"${shareid:?}\").sharename" "${sharelist:?}"
+ )
+ mountpath=$(
+ yq eval ".usershares.[] | select(.id == \"${shareid:?}\").mountpath" "${sharelist:?}"
+ )
+
+ "${treetop:?}/hadoop-yarn/bin/cephfs-mount.sh" \
+ "${inventory:?}" \
+ "${sharecloud:?}" \
+ "${sharename:?}" \
+ "${mountpath:?}" \
+ "${mounthost:?}" \
+ "${mountmode:?}"
+
+ done
+
+
+
diff --git a/deployments/hadoop-yarn/bin/restart-zeppelin.sh b/deployments/hadoop-yarn/bin/restart-zeppelin.sh
index bd32dec8..b36599ad 100755
--- a/deployments/hadoop-yarn/bin/restart-zeppelin.sh
+++ b/deployments/hadoop-yarn/bin/restart-zeppelin.sh
@@ -20,6 +20,6 @@
ssh zeppelin \
'
- /home/fedora/zeppelin-0.10.0-bin-all/bin/zeppelin-daemon.sh restart
+ zeppelin-daemon.sh restart
'
diff --git a/deployments/hadoop-yarn/bin/start-zeppelin.sh b/deployments/hadoop-yarn/bin/start-zeppelin.sh
index 8eae502d..23c429ae 100755
--- a/deployments/hadoop-yarn/bin/start-zeppelin.sh
+++ b/deployments/hadoop-yarn/bin/start-zeppelin.sh
@@ -20,6 +20,6 @@
ssh zeppelin \
'
- /home/fedora/zeppelin-0.10.0-bin-all/bin/zeppelin-daemon.sh start
+ zeppelin-daemon.sh start
'
diff --git a/deployments/zeppelin/bin/create-user-tools.sh b/deployments/zeppelin/bin/create-user-tools.sh
new file mode 100755
index 00000000..cc71b6dc
--- /dev/null
+++ b/deployments/zeppelin/bin/create-user-tools.sh
@@ -0,0 +1,252 @@
+#!/bin/sh
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#
+
+# -----------------------------------------------------
+# Settings ...
+
+# set -eu
+# set -o pipefail
+#
+# binfile="$(basename ${0})"
+# binpath="$(dirname $(readlink -f ${0}))"
+# treetop="$(dirname $(dirname ${binpath}))"
+#
+# echo ""
+# echo "---- ---- ----"
+# echo "File [${binfile}]"
+# echo "Path [${binpath}]"
+# echo "Tree [${treetop}]"
+# echo "---- ---- ----"
+#
+
+ hadoopuid=5000
+ hadoopgid=5000
+
+ defaultsharesize=10
+
+ datahostname='data.aglais.uk'
+ datahostuser='fedora'
+
+ # Get the next available uid
+ # https://www.commandlinefu.com/commands/view/5684/determine-next-available-uid
+ # TODO Move this to the Zeppelin node.
+ getnextuid()
+ {
+ getent passwd | awk -F: '($3>600) && ($3<60000) && ($3>maxuid) { maxuid=$3; } END { print maxuid+1; }'
+ }
+
+ # Get the password hash for a user name.
+ # Calls 'getpasshash' on data project VM.
+ getpasshash()
+ {
+ local key="${1:?}"
+ ssh -n "${datahostuser:?}@${datahostname:?}" \
+ "
+ getpasshash '${key:?}'
+ "
+ }
+
+ # Generate a new password hash.
+ newpasshash()
+ {
+ local password="${1:?}"
+ java \
+ -jar "${HOME}/lib/shiro-tools-hasher.jar" \
+ -i 500000 \
+ -f shiro1 \
+ -a SHA-256 \
+ -gss 128 \
+            "${password:?}"
+ }
+
+ createshirouser()
+ {
+ local user="${1:?}"
+        local hash="$(getpasshash "${user}")"
+ local pass=''
+
+ if [ -z "${hash}" ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi
+
+ #
+ # Call to Zeppelin node to create the user account in the Shiro database.
+ #
+
+cat << EOF
+{
+"pass": "${pass}",
+"hash": "${hash}"
+}
+EOF
+ }
+
+ createlinuxuser()
+ {
+ local user="${1:?}"
+ local uid="${2}"
+ local gid="${3}"
+ local home="${4:-/home/${user}}"
+
+ #
+ # Call to Zeppelin node to create the Linux user account.
+ # The test for zero and the call to 'getnextuid' would be done on the Zeppelin node.
+ #
+
+ if [ -z ${uid} ]
+ then
+ uid=$(getnextuid)
+ fi
+ if [ -z ${gid} ]
+ then
+ gid=${uid}
+ fi
+
+
+cat << EOF
+{
+"name": "${user}",
+"uid": ${uid},
+"gid": ${gid},
+"home": "${home}"
+}
+EOF
+ }
+
+ createusershare()
+ {
+ local username="${1:?}"
+ local uid="${2:?}"
+ local gid="${3:?}"
+        local sharepath="${4:-/user/${username}}"
+ local sharesize="${5:-${defaultsharesize}}"
+ local sharename="user-data-${username}"
+ local shareuuid=$(uuidgen)
+
+ #
+ # Call to Openstack to create the share.
+ #
+
+ #
+ # Call to Zeppelin node to mount the share.
+ #
+
+cat << EOF
+{
+"name": "${sharename}",
+"uuid": "${shareuuid}",
+"path": "${sharepath}",
+"size": ${sharesize}
+}
+EOF
+ }
+
+
+ createusermain()
+ {
+ local user="${1:?}"
+ local uid="${2}"
+ local gid="${3}"
+ local home="${4}"
+ local data="${5}"
+ local size="${6}"
+
+ shirouserjson=$(
+ createshirouser \
+ "${user}"
+ )
+
+ linuxuserjson=$(
+ createlinuxuser \
+ "${user}" \
+ "${uid}" \
+ "${gid}" \
+ "${home}"
+ )
+
+ uid=$(jq -r '.uid' <<< ${linuxuserjson})
+ gid=$(jq -r '.gid' <<< ${linuxuserjson})
+
+ shareinfojson=$(
+ createusershare \
+ "${user}" \
+ "${uid}" \
+ "${hadoopgid}" \
+ "${data}" \
+ "${size}"
+ )
+
+cat << EOF
+{
+"linux": ${linuxuserjson},
+"shiro": ${shirouserjson},
+"share": ${shareinfojson}
+}
+EOF
+ }
+
+
+ createarrayusers()
+ {
+ local names=("$@")
+ local name
+ local comma=''
+ echo '{ "users": ['
+ for name in "${names[@]}"
+ do
+ echo "${comma}" ; comma=','
+ createusermain "${name}"
+ done
+ echo ']}'
+ }
+
+ createyamlusers()
+ {
+ local yamlfile=${1:?}
+ local yamlpath=${2:-'users'}
+ local comma=''
+
+ echo '{ "users": ['
+ while read -r userinfo
+ do
+ echo "${comma}" ; comma=','
+ createusermain \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uid // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.gid // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.home // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.data.path // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.data.size // empty')"
+ done <<< $(
+ yq -I 0 -o json '.'${yamlpath}'[]' \
+ "${yamlfile}"
+ )
+ echo ']}'
+ }
+
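+    # Example usage - a sketch only, assuming this file is sourced into a bash
+    # shell with 'jq', 'yq' and 'pwgen' available, and ssh access to the data host.
+    # The user names and the 'test-users' path come from
+    # deployments/common/users/test-users.yml.
+    #
+    #   source deployments/zeppelin/bin/create-user-tools.sh
+    #
+    #   createarrayusers 'Nelia' 'Ghoria' | jq '.'
+    #
+    #   createyamlusers 'deployments/common/users/test-users.yml' 'test-users' | jq '.'
+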
diff --git a/deployments/zeppelin/test/bin/rest-tests.sh b/deployments/zeppelin/bin/zeppelin-rest-tools.sh
similarity index 100%
rename from deployments/zeppelin/test/bin/rest-tests.sh
rename to deployments/zeppelin/bin/zeppelin-rest-tools.sh
diff --git a/notes/zrq/20220411-02-git-rebase.txt b/notes/zrq/20220411-02-git-rebase.txt
index a1276650..d5a2f911 100644
--- a/notes/zrq/20220411-02-git-rebase.txt
+++ b/notes/zrq/20220411-02-git-rebase.txt
@@ -69,20 +69,16 @@
popd
popd
---START--
-Updating 9881f35..6c6edec
-Fast-forward
- deployments/common/pip/requirements.txt | 2 +-
- notes/stv/20220405-test-deploy-01.txt | 203 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
- 2 files changed, 204 insertions(+), 1 deletion(-)
- create mode 100644 notes/stv/20220405-test-deploy-01.txt
---END--
-
---START--
-Total 0 (delta 0), reused 0 (delta 0), pack-reused 0
-To github.com:Zarquan/aglais.git
- 9881f35..6c6edec master -> master
---END--
+ > Updating 9881f35..6c6edec
+ > Fast-forward
+ > deployments/common/pip/requirements.txt | 2 +-
+ > notes/stv/20220405-test-deploy-01.txt | 203 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ > 2 files changed, 204 insertions(+), 1 deletion(-)
+ > create mode 100644 notes/stv/20220405-test-deploy-01.txt
+
+ > Total 0 (delta 0), reused 0 (delta 0), pack-reused 0
+ > To github.com:Zarquan/aglais.git
+ > 9881f35..6c6edec master -> master
# -----------------------------------------------------
@@ -96,63 +92,51 @@ To github.com:Zarquan/aglais.git
git checkout 20220331-zrq-infra-ops
---START--
-Switched to branch '20220331-zrq-infra-ops'
-Your branch is up to date with 'origin/20220331-zrq-infra-ops'.
---END--
+ > Switched to branch '20220331-zrq-infra-ops'
+ > Your branch is up to date with 'origin/20220331-zrq-infra-ops'.
git rebase master
---START--
-Successfully rebased and updated refs/heads/20220331-zrq-infra-ops.
---END--
+ > Successfully rebased and updated refs/heads/20220331-zrq-infra-ops.
git status
---START--
-On branch 20220331-zrq-infra-ops
-Your branch and 'origin/20220331-zrq-infra-ops' have diverged,
-and have 11 and 8 different commits each, respectively.
- (use "git pull" to merge the remote branch into yours)
---END--
+ > On branch 20220331-zrq-infra-ops
+ > Your branch and 'origin/20220331-zrq-infra-ops' have diverged,
+ > and have 11 and 8 different commits each, respectively.
+ > (use "git pull" to merge the remote branch into yours)
git pull
---START--
-Auto-merging deployments/infra-ops/ansible/templates/ssh/ssh-local-config.old
-Auto-merging deployments/infra-ops/ansible/create-all.yml
-Auto-merging deployments/infra-ops/ansible/config/aglais.yml
-Auto-merging deployments/infra-ops/ansible/10-base-deploy.yml
-Auto-merging deployments/infra-ops/ansible/06-test.yml
-Merge made by the 'recursive' strategy.
---END--
+ > Auto-merging deployments/infra-ops/ansible/templates/ssh/ssh-local-config.old
+ > Auto-merging deployments/infra-ops/ansible/create-all.yml
+ > Auto-merging deployments/infra-ops/ansible/config/aglais.yml
+ > Auto-merging deployments/infra-ops/ansible/10-base-deploy.yml
+ > Auto-merging deployments/infra-ops/ansible/06-test.yml
+ > Merge made by the 'recursive' strategy.
git status
---START--
-On branch 20220331-zrq-infra-ops
-Your branch is ahead of 'origin/20220331-zrq-infra-ops' by 12 commits.
- (use "git push" to publish your local commits)
---END--
+ > On branch 20220331-zrq-infra-ops
+ > Your branch is ahead of 'origin/20220331-zrq-infra-ops' by 12 commits.
+ > (use "git push" to publish your local commits)
git push
---START--
-Enumerating objects: 68, done.
-Counting objects: 100% (68/68), done.
-Delta compression using up to 4 threads
-Compressing objects: 100% (48/48), done.
-Writing objects: 100% (49/49), 10.79 KiB | 1.80 MiB/s, done.
-Total 49 (delta 30), reused 0 (delta 0), pack-reused 0
-remote: Resolving deltas: 100% (30/30), completed with 12 local objects.
-To github.com:Zarquan/aglais.git
- 510f6eb..c22fd4c 20220331-zrq-infra-ops -> 20220331-zrq-infra-ops
---END--
+ > Enumerating objects: 68, done.
+ > Counting objects: 100% (68/68), done.
+ > Delta compression using up to 4 threads
+ > Compressing objects: 100% (48/48), done.
+ > Writing objects: 100% (49/49), 10.79 KiB | 1.80 MiB/s, done.
+ > Total 49 (delta 30), reused 0 (delta 0), pack-reused 0
+ > remote: Resolving deltas: 100% (30/30), completed with 12 local objects.
+ > To github.com:Zarquan/aglais.git
+ > 510f6eb..c22fd4c 20220331-zrq-infra-ops -> 20220331-zrq-infra-ops
popd
@@ -186,14 +170,12 @@ To github.com:Zarquan/aglais.git
git status
---START--
-On branch 20220331-zrq-infra-ops
-Your branch is up to date with 'origin/20220331-zrq-infra-ops'.
-
-Untracked files:
- (use "git add ..." to include in what will be committed)
- notes/zrq/20220411-02-git-rebase.txt
---END--
+ > On branch 20220331-zrq-infra-ops
+ > Your branch is up to date with 'origin/20220331-zrq-infra-ops'.
+ >
+ > Untracked files:
+ > (use "git add ..." to include in what will be committed)
+ > notes/zrq/20220411-02-git-rebase.txt
git add notes/zrq/20220411-02-git-rebase.txt
diff --git a/notes/zrq/20220505-01-newbranch.txt b/notes/zrq/20220505-01-newbranch.txt
new file mode 100644
index 00000000..53bc2acb
--- /dev/null
+++ b/notes/zrq/20220505-01-newbranch.txt
@@ -0,0 +1,93 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ New branch to explore how we handle user accounts.
+
+ Result:
+
+ Work in progress
+
+
+# -----------------------------------------------------
+# Update from upstream.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git checkout master
+
+ git pull
+
+ git fetch upstream
+
+ git merge upstream/master
+
+ git status
+
+ git push
+
+ popd
+
+ > Already on 'master'
+ >
+ > Already up to date.
+ >
+ > Your branch is up to date with 'origin/master'.
+
+
+# -----------------------------------------------------
+# Create a new branch.
+#[user@desktop]
+
+ branchname=user-accounts
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ branchprev=$(git branch --show-current)
+ branchnext=$(date '+%Y%m%d')-zrq-${branchname:?}
+
+ git checkout master
+ git checkout -b "${branchnext:?}"
+
+ git push --set-upstream 'origin' "$(git branch --show-current)"
+
+ popd
+
+ > ....
+ > ....
+ > To github.com:Zarquan/aglais.git
+ > * [new branch] 20220505-zrq-user-accounts -> 20220505-zrq-user-accounts
+ > branch '20220505-zrq-user-accounts' set up to track 'origin/20220505-zrq-user-accounts'.
+
+
+
+
diff --git a/notes/zrq/20220505-02-user-accounts.txt b/notes/zrq/20220505-02-user-accounts.txt
new file mode 100644
index 00000000..3dbfbdeb
--- /dev/null
+++ b/notes/zrq/20220505-02-user-accounts.txt
@@ -0,0 +1,250 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+
+ Thoughts on how to handle user accounts.
+
+
+ Shell scripts to create and delete user accounts.
+
+ create-user ....
+ delete-user ....
+
+ Need to be safe.
+    Create script checks for an existing user and skips it.
+    Delete script checks for a safety catch, preventing us from deleting real accounts.
+
+    Almost everything about our users can be public.
+    The only real secrets are the password hashes.
+
+ YAML format for describing users
+
+ users:
+ - name: "albert"
+ uid: 2049,
+ uuid: "7ac3dc18-f53c-4076-88b2-bcf4171d5b76",
+ lock: true,
+ test: false,
+ home: "/home/albert",
+ data: "/user/albert"
+ pass:
+ hash:
+ algorithm: "SHA-256"
+ secret: "148518cb-1dc5-49df-a1b2-42cec8a3e547"
+
+ - name: "test-21"
+ uid: 3051,
+ uuid: "0a44662b-b3fa-4834-aa2e-fe49807412e9",
+ lock: false,
+ test: true,
+ home: "/home/test-21",
+ data: "/test/test-21"
+ pass:
+ text: "super secret"
+ hash:
+ algorithm: "SHA-256"
+ secret: "148518cb-1dc5-49df-a1b2-42cec8a3e547"
+
+ - name: "test-22"
+ uid: 3052,
+ uuid: "5a8c6ac4-5ddc-4546-a3bd-282979b9599e",
+ lock: false,
+ test: true,
+ home: "/home/test-22",
+ data: "/test/test-22"
+ pass:
+ text: "super secret"
+ hash:
+ algorithm: "SHA-256"
+ secret: "148518cb-1dc5-49df-a1b2-42cec8a3e547"
+
+
+ The password 'secret' is not the hash value itself, it is the index into our secret database.
+ To get Albert's password hash, we need to look it up in our secrets database.
+
+ password=$(
+ secret "148518cb-1dc5-49df-a1b2-42cec8a3e547"
+ )
+
+    This minimises the amount of data in the secrets database and would fit quite easily into the simple ssh-based secret function we have now.
+
+    If a user account doesn't have a password value or hash, then a new one is generated at deployment time and included in the JSON output returned by the function.
+
+ Deleting accounts needs lock=false and (test=true or interactive confirmation) to go ahead.
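+
+    A possible shape for that safety catch, as a sketch only (the function name
+    and arguments are illustrative, nothing is implemented yet):
+
+        deleteusercheck()
+            {
+            local name="${1:?}"
+            local lock="${2:?}"
+            local test="${3:?}"
+            # Locked accounts are never deleted.
+            if [ "${lock}" = "true" ]
+            then
+                return 1
+            fi
+            # Real (non-test) accounts need interactive confirmation.
+            if [ "${test}" != "true" ]
+            then
+                read -r -p "Delete real account [${name}] (yes/no)? " reply
+                [ "${reply}" = "yes" ] || return 1
+            fi
+            }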
+
+ Users need 2 data directories.
+ User's home directory needs to be private.
+    Does not need to be shared with worker nodes.
+    Space limited using quotas.
+
+ /home/
+
+ User data needs to be shared via Manila so that the worker nodes can access the data.
+
+ Checking this is the case:
+
+ fedora@iris-gaia-green-20220405-worker01
+
+ grep -r '/data/gaia' /var/hadoop/logs/*
+
+ > ....
+ > /var/hadoop/logs/application_1649158781282_0002/container_1649158781282_0002_01_000078/stderr:2022-04-05 13:17:11,957 INFO datasources.FileScanRDD: Reading File path: file:///data/gaia/GEDR3/GEDR3_GAIASOURCE/part-01804-061dbeeb-75b5-41c3-9d01-422766759ddd_01804.c000.snappy.parquet, range: 0-293846081, partition values: [empty row]
+ > /var/hadoop/logs/application_1649158781282_0002/container_1649158781282_0002_01_000078/stderr:2022-04-05 13:17:12,207 INFO datasources.FileScanRDD: Reading File path: file:///data/gaia/GEDR3/GEDR3_GAIASOURCE/part-01821-061dbeeb-75b5-41c3-9d01-422766759ddd_01821.c000.snappy.parquet, range: 0-294046672, partition values: [empty row]
+ > /var/hadoop/logs/application_1649158781282_0002/container_1649158781282_0002_01_000078/stderr:2022-04-05 13:17:12,404 INFO datasources.FileScanRDD: Reading File path: file:///data/gaia/GEDR3/GEDR3_GAIASOURCE/part-01841-061dbeeb-75b5-41c3-9d01-422766759ddd_01841.c000.snappy.parquet, range: 0-294279487, partition values: [empty row]
+ > ....
+
+ Specifically:
+
+ > ....
+ > Reading File path: file:///data/gaia/GEDR3/GEDR3_GAIASOURCE/part-01841-061dbeeb-75b5-41c3-9d01-422766759ddd_01841.c000.snappy.parquet
+ > ....
+
+ So if the user wants to store data in their data directories, the worker nodes will need to be able to access them.
+ Which means the user's data directories need to be shared.
+ Just adding the user's data directories to /user will fail as soon as they try to access the data in a Spark job.
+
+ We need the user's data to be shared.
+ Either part of a huge uber-share, or as separate shares.
+
+ The huge uber-share has advantages.
+
+ Simple `mkdir` works to create user's directories within the share.
+ Simpler to mount one large share than multiple smaller ones.
+
+ The huge uber-share has disadvantages.
+
+ The size will need to be allocated at the start.
+ We haven't tested being able to grow a share, although it is in theory possible.
+ We might need to unmount during the resize ?
+ (*) actually, we might have done a re-size in the past, need to check ..
+
+ As this grows to multi-terabyte size it will become harder to back up.
+ I don't know the details of how CephFS works, but I'm guessing that putting everything through one share creates a potential bottleneck.
+
+ We would have to use some kind of quotas system to control the amount of space that people use.
+
+ CephFS has quotas, but it looks like they are per client (VM) rather than per user.
+ https://docs.ceph.com/en/latest/cephfs/quota/
+
+ Could still do something with that.
+ Apply a quota to a directory and use uid/gid to restrict who can write to it.
+ Potentially another performance hit ? Would need to test and measure to see.
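+
+ For example (illustrative paths and sizes, assuming the uber-share is already mounted
+ and we are root on the Zeppelin node); CephFS directory quotas are set via extended
+ attributes:
+
+     userdir='/user/albert'
+
+     mkdir -p "${userdir}"
+     chown albert:albert "${userdir}"
+     chmod 0750 "${userdir}"
+
+     # 10 GiB quota on this user's directory.
+     setfattr -n ceph.quota.max_bytes -v 10737418240 "${userdir}"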
+
+ Individual shares have advantages.
+
+ If we create individual shares per user account, then the size of each share is smaller.
+ The smaller size makes it easier to manage backups.
+
+ Individual shares have disadvantages.
+
+ Lots of shares increases the client-server traffic.
+ Potentially another performance hit ? Would need to test and measure to see.
+
+ Creating the user shares needs the Openstack credentials.
+ Which means the best place to do this is on the openstack-client container.
+ That is easy enough to do during deployment build time, but we need to make it easier to re-connect a new openstack-client container after a build.
+
+ To do that we need to store the details of the Openstack components on the zeppelin node itself, probably in /root.
+ We have started doing this for the data deployment, so we can copy and adapt the existing code.
+
+ I think we need a combination of things.
+
+ Every user gets a Unix account, with name and uid.
+ Every user gets a /home/ directory, limited by quota to Mbytes.
+ User's home is protected space for data access credentials for services like DropBox, OwnCloud and STFC Echo.
+
+ The /home directories are only visible from the Zeppelin node.
+ Q - how do we back up and deploy the /home directories ?
+ A - can we do this using a cron+rsync command from a VM on the data project ?
+ A - same as the notebook directory ?
+
+ Science users get a separate CephFS share mounted at /user/.
+ The share is created in the Openstack data project and published to the other projects.
+ The share is limited to a specific size and permissions are set to make it read/write only to the user.
+ Within the user's data directory there is a /public directory which is exposed by our webserver for download.
+
+ Creating a new science user creates a new share.
+ Needs to be done from the openstack-client.
+ Requires a copy of Openstack config saved on the Zeppelin node and recovered by the openstack-client.
+
+ Test users get space on a common CephFS share mounted at /test/.
+ The create/delete scripts will delete test users and test data without confirmation.
+ Anything outside that /test directory requires manual confirmation.
+
+ Need to be careful about concurrent tests overwriting their data.
+ Need to generate unique test user names.
+ Suggest a date+random pattern ?
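+
+ Something like this would do (a sketch, exact pattern to be decided):
+
+     testname="test-$(date '+%Y%m%d')-$(openssl rand -hex 4)"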
+
+ If we create one test share per deployment, and create unique test users per deployment, then we should avoid conflict.
+ However - what are we testing ? Or more importantly, what are we NOT testing ?
+ We actually need to test creating and running multiple users.
+ So we should use the same system for the test accounts.
+ So no to a separate system for test users.
+
+ --------
+
+ Every user gets a Unix account, with name and uid.
+ Every user gets a /home/ directory, limited by quota to Mbytes.
+ User's home is protected space for data access credentials for services like DropBox, OwnCloud and STFC Echo.
+ User's home is backed up to a machine in the data project.
+
+ Every user gets a separate CephFS share mounted at /user/.
+ The share is created in the Openstack data project and published to the other projects.
+ The share is limited to a specific size and permissions are set to make it read/write only to the user.
+ Within the user's data directory there is a /public directory which is exposed by our webserver for download.
+
+ --------
+
+ OR, we create a NFS share within the deployment, using Cinder volumes.
+ We have HDFS already setup ..
+ This sounds like a step back, but ...
+ We haven't tested this on the new cloud.
+ Cinder performance might be better.
+
+ It is also looking forward to the direct attached SSDs we hope to be getting later in the year ...
+ No news from Cambridge or StackHPC about this.
+ Do we have any details about what we were allocated ?
+
+ Data transfer at startup would be an issue ...
+
+ OK, not for end of June deadline so skip it for now.
+
+ --------
+
+ First get the existing setup automated and tested.
+
+ We can have /user and /test, but everything else is the same - both will have new shares created.
+ We can prefix the test shares with the deployment name, enabling us to clear them up afterwards.
+ ** We need explicit checks to make sure we don't delete user shares. **
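+
+ A sketch of that check, assuming the test shares carry the deployment name as a prefix,
+ that the classic manila CLI is available in the openstack-client container, and using
+ an illustrative deployment name:
+
+     deploymentname='iris-gaia-blue-20220510'
+
+     deletetestshare()
+     {
+         local sharename="${1:?}"
+         case "${sharename}" in
+             "${deploymentname}"-*)
+                 manila delete "${sharename}"
+                 ;;
+             *)
+                 echo "Share [${sharename}] is not a test share, refusing to delete"
+                 ;;
+         esac
+     }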
+
+ Home directories and notebooks are backed up using cron+rsync from a VM in the data project.
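+
+ The cron entry on the backup VM could be as simple as this (hypothetical script name
+ and schedule; the script itself would be the rsync commands from the backup notes):
+
+     # Nightly backup at 02:30.
+     30 2 * * * /opt/aglais/bin/backup-notebooks.sh >> /var/log/aglais-backup.log 2>&1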
+
+
+
+
+
diff --git a/notes/zrq/20220510-01-user-accounts.txt b/notes/zrq/20220510-01-user-accounts.txt
new file mode 100644
index 00000000..840bc18d
--- /dev/null
+++ b/notes/zrq/20220510-01-user-accounts.txt
@@ -0,0 +1,55 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+
+ Plans for implementing user accounts.
+
+ Create /opt/aglais for our tools.
+ Add our shell scripts to /opt/aglais/bin.
+ Add /opt/aglais/bin to the PATH.
+ https://github.com/wfau/aglais/issues/696
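+
+ Roughly this, assuming a profile.d snippet is an acceptable way to extend the PATH:
+
+     sudo mkdir -p /opt/aglais/bin
+     sudo chmod 0755 /opt/aglais /opt/aglais/bin
+
+     sudo tee /etc/profile.d/aglais.sh << 'EOF'
+export PATH="${PATH}:/opt/aglais/bin"
+EOF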
+
+ Client side script calls remote functions via ssh.
+
+ YAML config file for user accounts
+ https://github.com/wfau/aglais/issues/692
+
+ Plans for fixing Zeppelin install
+ Need to do these as a separate branch and PR
+
+ Install Zeppelin in /opt/zeppelin.
+ Add /opt/zeppelin/bin to the PATH.
+ https://github.com/wfau/aglais/issues/634
+
+ Run Zeppelin as zeppelin not fedora.
+ https://github.com/wfau/aglais/issues/693
+
+ Create /home/zeppelin for config files.
+
+
+
diff --git a/notes/zrq/20220510-02-backups.txt b/notes/zrq/20220510-02-backups.txt
new file mode 100644
index 00000000..6f877c74
--- /dev/null
+++ b/notes/zrq/20220510-02-backups.txt
@@ -0,0 +1,78 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Simple backup script to start with.
+
+ Result:
+
+ Success, backup to local desktop works.
+
+
+# -----------------------------------------------------------------------------------------
+# Backup our Zeppelin notebooks.
+#[user@desktop]
+
+ sshuser=fedora
+ sshhost=zeppelin.aglais.uk
+
+ mkdir -p /var/local/backups/aglais/$(date '+%Y')/$(date '+%Y%m%d')
+ pushd /var/local/backups/aglais/$(date '+%Y')/$(date '+%Y%m%d')
+
+ datetime=$(date '+%Y%m%d%H%M%S')
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+ "${sshuser:?}@${sshhost:?}://home/fedora/zeppelin/notebook/" \
+ 'aglais-notebooks'
+
+ tar --xz \
+ -cvf "aglais-notebooks-${datetime:?}.tar.xz" \
+ 'aglais-notebooks'
+
+
+ du -h -d 2 .
+
+ > 103M ./aglais-notebooks/.git
+ > 3.2M ./aglais-notebooks/Public Examples
+ > 31M ./aglais-notebooks/Users
+ > 136M ./aglais-notebooks
+ > 253M .
+
+
+
+
diff --git a/notes/zrq/20220510-03-blue-deploy.txt b/notes/zrq/20220510-03-blue-deploy.txt
new file mode 100644
index 00000000..8f9d1c60
--- /dev/null
+++ b/notes/zrq/20220510-03-blue-deploy.txt
@@ -0,0 +1,133 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to check everything works with new client container.
+
+ Result:
+
+ Failed ..
+ Changes to the JSON field names in the openstack client broke cephfs-mount.sh.
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 3m57.836s
+ > user 1m42.166s
+ > sys 0m11.096s
+
+
+# -----------------------------------------------------
+# Create everything, using the new config.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > ....
+ > ....
+ > Target [iris-gaia-data][aglais-data-gaia-dr2-6514]
+ > Found [1e1ed68a-e5fe-47a3-a663-7096231a9324]
+ > ----
+ > Ceph path [/volumes/_nogroup/d6ce1262-7f83-4079-b364-befc1f166142]
+ > Ceph size [512]
+ > ----
+ > Ceph node [10.4.200.9:6789]
+ > Ceph node [10.4.200.13:6789]
+ > Ceph node [10.4.200.17:6789]
+ > /deployments/hadoop-yarn/bin/cephfs-mount.sh: line 144: accessrule: parameter null or not set
+ > ----
+ > Ceph user []
+ > Ceph key []
+ >
+ > /deployments/hadoop-yarn/bin/cephfs-mount.sh: line 168: cephuser: parameter null or not set
+ > /deployments/hadoop-yarn/ansible /
+ > [WARNING]: * Failed to parse /deployments/hadoop-
+ > yarn/ansible/config/zeppelin-54.86-spark-6.26.43.yml with auto plugin: no root
+ > 'plugin' key found, '/deployments/hadoop-
+ > yarn/ansible/config/zeppelin-54.86-spark-6.26.43.yml' is not a valid YAML
+ > inventory plugin config file
+ > [WARNING]: * Failed to parse /deployments/hadoop-
+ > yarn/ansible/config/zeppelin-54.86-spark-6.26.43.yml with yaml plugin: Invalid
+ > extra vars data supplied. '@/tmp/ceph-mount-vars.yml' could not be made into a
+ > dictionary
+ > [WARNING]: * Failed to parse /deployments/hadoop-
+ > yarn/ansible/config/zeppelin-54.86-spark-6.26.43.yml with ini plugin: Invalid
+ > extra vars data supplied. '@/tmp/ceph-mount-vars.yml' could not be made into a
+ > dictionary
+ > ....
+ > ....
+
+
+ #
+ # Openstack client changed some of the JSON field names, breaking our shell script.
+ #
+
+
+
+
diff --git a/notes/zrq/20220511-01-blue-deploy.txt b/notes/zrq/20220511-01-blue-deploy.txt
new file mode 100644
index 00000000..da1cf54d
--- /dev/null
+++ b/notes/zrq/20220511-01-blue-deploy.txt
@@ -0,0 +1,341 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to debug Shiro database.
+
+ Result:
+
+ Work in progress ...
+
+ Found broken parts of the JDBC Shiro scripts (they create the tables twice).
+ Inserted test users manually.
+ Zeppelin unable to contact Spark on master.
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 4m1.865s
+ > user 1m41.456s
+ > sys 0m11.113s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 35m59.088s
+ > user 9m52.643s
+ > sys 2m23.472s
+
+
+# -----------------------------------------------------
+# Create (test) users.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-users.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ "test" \
+ | tee /tmp/create-users.log
+
+ > ....
+ > TASK [Import Zeppelin user data] ***********************************************
+ > fatal: [zeppelin]: FAILED! => {"changed": false, "msg": "ERROR 1050 (42S01) at line 2: Table 'users' already exists\n"}
+ > ....
+
+ #
+ # Fixed broken parts of JDBC Shiro scripts.
+ #
+
+
+# -----------------------------------------------------
+# Quick test with one user.
+#[root@ansibler]
+
+ numusers=1
+ testlevel=basic
+
+ concurrent=True
+ testdate=$(date '+%Y%m%d-%H%M%S')
+
+ time \
+ /deployments/hadoop-yarn/bin/run-tests.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ "${testlevel:?}" \
+ "${concurrent:?}" \
+ "${numusers:?}" \
+ | tee /tmp/test-${testlevel:?}-${testdate:?}.log
+
+ sed "
+ 1,3 d
+ s/\"/#/g
+ s/'\(-\{0,1\}[0-9.]\{1,\}\)'/\1/g
+ s/:[[:space:]]*\([a-zA-Z]\{1,\}\)\([,}]\)/:'\1'\2/g
+ s/:[[:space:]]*\([,}]\),/: ''\1/g
+ s/'/\"/g
+ " \
+ '/tmp/test-result.json' \
+ | jq '.' \
+ | tee /tmp/test-${testlevel:?}-${testdate:?}.json
+
+
+ > ....
+ > ....
+ > TASK [Run benchmarker] *********************************************************
+ > changed: [
+ > localhost] => {
+ > "changed": true,
+ > "cmd": "python3 /tmp/run-test.py | tee /tmp/test-result.json",
+ > "delta": "0:00:04.763916",
+ > "end": "2022-05-11 03:01:11.734516",
+ > "msg": "",
+ > "rc": 0,
+ > "start": "2022-05-11 03:01:06.970600",
+ > "stderr": "
+ > ERROR:root:list index out of range
+ > Traceback (most recent call last):
+ > File \"/usr/local/lib/python3.10/site-packages/aglais_benchmark/aglais_benchmark.py\", line 83, in run_notebook
+ > notebookid = text.split(\": \")[1]
+ > IndexError: list index out of range
+ > ERROR:root:list index out of range
+ > Traceback (most recent call last):
+ > File \"/usr/local/lib/python3.10/site-packages/aglais_benchmark/aglais_benchmark.py\", line 83, in run_notebook
+ > notebookid = text.split(\": \")[1]
+ > IndexError: list index out of range
+ > ERROR:root:list index out of range
+ > ....
+ > ....
+
+
+ #
+ # Checked-in code does not run.
+ #
+ # create-users.sh is not idempotent, it tries to create the database every time it is run.
+ # auth.sql and auth-test.sql fail because they both contain the "CREATE TABLE users ..."
+ # which has already been done by "38-install-user-db.yml" calling "create.sql"
+ #
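+
+ #
+ # One possible fix (a sketch, untested): only run the auth SQL import when the
+ # users table does not exist yet. The /tmp/auth.sql path here is hypothetical.
+ #
+
+     ssh zeppelin \
+         "
+         if ! mysql --execute 'use zeppelin ; show tables ;' | grep -q -w 'users'
+         then
+             mysql zeppelin < /tmp/auth.sql
+         fi
+         "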
+
+
+# -----------------------------------------------------
+# Try using our curl tests.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.6
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
+
+# -----------------------------------------------------
+# Login to Zeppelin as a normal user.
+#[root@ansibler]
+
+ gaiauser=gaiauser2
+ gaiapass=gaiapass2
+
+ zeplogin "${gaiauser:?}" "${gaiapass}"
+
+ > {
+ > "status": "FORBIDDEN",
+ > "message": ""
+ > }
+
+ #
+ # Tried different combinations.
+ # No luck.
+ #
+
+
+# -----------------------------------------------------
+# Configure the database passwords.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ cat > /home/fedora/mariadb.cnf << EOF
+[client]
+user = zeppelin
+password = ........
+EOF
+
+ ln -s \
+ /home/fedora/mariadb.cnf \
+ /home/fedora/.my.cnf
+ '
+
+
+# -----------------------------------------------------
+# Hash our password.
+#[root@ansibler]
+
+ dnf install java-latest-openjdk
+
+ mkdir ~/frog
+ pushd ~/frog
+
+ wget https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
+
+ java -jar shiro-tools-hasher-1.9.0-cli.jar -pnc
+
+ popd
+
+ username='gamestop'
+ password='........'
+ passhash='$shiro1$SHA-256$500000$uAtUUYAYJdHFZdcubPI32g==$tkvtpVWtVqObz25XOL9WARy4+WFJqlwTKCCzYf1dI4o='
+
+# -----------------------------------------------------
+# Add our own user accounts.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ show tables ;
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ \"
+ use zeppelin ;
+ DELETE FROM users WHERE username = '${username:?}' ;
+ DELETE FROM user_roles WHERE username = '${username:?}' ;
+ \"
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ INSERT INTO users (username, password) VALUES (\"${username:?}\", \"${passhash:?}\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${username:?}\", \"user\");
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+# -----------------------------------------------------
+# Login to Zeppelin as a normal user.
+#[root@ansibler]
+
+ zeplogin "${username:?}" "${password:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "a42cfc33-de34-4762-8fb7-279fba293113",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# Run the HealpixSourceCounts notebook
+#[root@ansibler]
+
+ noteid=2H1TF42A3
+
+ zepnbclear ${noteid}
+ zepnbexecstep ${noteid}
+ zepnbstatus ${noteid}
+ zepnbtotaltime ${noteid}
+
+ > ....
+ > ERROR [2022-05-11 05:11:11,319] ({FIFOScheduler-interpreter_196503570-Worker-1} Logging.scala[logError]:94) - Error initializing SparkContext.
+ > java.net.ConnectException: Call From iris-gaia-blue-20220511-zeppelin/10.10.3.119 to master01:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
+ > at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ > ....
+
+ #
+ # Zeppelin PySpark notebook cell unable to connect to Spark master node.
+ # Not clear why.
+ # At least one worker failed.
+ # Not clear why.
+ #
+
diff --git a/notes/zrq/20220511-02-blue-deploy.txt b/notes/zrq/20220511-02-blue-deploy.txt
new file mode 100644
index 00000000..462bffea
--- /dev/null
+++ b/notes/zrq/20220511-02-blue-deploy.txt
@@ -0,0 +1,530 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to debug Shiro database.
+
+ Result:
+
+ Work in progress ...
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 3m22.997s
+ > user 1m30.479s
+ > sys 0m9.980s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 32m37.591s
+ > user 9m40.683s
+ > sys 2m22.807s
+
+
+# -----------------------------------------------------
+# Add a simple symlink for Zeppelin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ ln -s zeppelin-0.10.0-bin-all zeppelin
+ '
+
+# -----------------------------------------------------
+# Create our shiro-auth database.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-auth-database.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-auth-database.log
+
+ > PLAY RECAP *********************************************************************
+ > zeppelin : ok=11 changed=7 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
+
+ > real 0m33.924s
+ > user 0m9.664s
+ > sys 0m2.012s
+
+
+# -----------------------------------------------------
+# Hash our tester password.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ mkdir ~/shiro
+ pushd ~/shiro
+
+ wget https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
+
+ popd
+ '
+
+ # Manual process because hasher won't read from console
+ ssh zeppelin
+
+ java -jar ~/shiro/shiro-tools-hasher-1.9.0-cli.jar -pnc
+
+
+
+ username='gamestop'
+ password='ahm5Rion see2Eegh'
+ passhash='$shiro1$SHA-256$500000$/gn0Ya40yIINQ7N0ctFebQ==$iHbcPiALiMKQSgAwNNuU4ydbzeFprGIfSVhKMEC4Yes='
+
+
+# -----------------------------------------------------
+# Add our tester account.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ show tables ;
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ \"
+ use zeppelin ;
+ DELETE FROM users WHERE username = '${username:?}' ;
+ DELETE FROM user_roles WHERE username = '${username:?}' ;
+ \"
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ INSERT INTO users (username, password) VALUES (\"${username:?}\", \"${passhash:?}\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${username:?}\", \"user\");
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+ > username password password_salt
+ > gamestop $shiro1$SHA-256$.....= NULL
+ >
+ > username role_name
+ > gamestop user
+
+# -----------------------------------------------------
+# Load our curl tests.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.174
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
+
+# -----------------------------------------------------
+# Login to Zeppelin.
+#[root@ansibler]
+
+ zeplogin "${username:?}" "${password:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "25ea5f23-1dca-462a-abd0-5974293f3f68",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Upgrade our account from user to admin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ DELETE FROM user_roles WHERE username = \"${username:?}\";
+ INSERT INTO user_roles (username, role_name) VALUES (\"${username:?}\", \"user\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${username:?}\", \"admin\");
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+ > username password password_salt
+ > gamestop $shiro1$SHA-256$.....= NULL
+ >
+ > username role_name
+ > gamestop admin
+
+
+# -----------------------------------------------------
+# Login to Zeppelin.
+#[root@ansibler]
+
+ zeplogin "${username:?}" "${password:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "25ea5f23-1dca-462a-abd0-5974293f3f68",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+ #
+ # Still listing us as 'user' not 'admin'.
+ # Try restarting Zeppelin
+ #
+
+ ssh zeppelin \
+ '
+ zeppelin-daemon.sh restart
+ '
+
+ > Zeppelin stop [ OK ]
+ > Zeppelin start [ OK ]
+
+
+ rm -f /tmp/${username:?}.cookies
+
+ zeplogin "${username:?}" "${password:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "457af920-e02b-4431-b296-b553fcc7cf08",
+ > "roles": "[\"admin\",\"user\"]"
+ > }
+ > }
+
+ #
+ # IMPORTANT - Zeppelin caches user accounts.
+ # Inserting a new user works, because it forces a database query.
+ # Altering the role of an existing user uses cached data.
+ #
+
+# -----------------------------------------------------
+# Try listing the user accounts.
+#[root@ansibler]
+
+ zepcookies=/tmp/${username:?}.cookies
+
+ curl \
+ --silent \
+ --request 'POST' \
+ --cookie-jar "${zepcookies:?}" \
+ --data "userName=${username:?}" \
+ --data "password=${password:?}" \
+ "${zeppelinurl:?}/api/login" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "70dc2413-9908-48d5-8e0d-72df2f1309d7",
+ > "roles": "[\"admin\",\"user\"]"
+ > }
+ > }
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": [
+ > {
+ > "id": "2F2YS7PCE",
+ > "path": "/Flink Tutorial/1. Flink Basics"
+ > },
+ > ....
+ > ....
+ > {
+ > "id": "2GE79Y5FV",
+ > "path": "/Spark Tutorial/8. PySpark Conda Env in Yarn Mode"
+ > }
+ > ]
+ > }
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/configurations/all" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "zeppelin.notebook.azure.user": "user",
+ > "zeppelin.war.tempdir": "webapps",
+ > "zeppelin.helium.npm.installer.url": "http://registry.npmjs.org/",
+ > "zeppelin.notebook.git.remote.username": "token",
+ > "zeppelin.interpreter.remoterunner": "bin/interpreter.sh",
+ > ....
+ > ....
+ > "zeppelin.interpreter.group.default": "spark",
+ > "zeppelin.conf.dir": "/home/fedora/zeppelin-0.10.0-bin-all/conf",
+ > "zeppelin.run.mode": "auto",
+ > "zeppelin.interpreter.localRepo": "local-repo"
+ > }
+ > }
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/credential" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "body": {
+ > "userCredentials": {}
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# Try creating a new user account.
+#[root@ansibler]
+
+ newuser='immutable'
+ newpass='juaz4Boo Quie8AhK'
+
+cat > /tmp/new-user.json << EOF
+{
+ "entity": "e1",
+ "username": "${newuser:?}",
+ "password": "${newpass:?}"
+}
+EOF
+
+ curl \
+ --verbose \
+ --cookie "${zepcookies:?}" \
+ --request 'PUT' \
+ --header 'Content-Type: application/json' \
+ --data '@/tmp/new-user.json' \
+ "${zeppelinurl:?}/api/credential"
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/credential" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "body": {
+ > "userCredentials": {
+ > "e1": {
+ > "username": "immutable",
+ > "password": "juaz4Boo Quie8AhK"
+ > }
+ > }
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# Try login using the new user account.
+#[root@ansibler]
+
+ rm -f "${zepcookies}"
+
+ curl \
+ --verbose \
+ --request 'POST' \
+ --cookie-jar "${zepcookies:?}" \
+ --data "userName=${newuser:?}" \
+ --data "password=${newpass:?}" \
+ "${zeppelinurl:?}/api/login" \
+ | jq '.'
+
+ > {
+ > "status": "FORBIDDEN",
+ > "message": ""
+ > }
+
+ #
+ # OK, possibly too good to be true ?
+ # Yep, credentials are something else entirely.
+ # https://zeppelin.apache.org/docs/latest/usage/interpreter/overview.html#credential-injection
+ # Useful, but not here.
+ # Worth a try ...
+ #
+
+
+# -----------------------------------------------------
+# Go back to doing it via SQL.
+#[root@ansibler]
+
+ ssh zeppelin
+
+ java -jar ~/shiro/shiro-tools-hasher-1.9.0-cli.jar -pnc
+
+
+ newuser='immutable'
+ newpass='juaz4Boo Quie8AhK'
+ newhash='$shiro1$SHA-256$500000$4cB0QcXO7UB1sq7wQaFhLQ==$j1sEcSBGORYjw3yTRlUxbqZXSGOjXp/qLsaTFHwga/E='
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ use zeppelin ;
+ INSERT INTO users (username, password) VALUES (\"${newuser:?}\", \"${newhash:?}\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${newuser:?}\", \"user\");
+ '
+ "
+
+ rm -f "${zepcookies}"
+
+ curl \
+ --verbose \
+ --request 'POST' \
+ --cookie-jar "${zepcookies:?}" \
+ --data "userName=${newuser:?}" \
+ --data "password=${newpass:?}" \
+ "${zeppelinurl:?}/api/login" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "immutable",
+ > "ticket": "6d52d89c-f8aa-4ae1-83f2-dc1cb94d9db8",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+ #
+ # So far so good ... the only crappy bit is hashing the passwords.
+ #
+
+# -----------------------------------------------------
+# The problem ...
+#[root@ansibler]
+
+ ssh zeppelin
+
+ echo -n 'juaz4Boo Quie8AhK' | java -jar ~/shiro/shiro-tools-hasher-1.9.0-cli.jar -pnc
+
+ > Error: java.io.Console is not available on the current JVM. Cannot read passwords.
+ > ....
+ > ....
+
+
diff --git a/notes/zrq/20220511-03-blue-deploy.txt b/notes/zrq/20220511-03-blue-deploy.txt
new file mode 100644
index 00000000..1a9ff289
--- /dev/null
+++ b/notes/zrq/20220511-03-blue-deploy.txt
@@ -0,0 +1,737 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to debug Shiro database.
+
+ Result:
+
+ Success, adding new users to database works.
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 4m11.416s
+ > user 1m45.877s
+ > sys 0m11.614s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 45m50.568s
+ > user 14m55.214s
+ > sys 3m56.496s
+
+
+# -----------------------------------------------------
+# Add a simple symlink for Zeppelin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ ln -s zeppelin-0.10.0-bin-all zeppelin
+ '
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Restore notebooks from backup.
+#[user@desktop]
+
+ sshuser=fedora
+ sshhost=128.232.222.23
+
+ ssh "${sshuser:?}@${sshhost:?}" \
+ '
+ mv zeppelin/notebook zeppelin/notebook-old
+ '
+
+ pushd /var/local/backups/aglais/2022/20220510/
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+ 'aglais-notebooks/' \
+ "${sshuser:?}@${sshhost:?}:zeppelin/notebook"
+
+ popd
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Restart Zeppelin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ zeppelin-daemon.sh restart
+ '
+
+
+# -----------------------------------------------------
+# Create our shiro-auth database.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-auth-database.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-auth-database.log
+
+ > real 0m31.532s
+ > user 0m9.317s
+ > sys 0m2.143s
+
+
+# -----------------------------------------------------
+# The problem ...
+#[root@ansibler]
+
+ ssh zeppelin
+
+ echo -n 'juaz4Boo Quie8AhK' | java -jar ~/shiro/shiro-tools-hasher-1.9.0-cli.jar -pnc
+
+ > Error: java.io.Console is not available on the current JVM. Cannot read passwords.
+ > ....
+ > ....
+
+ #
+ # Looks like Stelios has solved this one :-)
+ # (java -jar {{zephome}}/lib/shiro-tools-hasher-1.9.0-cli.jar -i 500000 -f shiro1 -a SHA-256 -gss 128 $NEW_PASSWORD)
+ #
+
+
+# -----------------------------------------------------
+# Add our tester account.
+#[root@ansibler]
+
+ testuser='gamestop'
+ testpass='ahm5Rion see2Eegh'
+
+ ssh zeppelin \
+ "
+ testhash=\$(
+ java -jar '/opt/aglais/lib/shiro-tools-hasher-1.9.0-cli.jar' -i 500000 -f shiro1 -a SHA-256 -gss 128 '${testpass:?}'
+ )
+
+ mysql --execute \
+ '
+ INSERT INTO users (username, password) VALUES (\"${testuser:?}\", \"'\${testhash:?}'\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${testuser:?}\", \"user\");
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+ > username password password_salt
+ > gamestop $shiro1$SHA-256$........ NULL
+ >
+ > username role_name
+ > gamestop user
+
+
+# -----------------------------------------------------
+# Load our curl tests.
+# TODO save the IP address during the build.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.23
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
+
+# -----------------------------------------------------
+# Login to Zeppelin.
+#[root@ansibler]
+
+ zeplogin "${testuser:?}" "${testpass:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "49a2ac98-9535-41c6-bd94-0c5f85bb7788",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# Run the HealpixSourceCounts notebook
+#[root@ansibler]
+
+ noteid=2GQ6WMH9W
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [20210507-084613_357121151][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-105718_1698521515][Set the resolution level and define the query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-110030_2095441495][Plot up the results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-091244_670006530][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1648610499944_1376690736][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "3. Source counts over the sky",
+ > "id": "2GQ6WMH9W",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/3. Source counts over the sky"
+ > }
+ > }
+
+ > 0:1:3
+
+
+# -----------------------------------------------------
+# Run the MeanProperMotions notebook
+#[root@ansibler]
+
+ noteid=2GSNYBDWB
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [paragraph_1646395441893_1272795891][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111756_391695716][Set HEALPix resolution]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111538_106023214][Define a data frame by SQL query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111939_1386609632][Mean RA proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111943_814907111][Mean Dec proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111956_1822284967][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-132447_1514402898][Tidy-up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20211207-132335_689637194][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "4. Mean proper motions over the sky",
+ > "id": "2GSNYBDWB",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/4. Mean proper motions over the sky"
+ > }
+ > }
+
+ > 0:0:49
+
+
+# -----------------------------------------------------
+# Run the RandomForestClassifier notebook
+#[root@ansibler]
+
+ noteid=2GQDKZ59J
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [20201013-131059_546082898][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-131649_1734629667][Basic catalogue query selections and predicates]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-132418_278702125][Raw catalogue with selected columns]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-094650_221463065][Visualisation (colour / absolute-magnitue diagram) of the raw catalogue]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-110502_1704727157][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-105445_95907042][Define the training samples]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-161110_18118893][Assemble training and reserve test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-152110_1282917873][Train up the Random Forrest]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210504-153521_1591875670][Check feature set for nulls]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-131823_1744793710][Classify the reserved test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201016-154755_24366630][Classification confusion matrix]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-163421_1811049882][Relative importance of the selected features]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-162249_1468741293][Apply the classification model and plot sample results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201124-100512_110153564][Histogram of classification probability]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-103046_1353183691][Sky distribution of good source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-163312_728555601][Sky distribution of bad source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647354647989_1984770159][Tidy up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210428-140519_1288739408][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210506-134212_1741520795][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "7. Good astrometric solutions via ML Random Forrest classifier",
+ > "id": "2GQDKZ59J",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > }
+
+ > 0:9:5
+
+
+
+# -----------------------------------------------------
+# List the public examples.
+#[root@ansibler]
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq '.'
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": [
+ > {
+ > "id": "2GRTQZFUM",
+ > "path": "/Public Examples/1. Start here"
+ > },
+ > ....
+ > ....
+ > ]
+ > }
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq '.body[] | select(.path | startswith("/Public"))'
+
+ > {
+ > "id": "2GRTQZFUM",
+ > "path": "/Public Examples/1. Start here"
+ > }
+ > {
+ > "id": "2GRA39HCN",
+ > "path": "/Public Examples/2. Data holdings"
+ > }
+ > {
+ > "id": "2GQ6WMH9W",
+ > "path": "/Public Examples/3. Source counts over the sky"
+ > }
+ > {
+ > "id": "2GSNYBDWB",
+ > "path": "/Public Examples/4. Mean proper motions over the sky"
+ > }
+ > {
+ > "id": "2H2YRJCKM",
+ > "path": "/Public Examples/5. Working with Gaia XP spectra"
+ > }
+ > {
+ > "id": "2GZME59KY",
+ > "path": "/Public Examples/6. Working with cross-matched surveys"
+ > }
+ > {
+ > "id": "2GQDKZ59J",
+ > "path": "/Public Examples/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > {
+ > "id": "2GVXKC266",
+ > "path": "/Public Examples/9. Tips and tricks"
+ > }
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq -r '.body[] | select(.path | startswith("/Public")) | .id'
+
+ > 2GRTQZFUM
+ > 2GRA39HCN
+ > 2GQ6WMH9W
+ > 2GSNYBDWB
+ > 2H2YRJCKM
+ > 2GZME59KY
+ > 2GQDKZ59J
+ > 2GVXKC266
+
diff --git a/notes/zrq/20220512-01-create-users.txt b/notes/zrq/20220512-01-create-users.txt
new file mode 100644
index 00000000..6f20b619
--- /dev/null
+++ b/notes/zrq/20220512-01-create-users.txt
@@ -0,0 +1,1534 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Prototyping create user functions.
+
+ Result:
+
+ Work in progress
+
+# -----------------------------------------------------
+
+
+ #
+ # Example YAML ..
+ #
+
+ - name: "albert"
+   uid: 2049
+   uuid: "7ac3dc18-f53c-4076-88b2-bcf4171d5b76"
+   home: "/home/albert"
+   data: "/user/albert"
+   pass:
+     hash: "148518cb-1dc5-49df-a1b2-42cec8a3e547"
+
+
+ create-user
+
+ If any of the fields are empty, new values are created.
+ The script output is the updated account.
+
+
+ get the next available uid
+ # https://www.commandlinefu.com/commands/view/5684/determine-next-available-uid
+ getent passwd | awk -F: '($3>600) && ($3<60000) && ($3>maxuid) { maxuid=$3; } END { print maxuid+1; }'
+
+
+ yq '.[] | split_doc ' \
+ /deployments/common/users/test-users.yml
+
+ > name: "Nelia"
+ > uid: 2050
+ > uuid: "5cf0cf95-157e-4a40-b95e-b163f22c2d92"
+ > home: "/home/Nelia"
+ > data: "/user/Nelia"
+ > pass: "SaiH2pad Uwaw2chu"
+ > ---
+ > name: "Ghoria"
+ > uid: 2051
+ > uuid: "237983b5-a21f-47c8-8fb3-80cbbc70ba56"
+ > home: "/home/Ghoria"
+ > data: "/user/Ghoria"
+ > pass: ""
+ > ---
+ > name: "Nalla"
+ > uid: 2052
+ > uuid: "65c7aeb1-3c2a-43b7-acc0-8c4497997c70"
+ > home: "/home/Nalla"
+ > data: "/user/Nalla"
+ > pass: ""
+ > ---
+ > name: "Wenia"
+ > uid: 2053
+
+ #
+ # Split input into blocks ..
+ # https://raymii.org/s/tutorials/Bash_bits_split_a_file_in_blocks_and_do_something_with_each_block.html
+
+ OLDIFS=$IFS;
+ IFS="@"
+
+ for block in $(
+ yq '.[] | split_doc ' \
+ /deployments/common/users/test-users.yml \
+ | sed "s/^---/@/"
+ )
+ do
+ echo "${block}"
+ done
+ IFS=$OLDIFS
+
+ #
+ # Sed execute option
+ # https://unix.stackexchange.com/questions/48325/how-to-embed-a-shell-command-into-a-sed-expression
+
+ printblock()
+ {
+ local block=${1:?}
+ echo ""
+ echo "========"
+ echo ${block}
+ echo "========"
+ echo ""
+ }
+
+ export -f printblock
+
+ yq '.[] | split_doc ' \
+ /deployments/common/users/test-users.yml \
+ | sed -n '
+ 1 i ---
+ $ a ---
+ /---/,/---/ {
+ s/\(.*\)/printblock "\1"/e
+ }
+ '
+
+ yq '.[] | split_doc ' \
+ /deployments/common/users/test-users.yml \
+ | sed "
+ s/\"/'/g
+ " \
+ | sed -n '
+ $ a ---
+ /^---/! {
+ H
+ }
+ /^---/ {
+ g
+ s/\(.*\)/printblock "\1"/ep
+ }
+ '
+
+ #
+ # Try using yq to convert to json, and then parse that.
+
+
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+
+ > {"name":"Nelia","uid":2050,"uuid":"5cf0cf95-157e-4a40-b95e-b163f22c2d92","home":"/home/Nelia","data":"/user/Nelia","pass":"SaiH2pad Uwaw2chu"}
+ > {"name":"Ghoria","uid":2051,"uuid":"237983b5-a21f-47c8-8fb3-80cbbc70ba56","home":"/home/Ghoria","data":"/user/Ghoria","pass":""}
+ > {"name":"Nalla","uid":2052,"uuid":"65c7aeb1-3c2a-43b7-acc0-8c4497997c70","home":"/home/Nalla","data":"/user/Nalla","pass":""}
+ > {"name":"Wenia","uid":2053}
+ > {"name":"Ava"}
+
+
+ for userinfo in $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+ do
+ echo "========"
+ name=$(jq '.name' <<< "${userinfo}")
+ echo "name [${name}]"
+ echo "========"
+ done
+
+
+
+ for userinfo in $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+ do
+ echo "========"
+ name=$(jq --argjson foo "${userinfo}" '$foo | .name')
+ echo "name [${name}]"
+ echo "========"
+ done
+
+ #
+ # Passing JSON string into jq.
+ # https://stackoverflow.com/a/47106137
+
+ testjson='{"name":"Nelia","uid":2050,"uuid":"5cf0cf95-157e-4a40-b95e-b163f22c2d92","home":"/home/Nelia","data":"/user/Nelia","pass":"SaiH2pad Uwaw2chu"}'
+
+ jq -n --argjson data "$testjson" '$data.name'
+
+
+ for userinfo in $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+ do
+ echo "========"
+ echo "${userinfo}"
+ echo "========"
+ name=$(jq -n --argjson user "${userinfo}" '$user.name')
+ uuid=$(jq -n --argjson user "${userinfo}" '$user.uuid')
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "========"
+ done
+
+ > ========
+ > jq: invalid JSON text passed to --argjson
+ > Use jq --help for help with command-line options,
+ > or see the jq manpage, or online docs at https://stedolan.github.io/jq
+ > jq: invalid JSON text passed to --argjson
+ > Use jq --help for help with command-line options,
+ > or see the jq manpage, or online docs at https://stedolan.github.io/jq
+ > name []
+ > uuid []
+ > ========
+ > ========
+ > jq: invalid JSON text passed to --argjson
+ > Use jq --help for help with command-line options,
+ > or see the jq manpage, or online docs at https://stedolan.github.io/jq
+ > jq: invalid JSON text passed to --argjson
+ > Use jq --help for help with command-line options,
+ > or see the jq manpage, or online docs at https://stedolan.github.io/jq
+ > name []
+ > uuid []
+ > ========
+ > ========
+ > name ["Ghoria"]
+ > uuid ["237983b5-a21f-47c8-8fb3-80cbbc70ba56"]
+ > ========
+ > ========
+ > name ["Nalla"]
+ > uuid ["65c7aeb1-3c2a-43b7-acc0-8c4497997c70"]
+ > ========
+ > ========
+ > name ["Wenia"]
+ > uuid [null]
+ > ========
+ > ========
+ > name ["Ava"]
+ > uuid [null]
+ > ========
+
+ #
+ # Fails because the for loop is breaking on spaces, not because jq isn't parsing the JSON.
+ # https://stackoverflow.com/a/47775590
+ #
+
+
+
+ #
+ # Set IFS to just newline
+ # https://askubuntu.com/a/344418
+
+ IFS=$'\n'
+ for userinfo in $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+ do
+ echo "========"
+ echo "${userinfo}"
+ echo "========"
+ done
+ unset IFS
+
+ #
+ # Use read instead
+ # https://askubuntu.com/a/1044537
+ while read -r userinfo
+ do
+ echo "========"
+ echo "${userinfo}"
+ echo "========"
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+
+ > ========
+ > {"name":"Nelia","uid":2050,"uuid":"5cf0cf95-157e-4a40-b95e-b163f22c2d92","home":"/home/Nelia","data":"/user/Nelia","pass":"SaiH2pad Uwaw2chu"}
+ > ========
+ > ========
+ > {"name":"Ghoria","uid":2051,"uuid":"237983b5-a21f-47c8-8fb3-80cbbc70ba56","home":"/home/Ghoria","data":"/user/Ghoria","pass":""}
+ > ========
+ > ========
+ > {"name":"Nalla","uid":2052,"uuid":"65c7aeb1-3c2a-43b7-acc0-8c4497997c70","home":"/home/Nalla","data":"/user/Nalla","pass":""}
+ > ========
+ > ========
+ > {"name":"Wenia","uid":2053}
+ > ========
+ > ========
+ > {"name":"Ava"}
+ > ========
+
+ #
+ # Putting it together ...
+
+ while read -r userinfo
+ do
+ echo "========"
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid')
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "========"
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > ========
+ > name ["Nelia"]
+ > uuid ["5cf0cf95-157e-4a40-b95e-b163f22c2d92"]
+ > ========
+ > ========
+ > name ["Ghoria"]
+ > uuid ["237983b5-a21f-47c8-8fb3-80cbbc70ba56"]
+ > ========
+ > ========
+ > name ["Nalla"]
+ > uuid ["65c7aeb1-3c2a-43b7-acc0-8c4497997c70"]
+ > ========
+ > ========
+ > name ["Wenia"]
+ > uuid [null]
+ > ========
+ > ========
+ > name ["Ava"]
+ > uuid [null]
+ > ========
+
+
+ #
+ # Returning empty value rather than 'null'
+ # https://github.com/stedolan/jq/issues/354#issuecomment-43147898
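+
+ # A quick standalone check of the '// empty' idiom (a sketch, not part of
+ # the original session): a missing or null field produces no output, while
+ # a present field prints its value.
+
+ echo '{"name":"demo"}' | jq --raw-output '.uuid // empty'
+ echo '{"name":"demo"}' | jq --raw-output '.name // empty'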
+
+ while read -r userinfo
+ do
+ echo "========"
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "========"
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > ========
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > ========
+ > ========
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > ========
+ > ========
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > ========
+ > ========
+ > name [Wenia]
+ > uuid []
+ > ========
+ > ========
+ > name [Ava]
+ > uuid []
+ > ========
+
+
+ #
+ # Adding a test for empty values.
+
+ while read -r userinfo
+ do
+ echo "========"
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "========"
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > ========
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > ========
+ > ========
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > ========
+ > ========
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > ========
+ > ========
+ > name [Wenia]
+ > uuid [new uuid]
+ > ========
+ > ========
+ > name [Ava]
+ > uuid [new uuid]
+ > ========
+
+ #
+ # Add a local secret function that uses a YAML file for data.
+
+ secretfile=$(mktemp)
+
+ cat > "${secretfile}" << EOF
+users:
+ passhash:
+ "Nelia": "Hash of Nelia's password"
+ "Ghoria": "Hash of Ghoria's password"
+ "Nalla": "Hash of Nalla's password"
+EOF
+
+ getpasshash()
+ {
+ local key=${1:?}
+ yq '
+ .users.passhash.'${key}'
+ ' "${secretfile}"
+ }
+
+ getpasshash 'Nelia'
+
+ > Hash of Nelia's password
+
+ #
+ # Adding a password lookup.
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ pass=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.pass // empty')
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ -z "${pass}" ]
+ then
+ pass='new password'
+ elif [ "${pass}" == '' ]
+ then
+ pass=$(getpasshash "${name}")
+ fi
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > pass [Nelia's password]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > pass [Hash of Ghoria's password]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > pass [new password]
+ >
+ > name [Wenia]
+ > uuid [new uuid]
+ > pass [new password]
+ >
+ > name [Ava]
+ > uuid [new uuid]
+ > pass [new password]
+
+ #
+ # For real users we store their hashed passwords.
+ # So real-users.yml will never store passwords in plain text.
+ # For test users, our tests need to know the password to be able to use it.
+ # So test-users.yml will contain passwords in plain text.
+ #
+ # The deciding factor is whether the user is a test user or not ...
+ # If the password is blank, always check for a hash; if that is still empty, generate a new one.
+ #
+ # What could possibly go wrong ...
+ # An attacker edits real-users.yml and sets the password,
+ # blocking the actual user from accessing the site, and
+ # updating to the hacked value when the site is re-deployed.
+ # Very bad.
+ #
+ # Don't use passwords from real-users.yml or test-users.yml.
+ # Always check for a hash.
+ # If there is no hash, generate a new password.
+ #
+ # Real users will always have a hash.
+ # Test users won't have a hash, so they get generated passwords.
+ #
+ # If we want, we can override the getpasshash function on a test deployment
+ # to use a separate database of hashes for test users.
+ #
+ # A plan .. :-)
+ #
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ pass=$(getpasshash "${name}")
+
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ -z "${pass}" ]
+ then
+ pass='new password'
+ fi
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > pass [Hash of Nelia's password]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > pass [Hash of Ghoria's password]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > pass [Hash of Nalla's password]
+ >
+ > name [Wenia]
+ > uuid [new uuid]
+ > pass [null]
+ >
+ > name [Ava]
+ > uuid [new uuid]
+ > pass [null]
+
+ #
+ # Fix getpasshash to return '-' if there is no password.
+ # https://mikefarah.gitbook.io/yq/operators/alternative-default-value
+
+ getpasshash()
+ {
+ local key=${1:?}
+ yq '
+ .users.passhash.'${key}' // "-"
+ ' "${secretfile}"
+ }
+
+ getpasshash 'Nelia'
+
+ > Hash of Nelia's password
+
+ getpasshash 'Frog'
+
+ > -
+
+
+ #
+ # Add a check for the specific value '-' to trigger generating a new password.
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ pass=$(getpasshash "${name}")
+
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ "${pass}" == '-' ]
+ then
+ pass='new password'
+ fi
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > pass [Hash of Nelia's password]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > pass [Hash of Ghoria's password]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > pass [Hash of Nalla's password]
+ >
+ > name [Wenia]
+ > uuid [new uuid]
+ > pass [new password]
+ >
+ > name [Ava]
+ > uuid [new uuid]
+ > pass [new password]
+
+
+ #
+ # For real users, we only know the password hash.
+ # The password hash is used to populate the Shiro database at deployment.
+ #
+ # For new users, we only know the password hash.
+ # The password hash is used to insert the new users into the Shiro database.
+ #
+ # For test users, our tests need to know the actual password,
+ # but we still send the hashed value to the server.
+ #
+ # So we do the hashing on the client, not on the server ?
+ #
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ hash=$(getpasshash "${name}")
+ pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ hash='new password hash'
+ pass='new password'
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+
+ #
+ # Looking good.
+ # Now we move the password hash function to a long-lived VM on the data project.
+ #
+
+ datahost=128.232.222.153
+ datauser=fedora
+
+ ssh "${datauser}@${datahost}"
+
+ sudo dnf install -y wget
+ sudo wget -O '/usr/bin/yq' 'https://github.com/mikefarah/yq/releases/download/v4.25.1/yq_linux_amd64'
+ sudo chmod a+x '/usr/bin/yq'
+
+ cat > '/home/fedora/passhashes' << EOF
+users:
+ passhash:
+ "Nelia": "Hash of Nelia's password"
+ "Ghoria": "Hash of Ghoria's password"
+ "Nalla": "Hash of Nalla's password"
+EOF
+
+ mkdir /home/fedora/bin
+
+ cat > /home/fedora/bin/getpasshash << 'EOF'
+#!/bin/sh
+key=${1:?}
+yq '
+ .users.passhash.'${key}' // "-"
+ ' '/home/fedora/passhashes'
+EOF
+
+ chmod u+x,g+x /home/fedora/bin/getpasshash
+
+ getpasshash 'Nelia'
+
+ > Hash of Nelia's password
+
+ getpasshash 'Frog'
+
+ > -
+
+ #
+ # Update our local passhash function to use the remote one.
+ #
+
+ getpasshash()
+ {
+ local key=${1:?}
+ datahost=128.232.222.153
+ datauser=fedora
+
+ ssh "${datauser}@${datahost}" \
+ "
+ getpasshash '${key:?}'
+ "
+ }
+
+ getpasshash 'Nelia'
+
+ > Hash of Nelia's password
+
+
+ getpasshash 'Frog'
+
+ > -
+
+ #
+ # Check the rest still works ...
+ #
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ hash=$(getpasshash "${name}")
+ pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ hash='new password hash'
+ pass='new password'
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > hash [Hash of Nelia's password]
+ > pass [-]
+
+ #
+ # ... and the list is truncated :-(
+ #
+ # I remember this from a previous encounter - something to do with ssh eating all the available input.
+ #
+ # GoogleFoo:
+ # SSH eats stdin of while loop
+ # https://www.chengweiyang.cn/2014/09/25/SSH-eat-stdin-of-while-loop/
+ #
+ # Just add -n
+ #
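+ # An equivalent alternative (not tested here) is to redirect stdin from
+ # /dev/null instead of passing '-n':
+ #
+ #   ssh "${datauser}@${datahost}" "getpasshash '${key:?}'" < /dev/null
+ #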
+
+ getpasshash()
+ {
+ local key=${1:?}
+ datahost=128.232.222.153
+ datauser=fedora
+
+ ssh -n "${datauser}@${datahost}" \
+ "
+ getpasshash '${key:?}'
+ "
+ }
+
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ hash=$(getpasshash "${name}")
+ pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid='new uuid'
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ hash='new password hash'
+ pass='new password'
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > hash [Hash of Nelia's password]
+ > pass [-]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > hash [Hash of Ghoria's password]
+ > pass [-]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > hash [Hash of Nalla's password]
+ > pass [-]
+ >
+ > name [Wenia]
+ > uuid [new uuid]
+ > hash [new password hash]
+ > pass [new password]
+ >
+ > name [Ava]
+ > uuid [new uuid]
+ > hash [new password hash]
+ > pass [new password]
+
+
+ #
+ # OK, so let's add the password hasher to the mix ..
+ #
+
+ dnf install -y java-latest-openjdk-headless
+
+ mkdir "${HOME}/lib"
+ pushd "${HOME}/lib"
+
+ wget https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
+ ln -s shiro-tools-hasher-1.9.0-cli.jar shiro-tools-hasher.jar
+
+ popd
+
+ testpass='bee6Aud7 aCu9uo5g'
+ java -jar "${HOME}/lib/shiro-tools-hasher.jar" -i 500000 -f shiro1 -a SHA-256 -gss 128 '${testpass:?}'
+
+ > $shiro1$SHA-256$500000$HNgc433MMf+w01YqmW20yA==$FiUVKsjTBZDywvVE19vq3ZaEOzA/NEv8WAkckwvLiCg=
+
+ #
+ # Wrap it as a function
+ #
+
+ newpasshash()
+ {
+ local password="${1:?}"
+ java -jar "${HOME}/lib/shiro-tools-hasher.jar" -i 500000 -f shiro1 -a SHA-256 -gss 128 '${password:?}'
+ }
+
+ newpasshash "${testpass}"
+
+ > $shiro1$SHA-256$500000$g3uc+AQWTWHeqdJTh7R/wQ==$M7/t5eBHiD9RiAw1FFQ52AdJwxN/WPSADnUQZSxt+J4=
+
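+ #
+ # Note: the single quotes around '${password:?}' stop the shell from
+ # expanding the variable, so the hasher is given the literal string rather
+ # than the password value. If the intent is to hash the actual password,
+ # the argument probably needs double quotes (a sketch, not what was run
+ # above):
+ #
+ #   newpasshash()
+ #   {
+ #       local password="${1:?}"
+ #       java -jar "${HOME}/lib/shiro-tools-hasher.jar" -i 500000 -f shiro1 -a SHA-256 -gss 128 "${password:?}"
+ #   }
+ #
+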
+ #
+ # Putting it all together ..
+ #
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ hash=$(getpasshash "${name}")
+ pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid=$(
+ uuidgen
+ )
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ /deployments/common/users/test-users.yml
+ )
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > hash [Hash of Nelia's password]
+ > pass [-]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > hash [Hash of Ghoria's password]
+ > pass [-]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > hash [Hash of Nalla's password]
+ > pass [-]
+ >
+ > name [Wenia]
+ > uuid [73a222f6-fea9-4563-a841-b016a56fac91]
+ > hash [$shiro1$SHA-256$500000$jnPMssW5kkCFXR57DATEOA==$pZXs45oe1z9fsKZEi9LqrOzM6Lr/DWxb0sQ3CupdQLI=]
+ > pass [ooPheem5noodaeBepoofiereejoo7o]
+ >
+ > name [Ava]
+ > uuid [3b6c9340-8d98-4c82-a434-14846e53f1fc]
+ > hash [$shiro1$SHA-256$500000$TYIhwOV6fY6hk9f+L2GFaA==$EhryHokWxdPT9C83vFjA2CFM8G1oNlsPDSk+Nc6K8KA=]
+ > pass [zuf1peish3ohL9eingielio2aezu3k]
+
+ #
+ # Make the source configurable ...
+ #
+
+ source=/deployments/common/users/test-users.yml
+
+ yq -I 0 -o json '.[]' \
+ "${source}"
+
+ > {"name":"Nelia","uid":2050,"uuid":"5cf0cf95-157e-4a40-b95e-b163f22c2d92","home":"/home/Nelia","data":"/user/Nelia","pass":"Nelia's password"}
+ > {"name":"Ghoria","uid":2051,"uuid":"237983b5-a21f-47c8-8fb3-80cbbc70ba56","home":"/home/Ghoria","data":"/user/Ghoria","pass":""}
+ > {"name":"Nalla","uid":2052,"uuid":"65c7aeb1-3c2a-43b7-acc0-8c4497997c70","home":"/home/Nalla","data":"/user/Nalla","pass":""}
+ > {"name":"Wenia","uid":2053}
+ > {"name":"Ava"}
+
+ testusers=$(mktemp)
+ cat > "${testusers}" << EOF
+- name: "Lacey"
+ uuid: "dfbda6b5-a70b-4954-8543-c5fded6666f7"
+ uid: 2050
+- name: "Yasmin"
+ uuid: "3c252b4c-6ba8-4592-835b-3352b0a6bab6"
+ uid: 2051
+- name: "Eleanor"
+- name: "Niamh"
+EOF
+
+ yq -I 0 -o json '.[]' \
+ "${testusers}"
+
+ > {"name":"Lacey","uuid":"dfbda6b5-a70b-4954-8543-c5fded6666f7","uid":2050}
+ > {"name":"Yasmin","uuid":"3c252b4c-6ba8-4592-835b-3352b0a6bab6","uid":2051}
+ > {"name":"Eleanor"}
+ > {"name":"Niamh"}
+
+
+ while read -r userinfo
+ do
+ name=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')
+ uuid=$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')
+ hash=$(getpasshash "${name}")
+ pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid=$(
+ uuidgen
+ )
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ "${testusers}"
+ )
+
+ > name [Lacey]
+ > uuid [dfbda6b5-a70b-4954-8543-c5fded6666f7]
+ > hash [$shiro1$SHA-256$500000$evAezndmljL1CCzk5I8jEw==$l7qwYH96PUHENcK4rsli1ReybBmehbEiK+yELsKcob0=]
+ > pass [Eel9Ang7xie4loohoov3weseecei7u]
+ >
+ > name [Yasmin]
+ > uuid [3c252b4c-6ba8-4592-835b-3352b0a6bab6]
+ > hash [$shiro1$SHA-256$500000$mutGJcwv+F4zBCUaPRb4dA==$RJTg4aYa5qWV1X4L+HOKXDl3WkaO9nY1Bo7AwpvSp74=]
+ > pass [eibohvaochohCha8Cheup8Fae7AW5i]
+ >
+ > name [Eleanor]
+ > uuid [a25fc878-4509-4db2-995b-3c0d5badf236]
+ > hash [$shiro1$SHA-256$500000$oFsyldNu+z3kHXsQRC9+xg==$DPRzhaDvHeN0Wd5IGnvUtn1dGgHNEvsi+xDzLpuuzwU=]
+ > pass [cee3eoth8tahHazahNee8eip2ohsh7]
+ >
+ > name [Niamh]
+ > uuid [377c4a8b-d022-43f2-b453-a6adcab17027]
+ > hash [$shiro1$SHA-256$500000$hVa791lNSpfiuM8G5KBDQg==$RYfc+w5c89htDo8r8cL9mScjCnd4D2RjBmejHT1qN4I=]
+ > pass [aophootheexaevahcooXaexaighi9X]
+
+ #
+ # Where do we want to load our users from ..
+ #
+ # Source control
+ # /deployments/common/users/live-users.yml
+ # /deployments/common/users/test-users.yml
+ #
+ # Test scripts
+ # A list of random names ...
+ # A file of random names ...
+ #
+ #
+
+ #
+ # Create our 'createuser' function.
+ #
+
+ createuser()
+ {
+ local name="${1:?}"
+ local uuid="${2}"
+ local hash="$(getpasshash "${name}")"
+ local pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid=$(
+ uuidgen
+ )
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+
+ }
+
+ while read -r userinfo
+ do
+ createuser \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')"
+
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ "${testusers}"
+ )
+
+ > name [Lacey]
+ > uuid [dfbda6b5-a70b-4954-8543-c5fded6666f7]
+ > hash [$shiro1$SHA-256$500000$jGSGmO5tnVE8mxvuXAem+g==$iXd88gkuRNHUWiLD6KI5Mx1GgeO84+RMMKzxSuylc1Y=]
+ > pass [shaeF0dahvei8eequee6thaez9xei7]
+ >
+ > name [Yasmin]
+ > uuid [3c252b4c-6ba8-4592-835b-3352b0a6bab6]
+ > hash [$shiro1$SHA-256$500000$8fiCszaNI4OWO50Lk7RkvA==$p1Sa/s7vPwvwALc0e6AsRvzyYmYiffTyJSXQLYFFj3Q=]
+ > pass [yie8oochaiquahvu7eog8uavahHaij]
+ >
+ > name [Eleanor]
+ > uuid [2feaa3c0-3257-4f30-ad3c-cc1f5666e3ef]
+ > hash [$shiro1$SHA-256$500000$xPVltPje6O1EoUEyHq1QRA==$CZQKl/xv8aVQ8DHko86u3SXBys7eLqzBAweHff0f4DU=]
+ > pass [cee7shauHie0UP0Cooshauchiekaze]
+ >
+ > name [Niamh]
+ > uuid [fcd8b61f-d128-4b8e-b363-575b95c9822c]
+ > hash [$shiro1$SHA-256$500000$/HdVwSFStS4PYDrS8ireUg==$TxqHGcwCll+556ehlfBzUIjm07kynfU+8hJTAbTOg8M=]
+ > pass [LuiX1aiBieboo3ua7koh1yee4Dee8l]
+
+ #
+ # Create a function that reads users from a YAML file.
+ #
+
+ yamlusers()
+ {
+ local yamlfile=${1:?}
+ local userinfo
+
+ while read -r userinfo
+ do
+ createuser \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')"
+
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ "${yamlfile}"
+ )
+ }
+
+ yamlusers "${testusers}"
+
+ > name [Lacey]
+ > uuid [dfbda6b5-a70b-4954-8543-c5fded6666f7]
+ > hash [$shiro1$SHA-256$500000$5r0NyZnozuf2fpGcURXJ0Q==$821AgrlkXJNyRVZrGMtzUXHC61w9EBdK4Okur7Tyi2c=]
+ > pass [maecaid7Ui6Fohphaiy5foo1gooyoh]
+ >
+ > name [Yasmin]
+ > uuid [3c252b4c-6ba8-4592-835b-3352b0a6bab6]
+ > hash [$shiro1$SHA-256$500000$AXvLifJi1QCeBDYu1kqdKQ==$p+ynZQaai1JN3nGiZMm+fmrpWA2yuTiSMx+KRet7gIk=]
+ > pass [ohsae1ahzei8Ohkier1iexei0biel6]
+ >
+ > name [Eleanor]
+ > uuid [4d52a26c-c8f5-47df-872a-0b6c4e5ed396]
+ > hash [$shiro1$SHA-256$500000$Z23fsAVrBrci4Wea3cuYZg==$czMLgKU5gf50zsIWGtSXLHFVWRDM0iGRGydodp0NYss=]
+ > pass [uiGau0axeec0tofahch8peedisheuf]
+ >
+ > name [Niamh]
+ > uuid [e9196375-3df9-4a38-88b4-10997bb1841e]
+ > hash [$shiro1$SHA-256$500000$QDeRKUorY85tgfBeSpsi6Q==$+fFAVaG5QpUX9S7IbyuLqyJOAUON3DgZrAnRejwp74g=]
+ > pass [ieXooh1ceid5Eethae4aeK8phaifei]
+
+
+ yamlusers '/deployments/common/users/test-users.yml'
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > hash [Hash of Nelia's password]
+ > pass [-]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > hash [Hash of Ghoria's password]
+ > pass [-]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > hash [Hash of Nalla's password]
+ > pass [-]
+ >
+ > name [Wenia]
+ > uuid [63dfa19a-238b-45a8-82c9-8ee694547ccb]
+ > hash [$shiro1$SHA-256$500000$SF/lxvpwTVaGVrjNFV2Baw==$Dw5wDWiQpvvTbuFsivqG7qFjstMm0iY21Zr8RzhGmfs=]
+ > pass [xo1ahneirashae6ooXi8aechuo1eeK]
+ >
+ > name [Ava]
+ > uuid [7699e3e5-797d-45be-85ee-476f785a1ab2]
+ > hash [$shiro1$SHA-256$500000$xYNzyGAL6W0/wPt5+Omtiw==$4kloJqbqlSYda5FBytlvtj0Zf60JER1pQ/M8JhP/bTI=]
+ > pass [Oosu1chizoo4kov6maigat4it8ohp2]
+
+ #
+ # But we can invoke the same function from an array of names.
+ #
+
+ testers=(
+ "Chavezlafia"
+ "Phillipsmog"
+ "Pela"
+ "Ellisiri Gonzatt"
+ "Crawfordobby Owenorty"
+ "Stephenmony Kendmitchell"
+ "Griheart"
+ "Clarpuff"
+ "Kokakhan"
+ )
+
+ for name in "${testers[@]}"
+ do
+ echo "Name [${name}]"
+ done
+
+ > Name [Chavezlafia]
+ > Name [Phillipsmog]
+ > Name [Pela]
+ > Name [Ellisiri Gonzatt]
+ > Name [Crawfordobby Owenorty]
+ > Name [Stephenmony Kendmitchell]
+ > Name [Griheart]
+ > Name [Clarpuff]
+ > Name [Kokakhan]
+
+
+ for name in "${testers[@]}"
+ do
+ createuser "${name}"
+ done
+
+
+ > name [Chavezlafia]
+ > uuid [bbc176ea-d2f8-47b9-9392-a4fe7ef68e61]
+ > hash [$shiro1$SHA-256$500000$oISkyqfDSsj5YTPxjdelxg==$/G/xpseqzC+gmqKzbKISuZcSG1eA4gIC6gnPPrtCAqQ=]
+ > pass [fiet5fie5keipi4saiHaTio4ein9Qu]
+ >
+ > name [Phillipsmog]
+ > uuid [2c2ff2ce-bfea-4cc4-a3c4-5f7ae20d4efe]
+ > hash [$shiro1$SHA-256$500000$HBOIp7qs2ToqCOQSp9yzkw==$omyg14smCwOgTdH63bFSsQoInZZUEH8lgFZtPXINPKo=]
+ > pass [waipongo1eir7johD4baiw6oobohNe]
+ >
+ > name [Pela]
+ > uuid [27378fd5-ca87-44ce-b2c4-49ec5f522efe]
+ > hash [$shiro1$SHA-256$500000$XsqO8mb1oU8NBDgANEPeSA==$6Hb0qnD7w69ZTJRPPZdZlNbpo8Ze8+CfynRzJdP0WpQ=]
+ > pass [eeTo0loo9lieXooc7phai7fuPaitoh]
+ >
+ > Error: open Gonzatt // "-"
+ > : no such file or directory
+ > name [Ellisiri Gonzatt]
+ > uuid [18ad058b-c52f-46c6-8293-ebb973a0acc3]
+ > hash []
+ > pass [-]
+ >
+ > Error: open Owenorty // "-"
+ > : no such file or directory
+ > name [Crawfordobby Owenorty]
+ > uuid [45215cfe-c595-4b44-aa06-4c468a953324]
+ > hash []
+ > pass [-]
+ >
+ > Error: open Kendmitchell // "-"
+ > : no such file or directory
+ > name [Stephenmony Kendmitchell]
+ > uuid [51fa0e12-d794-4f7b-a2a8-84fabb5491ce]
+ > hash []
+ > pass [-]
+ >
+ > name [Griheart]
+ > uuid [9427a140-e203-468e-a611-64aecf7134a3]
+ > hash [$shiro1$SHA-256$500000$jh74YGRrv1lIq4Yhy5vBWQ==$Ji5Mb7RXnoizO/gLNVsqrINH9pPu4QqzlzyWvX8xPvI=]
+ > pass [shei1aek2Aeb2ju8ruusheedua4pha]
+ >
+ > name [Clarpuff]
+ > uuid [ee1b1b46-11be-49ec-b4dc-53e3b198c5dc]
+ > hash [$shiro1$SHA-256$500000$a6v+vw15grLQ5ZQ2yaITqg==$hJIZSeBgN64QYADuKLQPt1LTUzhWs48yze44J/d7ta8=]
+ > pass [AhBieng6aa3oon8eiwoo4ohchimoo8]
+ >
+ > name [Kokakhan]
+ > uuid [b2639fab-604b-4a78-ac43-1d703ace415d]
+ > hash [$shiro1$SHA-256$500000$iaSkwzmp8cOpUR9KVNKioA==$296fhhReoI/dyqVEV/qMveVYagYLlrr6VhG5js7OgKI=]
+ > pass [ooxen1vaL6ahraesei6quiemue3ahs]
+
+ #
+ # Fails with spaces in the name.
+ #
+
+ newpasshash()
+ {
+ local password="${1:?}"
+ java -jar "${HOME}/lib/shiro-tools-hasher.jar" -i 500000 -f shiro1 -a SHA-256 -gss 128 '${password:?}'
+ }
+
+ newpasshash "a b"
+
+
+ createuser()
+ {
+ local name="${1:?}"
+ local uuid="${2}"
+ local hash="$(getpasshash \"${name}\")"
+ local pass='-'
+
+ if [ -z "${uuid}" ]
+ then
+ uuid=$(
+ uuidgen
+ )
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi
+
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+
+ }
+
+ createuser "a b"
+
+ > name [a b]
+ > uuid [5b88a158-48f6-49d1-877c-bc4021ec4f4f]
+ > hash [$shiro1$SHA-256$500000$AnPFsrWaof154sjVPJJx6Q==$wd+7bzYPooyqH2mqU48iGsBSvP3zOi/ye6LzLY37xtg=]
+ > pass [aexaiboosov9bieraingij3vi4Ma7t]
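+
+ #
+ # An alternative (a sketch, not tested here) would be to quote the key
+ # inside the yq expression in the remote getpasshash script, so callers
+ # would not need to add the escaped quotes themselves:
+ #
+ #   yq '.users.passhash."'"${key}"'" // "-"' '/home/fedora/passhashes'
+ #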
+
+ #
+ # Try again ..
+ #
+
+ for name in "${testers[@]}"
+ do
+ createuser "${name}"
+ done
+
+ > name [Chavezlafia]
+ > uuid [36010b0c-da6a-4379-b7fa-ae476c61d87e]
+ > hash [$shiro1$SHA-256$500000$jcr700igmzYat6TvsPzxow==$GaBTCEHnl0z3+wjd4x2Xzj04B7CJv4ISdXESa3EjjWs=]
+ > pass [aigahCixai7ahx5Choo0phuYutaeri]
+ >
+ > name [Phillipsmog]
+ > uuid [ce4ef604-decf-4804-894e-801fc822910d]
+ > hash [$shiro1$SHA-256$500000$u7sStQ+6oNd2TmmEKJgjGg==$jnHdwUSrxdDs+MDi/0SZV9fo13svKsr9jvUOF+fvzvw=]
+ > pass [ierae0Aod0aevojeeK0unah5Veacae]
+ >
+ > name [Pela]
+ > uuid [621d4e8d-4ca9-4b4d-bd42-64de5b689330]
+ > hash [$shiro1$SHA-256$500000$Cy2a4Jmn10ZqPQLKIjJ2Kg==$IxqtqQ7ldskDe0RkJiy+Uvht32Mho7msMUP6QLXs7zI=]
+ > pass [eesh2cahseer3bouX7oeSoo2lienga]
+ >
+ > name [Ellisiri Gonzatt]
+ > uuid [60cafd79-0615-4081-9ecd-26b274a2b26c]
+ > hash [$shiro1$SHA-256$500000$CK7Iyj1akGVqQndlMmE04A==$p4Iq4YR4rd9UWK9D4u1/v90NFZ7eBtDM3e8oEJWlDd4=]
+ > pass [JaeLochu8Xoh4jaex9aechee4ceey4]
+ >
+ > name [Crawfordobby Owenorty]
+ > uuid [d65ed1ce-7ab9-48ca-a4aa-ce2f738d2e82]
+ > hash [$shiro1$SHA-256$500000$vdeZcavn137whKH3ORWIqw==$YALswGMKwAL7m3nWXFCK16z9LKYVFHoLA64J78Efzyw=]
+ > pass [kou4eet0ooHagh8Quo7phush0reiNo]
+ >
+ > name [Stephenmony Kendmitchell]
+ > uuid [7d02d6fe-b0b2-4c0e-8828-abcfab5ff293]
+ > hash [$shiro1$SHA-256$500000$T7zjrh4sg6dS2AkajarJRg==$L7qalZAEmKJxwjAPUXU4+chAs43CkLUFHJZFmNSIhIo=]
+ > pass [euDai8Eedaibooqu9ahghouRei8ahx]
+ >
+ > name [Griheart]
+ > uuid [af6784c4-4725-448e-9adb-c8bb10a037e6]
+ > hash [$shiro1$SHA-256$500000$jHZLngdXoSE2gRkR8SX9EQ==$/Qx604/l4i6zuCmbWYGBrJsom6sL5ELueN6JN6jb5uI=]
+ > pass [naiNgiedeiyai9shahk2ooTheishuC]
+ >
+ > name [Clarpuff]
+ > uuid [d9c26616-42e5-49f1-8c32-628449cab165]
+ > hash [$shiro1$SHA-256$500000$JlzqrkLQy8uNe+VYFU9QDg==$Z5p4gkF3rg/N9ISQ7ZuUAgyGx3c5g9buSSsKV6tjVG0=]
+ > pass [ahsiebauqu1HooGei7theghuu5Aega]
+ >
+ > name [Kokakhan]
+ > uuid [9bcae2ff-6dee-463a-990f-ffc43fa5f8d5]
+ > hash [$shiro1$SHA-256$500000$b9TEDi+5j94xeUs2XdwTYA==$NEzaK6zDDvO6sQZy+wValgBxroLwxx0kzn9O2wbZc9A=]
+ > pass [ep0iShaeCa4phoh2coow4xoo6ahdoh]
+
+---------------------------------------------------------------------------------------------------
+
+ getpasshash()
+ {
+ local key="${1:?}"
+ local datahost='128.232.222.153'
+ local datauser='fedora'
+ ssh -n "${datauser}@${datahost}" \
+ "
+ getpasshash '${key:?}'
+ "
+ }
+
+ newpasshash()
+ {
+ local password="${1:?}"
+ java \
+ -jar "${HOME}/lib/shiro-tools-hasher.jar" \
+ -i 500000 \
+ -f shiro1 \
+ -a SHA-256 \
+ -gss 128 \
+ '${password:?}'
+ }
+
+ createuserinner()
+ {
+ local name="${1:?}"
+ local uuid="${2:?}"
+ local hash="${3:?}"
+ local pass="${4:?}"
+ echo "name [${name}]"
+ echo "uuid [${uuid}]"
+ echo "hash [${hash}]"
+ echo "pass [${pass}]"
+ echo ""
+ }
+
+ createuser()
+ {
+ local name="${1:?}"
+ local uuid="${2}"
+ local hash="$(getpasshash \"${name}\")";
+ local pass='-'
+ if [ -z "${uuid}" ]
+ then
+ uuid=$(
+ uuidgen
+ )
+ fi
+ if [ "${hash}" == '-' ]
+ then
+ pass=$(
+ pwgen 30 1
+ )
+ hash=$(
+ newpasshash "${pass}"
+ )
+ fi;
+ createuserinner \
+ "${name}" \
+ "${uuid}" \
+ "${hash}" \
+ "${pass}"
+ }
+
+ yamlusers()
+ {
+ local yamlfile=${1:?}
+ local userinfo
+ while read -r userinfo
+ do
+ createuser \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.name // empty')" \
+ "$(jq --raw-output --null-input --argjson user "${userinfo}" '$user.uuid // empty')"
+ done <<< $(
+ yq -I 0 -o json '.[]' \
+ "${yamlfile}"
+ )
+ }
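+
+ # A companion helper (a sketch, not part of the session above) for creating
+ # users from a plain list of names rather than a YAML file.
+
+ namedusers()
+ {
+     local name
+     for name in "$@"
+     do
+         createuser "${name}"
+     done
+ }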
+
+
+ testers=(
+ "Chavezlafia"
+ "Phillipsmog"
+ "Pela"
+ "Ellisiri Gonzatt"
+ "Crawfordobby Owenorty"
+ "Stephenmony Kendmitchell"
+ "Griheart"
+ "Clarpuff"
+ "Kokakhan"
+ )
+
+ for name in "${testers[@]}"
+ do
+ createuser "${name}"
+ done
+
+ > name [Chavezlafia]
+ > uuid [747049ef-e5a8-4943-9164-83db919cdcf8]
+ > hash [$shiro1$SHA-256$500000$oICvJoRA7fwgXU9k4NNrOQ==$lY4/XS72sDui++ff9+H2FTm2w+MQNGbTuBsZp/GywMQ=]
+ > pass [IekahMechaeBacuG1oom7akoojiete]
+ > ....
+ > ....
+ > name [Kokakhan]
+ > uuid [1a83cf56-559a-41f1-a203-874a20a73e7c]
+ > hash [$shiro1$SHA-256$500000$njYjgBJeVIWuCSr3uUN0VA==$6WHnYOYhPppiJOORQd8RXgO2ghyoaa1lw2Ozuf//UCE=]
+ > pass [ooch6Goh7jahpeiPouju3jophue8du]
+
+
+ yamlusers '/deployments/common/users/test-users.yml'
+
+ > name [Nelia]
+ > uuid [5cf0cf95-157e-4a40-b95e-b163f22c2d92]
+ > hash [Hash of Nelia's password]
+ > pass [-]
+ >
+ > name [Ghoria]
+ > uuid [237983b5-a21f-47c8-8fb3-80cbbc70ba56]
+ > hash [Hash of Ghoria's password]
+ > pass [-]
+ >
+ > name [Nalla]
+ > uuid [65c7aeb1-3c2a-43b7-acc0-8c4497997c70]
+ > hash [Hash of Nalla's password]
+ > pass [-]
+ >
+ > name [Wenia]
+ > uuid [255cc451-c4eb-44ed-a519-72ae1271d932]
+ > hash [$shiro1$SHA-256$500000$3J5bWE3+67gXx8s7v+v8Gw==$pE64F4wMG/mqDQ4NhlTqCfkqJMTYru8jKCpVvjs/cg8=]
+ > pass [theeR2eiv9kah1coongaiVait6Phee]
+ >
+ > name [Ava]
+ > uuid [e89f2d5a-5965-4484-b1ac-27ab3266bf42]
+ > hash [$shiro1$SHA-256$500000$evpTSNLmJKqQIkQQptRF+w==$t9YR1WqcvxBdm9VA8ScAWfKd5cAAh/uVp3Zuw3YEUqk=]
+ > pass [aenga4eeB6coongiphiph3weiChahl]
+
diff --git a/notes/zrq/20220512-02-create-users.txt b/notes/zrq/20220512-02-create-users.txt
new file mode 100644
index 00000000..7b3ca3ed
--- /dev/null
+++ b/notes/zrq/20220512-02-create-users.txt
@@ -0,0 +1,141 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Prototyping create user functions.
+
+ Result:
+
+ Work in progress
+
+# -----------------------------------------------------
+
+ local functions
+
+ createuser
+
+ createshirouser-local
+ createshirouser-zeppelin
+ done by stv
+ name, hash
+
+ createlinuxuser-local
+ createlinuxuser-zeppelin
+ done by stv
+ name, uid, gid, home
+
+ createusershare-local
+ TODO
+
+ openstack
+ TODO
+
+ cloudname
+ sharename
+ sharesize
+ uid,gid
+
+ mountusershare-local
+ TODO
+
+ cloudname
+ sharename
+ mountpath
+
+ cephfs-mount.sh
+ done
+ cephfs-mount.yaml
+ done
+ TODO refactor this as just shell script, no Ansible
+
+ mntpath
+ mntmode
+ mnthost default('zeppelin:masters:workers')
+
+ cephuser
+ cephkey
+ cephpath
+ cephnodes
+
+# -----------------------------------------------------
+
+ existing function
+
+ create-user-shares.sh
+
+ read list
+ common/manila/usershares.yaml
+
+ - id: "nch"
+ cloudname: "iris-gaia-data"
+ sharename: "aglais-user-nch"
+ mountpath: "/user/nch"
+
+ - id: "zrq"
+ cloudname: "iris-gaia-data"
+ sharename: "aglais-user-zrq"
+ mountpath: "/user/zrq"
+
+ foreach
+ cephfs-mount.sh
+ cephfs-mount.yaml
+
+
+ shares created manually
+ access permissions already set
+
+ new function just adds an element to this list
+ using yq to edit in place, or just append ?
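+
+ e.g. appending an entry in place with yq (a sketch; the id 'abc' is hypothetical)
+
+     yq -i '. += [{"id": "abc", "cloudname": "iris-gaia-data", "sharename": "aglais-user-abc", "mountpath": "/user/abc"}]' \
+         common/manila/usershares.yaml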
+
+# -----------------------------------------------------
+
+ access control
+
+ zeppelin node - impersonation means notebooks run as uid,gid
+ worker node - all jobs run as 'hadoop' user
+
+ if zeppelin runs as 'uid:user'
+ if hadoop runs as 'hadoop:hadoop'
+
+ shares are owned by 'uid:hadoop'
+ shares allow rwx access to owner and group
+
+ zeppelin notebooks can access because the uid has rwx
+ hadoop workers can access because the gid has rwx
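+
+ e.g. a sketch of the ownership and permissions this implies
+ (uid being the share owner's Linux uid, mountpath the share mount point)
+
+     chown "${uid}:hadoop" "${mountpath}"
+     chmod ug+rwx,o-rwx "${mountpath}"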
+
+
+# -----------------------------------------------------
+
+ space
+
+ mount /home on zeppelin as an openstack volume
+ means we can disconnect it if we need to
+ means we can re-use it if we need to
+
+
diff --git a/notes/zrq/20220513-01-git-rebase.txt b/notes/zrq/20220513-01-git-rebase.txt
new file mode 100644
index 00000000..4b7b1d23
--- /dev/null
+++ b/notes/zrq/20220513-01-git-rebase.txt
@@ -0,0 +1,556 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Rebase and merge before we add more.
+ Resolve conflicts with upstream changes.
+
+ Result:
+
+ Work in progress.
+ The rebase worked, but ended up re-comitting a lot of Stelios's commits.
+ See how this works when we merge into master.
+ Probably easier to create a new branch based on main and use meld to combine the changes.
+
+
+# -----------------------------------------------------
+# Make sure everything is committed.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git status
+
+ > On branch 20220505-zrq-user-accounts
+ > Your branch is ahead of 'origin/20220505-zrq-user-accounts' by 1 commit.
+ > (use "git push" to publish your local commits)
+
+ git push
+
+ > ....
+ > ....
+ > To github.com:Zarquan/aglais.git
+ > 40aba3a..86968a0 20220505-zrq-user-accounts -> 20220505-zrq-user-accounts
+
+ popd
+
+
+# -----------------------------------------------------
+# Fetch upstream changes and merge into master.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git checkout master
+
+ > Switched to branch 'master'
+ > Your branch is up to date with 'origin/master'.
+
+
+ git fetch upstream
+
+ > remote: Enumerating objects: 60, done.
+ > remote: Counting objects: 100% (60/60), done.
+ > remote: Compressing objects: 100% (60/60), done.
+ > remote: Total 60 (delta 29), reused 29 (delta 0), pack-reused 0
+ > Unpacking objects: 100% (60/60), 23.56 KiB | 492.00 KiB/s, done.
+ > From github.com:wfau/aglais
+ > b7c0b98..57ed7fb master -> upstream/master
+
+
+ git merge upstream/master
+
+ > Updating b7c0b98..57ed7fb
+ > Fast-forward
+ > deployments/hadoop-yarn/ansible/27-install-zeppelin.yml | 3 +
+ > deployments/hadoop-yarn/ansible/38-install-user-db.yml | 30 ++++++-
+ > deployments/hadoop-yarn/ansible/39-create-user-scripts.yml | 218 +++++++++++++++++++++++++++++++++++++++++++++
+ > deployments/hadoop-yarn/ansible/config/zeppelin.yml | 2 +-
+ > deployments/hadoop-yarn/bin/create-users.sh | 11 +++
+ > notes/stv/20220503-test-user-create.sql | 276 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ > notes/stv/20220511-create-user-test.txt | 158 +++++++++++++++++++++++++++++++++
+ > 7 files changed, 694 insertions(+), 4 deletions(-)
+ > create mode 100644 deployments/hadoop-yarn/ansible/39-create-user-scripts.yml
+ > create mode 100644 notes/stv/20220503-test-user-create.sql
+ > create mode 100644 notes/stv/20220511-create-user-test.txt
+
+
+ git status
+
+ > On branch master
+ > Your branch is ahead of 'origin/master' by 11 commits.
+ > (use "git push" to publish your local commits)
+
+
+ git push
+
+ > Total 0 (delta 0), reused 0 (delta 0), pack-reused 0
+ > To github.com:Zarquan/aglais.git
+ > b7c0b98..57ed7fb master -> master
+
+ popd
+
+
+# -----------------------------------------------------
+# Rebase our working branch to merge upstream changes from master.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git branch
+
+ > 20220505-zrq-user-accounts
+ > * master
+
+
+ git checkout '20220505-zrq-user-accounts'
+
+ > Switched to branch '20220505-zrq-user-accounts'
+ > Your branch is up to date with 'origin/20220505-zrq-user-accounts'.
+
+
+ git rebase master
+
+ > Auto-merging deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > error: could not apply 49f717c... Adding support for user accounts
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply 49f717c... Adding support for user accounts
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 57ed7fb
+ > Last commands done (6 commands done):
+ > pick 0636f60 ....
+ > pick 49f717c Adding support for user accounts
+ > (see more in file .git/rebase-merge/done)
+ > Next commands to do (18 remaining commands):
+ > pick 8e9855f Backup to desktop - just in case
+ > pick 8d87a72 Adding /opt/aglais to Zeppelin node
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '57ed7fb'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > new file: deployments/aglais/bin/aglais-test.sh
+ > new file: deployments/hadoop-yarn/ansible/10-install-aglais.yml
+ > modified: deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > new file: deployments/hadoop-yarn/ansible/config/aglais.yml
+ > modified: notes/zrq/20220505-02-user-accounts.txt
+ > new file: notes/zrq/20220510-01-user-accounts.txt
+
+
+ git rebase --continue
+
+ > [detached HEAD a5a6b79] Adding support for user accounts
+ > 6 files changed, 200 insertions(+), 13 deletions(-)
+ > create mode 100644 deployments/aglais/bin/aglais-test.sh
+ > create mode 100644 deployments/hadoop-yarn/ansible/10-install-aglais.yml
+ > create mode 100644 deployments/hadoop-yarn/ansible/config/aglais.yml
+ > create mode 100644 notes/zrq/20220510-01-user-accounts.txt
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > error: could not apply 9a5e251... Replace MySQL with MariaDB
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply 9a5e251... Replace MySQL with MariaDB
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 57ed7fb
+ > Last commands done (16 commands done):
+ > pick 55ce7a2 Explicitly create bin and lib directories
+ > pick 9a5e251 Replace MySQL with MariaDB
+ > (see more in file .git/rebase-merge/done)
+ > Next commands to do (8 remaining commands):
+ > pick f93d63b Notes and fixes
+ > pick 60cfe23 List the public examples
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '57ed7fb'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > modified: deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > modified: deployments/hadoop-yarn/bin/create-auth-database.sh
+
+
+ git rebase --continue
+
+ > [detached HEAD 2c164d2] Replace MySQL with MariaDB
+ > 2 files changed, 47 insertions(+), 61 deletions(-)
+ > Successfully rebased and updated refs/heads/20220505-zrq-user-accounts.
+
+
+ git status
+
+ > On branch 20220505-zrq-user-accounts
+ > Your branch and 'origin/20220505-zrq-user-accounts' have diverged,
+ > and have 35 and 24 different commits each, respectively.
+ > (use "git pull" to merge the remote branch into yours)
+
+
+ git pull
+
+ > hint: You have divergent branches and need to specify how to reconcile them.
+ > hint: You can do so by running one of the following commands sometime before
+ > hint: your next pull:
+ > hint:
+ > hint: git config pull.rebase false # merge
+ > hint: git config pull.rebase true # rebase
+ > hint: git config pull.ff only # fast-forward only
+ > hint:
+ > hint: You can replace "git config" with "git config --global" to set a default
+ > hint: preference for all repositories. You can also pass --rebase, --no-rebase,
+ > hint: or --ff-only on the command line to override the configured default per
+ > hint: invocation.
+ > fatal: Need to specify how to reconcile divergent branches.
+
+ # Chicken - take a snapshot
+ working=$(basename $(pwd))
+ pushd ..
+ cp -a "${working}" "${working}-snapshot-00"
+ popd
+
+
+ # Do a rebase pull ??
+ # https://stackoverflow.com/questions/13846300/how-to-make-git-pull-use-rebase-by-default-for-all-my-repositories
+ # https://stackoverflow.com/questions/2472254/when-should-i-use-git-pull-rebase
+
+ git pull --rebase
+
+ > warning: skipped previously applied commit 8c1ad28
+ > warning: skipped previously applied commit 2f4d10e
+ > warning: skipped previously applied commit cf2b410
+ > warning: skipped previously applied commit 72d05b5
+ > warning: skipped previously applied commit 6020bbd
+ > warning: skipped previously applied commit c454558
+ > warning: skipped previously applied commit 61ca7e1
+ > warning: skipped previously applied commit 70aa8c9
+ > warning: skipped previously applied commit 0d0f71f
+ > warning: skipped previously applied commit 88e88d6
+ > warning: skipped previously applied commit fff2f2e
+ > warning: skipped previously applied commit 883260e
+ > warning: skipped previously applied commit c9d92c8
+ > warning: skipped previously applied commit 11ab56c
+ > warning: skipped previously applied commit 3b0eda8
+ > warning: skipped previously applied commit 82f13b8
+ > warning: skipped previously applied commit 022bc4b
+ > warning: skipped previously applied commit 482f73e
+ > warning: skipped previously applied commit a86c076
+ > warning: skipped previously applied commit ccf9e11
+ > warning: skipped previously applied commit 450354a
+ > hint: use --reapply-cherry-picks to include skipped commits
+ > hint: Disable this message with "git config advice.skippedCherryPicks false"
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > error: could not apply 0ada1b5... Added two scripts under Zeppelin (create user / export users)
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply 0ada1b5... Added two scripts under Zeppelin (create user / export users)
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last command done (1 command done):
+ > pick 0ada1b5 Added two scripts under Zeppelin (create user / export users)
+ > Next commands to do (11 remaining commands):
+ > pick c6932c6 Added notes on testing user create / export scripts
+ > pick 3587c61 Changed naming of cloned/generated notebooks for new users (Has been tested)
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (fix conflicts and then run "git rebase --continue")
+ > (use "git rebase --skip" to skip this patch)
+ > (use "git rebase --abort" to check out the original branch)
+ >
+ > Unmerged paths:
+ > (use "git restore --staged ..." to unstage)
+ > (use "git add ..." to mark resolution)
+ > both modified: deployments/hadoop-yarn/ansible/38-install-user-db.yml
+
+ #
+ # I thought I was done ... apparently not :-(
+ #
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last command done (1 command done):
+ > pick 0ada1b5 Added two scripts under Zeppelin (create user / export users)
+ > Next commands to do (11 remaining commands):
+ > pick c6932c6 Added notes on testing user create / export scripts
+ > pick 3587c61 Changed naming of cloned/generated notebooks for new users (Has been tested)
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > modified: deployments/hadoop-yarn/ansible/38-install-user-db.yml
+
+
+ git rebase --continue
+
+ > [detached HEAD 62cdf77] Added two scripts under Zeppelin (create user / export users)
+ > Author: stvoutsin
+ > 1 file changed, 93 insertions(+), 2 deletions(-)
+ > Auto-merging deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > error: could not apply e817f1f... Breaking script into smaller scripts
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply e817f1f... Breaking script into smaller scripts
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last commands done (4 commands done):
+ > pick 3587c61 Changed naming of cloned/generated notebooks for new users (Has been tested)
+ > pick e817f1f Breaking script into smaller scripts
+ > (see more in file .git/rebase-merge/done)
+ > Next commands to do (8 remaining commands):
+ > pick 2bd6d9f Added notes on testing creation of new user
+ > pick 1af6c5e Added additonal permission command to notes on testing
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > modified: deployments/hadoop-yarn/ansible/27-install-zeppelin.yml
+ > modified: deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > modified: deployments/hadoop-yarn/ansible/config/zeppelin.yml
+
+
+ git rebase --continue
+
+ > [detached HEAD 72374cc] Breaking script into smaller scripts
+ > Author: stvoutsin
+ > 3 files changed, 114 insertions(+), 35 deletions(-)
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > error: could not apply 7ca30d4... Move create user scripts to new file
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply 7ca30d4... Move create user scripts to new file
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last commands done (7 commands done):
+ > pick 1af6c5e Added additonal permission command to notes on testing
+ > pick 7ca30d4 Move create user scripts to new file
+ > (see more in file .git/rebase-merge/done)
+ > Next commands to do (5 remaining commands):
+ > pick 0cc47ab Change create-users script to add the new scripts
+ > pick 5d43490 Fixing empty spaces in create-user-scripts yaml file
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > new file: deployments/hadoop-yarn/ansible/39-create-user-scripts.yml
+ > modified: notes/stv/20220511-create-user-test.txt
+
+
+ git rebase --continue
+
+ > deployments/hadoop-yarn/ansible/10-install-aglais.yml: needs merge
+ > deployments/hadoop-yarn/ansible/27-install-zeppelin.yml: needs merge
+ > You must edit all merge conflicts and then
+ > mark them as resolved using git add
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last commands done (10 commands done):
+ > pick 5d43490 Fixing empty spaces in create-user-scripts yaml file
+ > pick a5a6b79 Adding support for user accounts
+ > (see more in file .git/rebase-merge/done)
+ > Next commands to do (2 remaining commands):
+ > pick f3d6c45 Changed database from MySQL to MariaDB
+ > pick 2c164d2 Replace MySQL with MariaDB
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+ >
+ > Changes to be committed:
+ > (use "git restore --staged ..." to unstage)
+ > modified: deployments/hadoop-yarn/ansible/10-install-aglais.yml
+
+
+ git rebase --continue
+
+ > [detached HEAD 0284548] Adding support for user accounts
+ > 1 file changed, 1 deletion(-)
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > Auto-merging deployments/hadoop-yarn/bin/create-auth-database.sh
+ > CONFLICT (add/add): Merge conflict in deployments/hadoop-yarn/bin/create-auth-database.sh
+ > error: could not apply f3d6c45... Changed database from MySQL to MariaDB
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply f3d6c45... Changed database from MySQL to MariaDB
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last commands done (11 commands done):
+ > pick a5a6b79 Adding support for user accounts
+ > pick f3d6c45 Changed database from MySQL to MariaDB
+ > (see more in file .git/rebase-merge/done)
+ > Next command to do (1 remaining command):
+ > pick 2c164d2 Replace MySQL with MariaDB
+ > (use "git rebase --edit-todo" to view and edit)
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+
+
+ git rebase --continue
+
+ > Auto-merging deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > CONFLICT (content): Merge conflict in deployments/hadoop-yarn/ansible/38-install-user-db.yml
+ > error: could not apply 2c164d2... Replace MySQL with MariaDB
+ > hint: Resolve all conflicts manually, mark them as resolved with
+ > hint: "git add/rm ", then run "git rebase --continue".
+ > hint: You can instead skip this commit: run "git rebase --skip".
+ > hint: To abort and get back to the state before "git rebase", run "git rebase --abort".
+ > Could not apply 2c164d2... Replace MySQL with MariaDB
+
+
+ meld . &
+
+ > ....
+ > ....
+
+
+ git status
+
+ > interactive rebase in progress; onto 86968a0
+ > Last commands done (12 commands done):
+ > pick f3d6c45 Changed database from MySQL to MariaDB
+ > pick 2c164d2 Replace MySQL with MariaDB
+ > (see more in file .git/rebase-merge/done)
+ > No commands remaining.
+ > You are currently rebasing branch '20220505-zrq-user-accounts' on '86968a0'.
+ > (all conflicts fixed: run "git rebase --continue")
+
+
+ git rebase --continue
+
+ > Successfully rebased and updated refs/heads/20220505-zrq-user-accounts.
+
+
+ git status
+
+ > On branch 20220505-zrq-user-accounts
+ > Your branch is ahead of 'origin/20220505-zrq-user-accounts' by 10 commits.
+ > (use "git push" to publish your local commits)
+ >
+ > Untracked files:
+ > (use "git add ..." to include in what will be committed)
+ > notes/zrq/20220513-01-git-rebase.txt
+
+ git add notes/zrq/20220513-01-git-rebase.txt
+ git commit -m "Notes on rebase"
+
+ > [20220505-zrq-user-accounts dd5693f] Notes on rebase
+ > 1 file changed, 551 insertions(+)
+ > create mode 100644 notes/zrq/20220513-01-git-rebase.txt
+
+
+
diff --git a/notes/zrq/20220513-02-blue-deploy.txt b/notes/zrq/20220513-02-blue-deploy.txt
new file mode 100644
index 00000000..2eb777d9
--- /dev/null
+++ b/notes/zrq/20220513-02-blue-deploy.txt
@@ -0,0 +1,674 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to debug Shiro database.
+
+ Result:
+
+ Success, deployment worked and tests passed.
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 4m42.715s
+ > user 1m51.640s
+ > sys 0m13.081s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 43m26.132s
+ > user 13m10.677s
+ > sys 3m27.292s
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Restore notebooks from backup.
+# TODO Move this into the container.
+# TODO Restore from data project VM.
+#[user@desktop]
+
+ sshuser=fedora
+ sshhost=128.232.222.217
+
+ ssh "${sshuser:?}@${sshhost:?}" \
+ '
+ mv zeppelin/notebook zeppelin/notebook-old
+ '
+
+ pushd /var/local/backups/aglais/2022/20220510/
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+ 'aglais-notebooks/' \
+ "${sshuser:?}@${sshhost:?}:zeppelin/notebook"
+
+ popd
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Create our shiro-auth database.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-auth-database.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-auth-database.log
+
+ > real 0m58.721s
+ > user 0m12.923s
+ > sys 0m2.331s
+
+# -----------------------------------------------------
+# Restart Zeppelin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ zeppelin-daemon.sh restart
+ '
+
+
+# -----------------------------------------------------
+# Add our tester account.
+#[root@ansibler]
+
+ testuser='gamestop'
+ testpass='ahm5Rion see2Eegh'
+
+ ssh zeppelin \
+ "
+ testhash=\$(
+ java -jar '/opt/aglais/lib/shiro-tools-hasher-cli.jar' -i 500000 -f shiro1 -a SHA-256 -gss 128 '${testpass:?}'
+ )
+ mysql --execute \
+ '
+ INSERT INTO users (username, password) VALUES (\"${testuser:?}\", \"'\${testhash:?}'\");
+ INSERT INTO user_roles (username, role_name) VALUES (\"${testuser:?}\", \"user\");
+ '
+ "
+
+ ssh zeppelin \
+ "
+ mysql --execute \
+ '
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+ > username password password_salt
+ > gamestop $shiro1$SHA-256.... NULL
+ >
+ > username role_name
+ > gamestop user
+
+
+# -----------------------------------------------------
+# Load our curl tests.
+# TODO save the IP address during the build.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.217
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
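+    #
+    # For reference, a minimal sketch of what the zeplogin helper is assumed to
+    # do (an illustration only, not the actual rest-tests.sh implementation, and
+    # not executed as part of this session): POST to the Zeppelin /api/login
+    # endpoint and save the session cookie that the later curl calls pick up
+    # from ${zepcookies}. The cookie-jar path is a placeholder.
+    #
+
+    zepcookies=/tmp/zeppelin-cookies.txt
+
+    zeplogin()
+    {
+        curl \
+            --silent \
+            --cookie-jar "${zepcookies:?}" \
+            --data-urlencode "userName=${1:?}" \
+            --data-urlencode "password=${2:?}" \
+            "${zeppelinurl:?}/api/login" \
+        | jq '.'
+    }
+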
+
+# -----------------------------------------------------
+# Login to Zeppelin.
+#[root@ansibler]
+
+ zeplogin "${testuser:?}" "${testpass:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "017d2598-4a9f-44c6-aa94-b0126ad6a772",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# List the public notebooks
+#[root@ansibler]
+
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq '.body[] | select(.path | startswith("/Public"))'
+
+ > {
+ > "id": "2GRTQZFUM",
+ > "path": "/Public Examples/1. Start here"
+ > }
+ > {
+ > "id": "2GRA39HCN",
+ > "path": "/Public Examples/2. Data holdings"
+ > }
+ > {
+ > "id": "2GQ6WMH9W",
+ > "path": "/Public Examples/3. Source counts over the sky"
+ > }
+ > {
+ > "id": "2GSNYBDWB",
+ > "path": "/Public Examples/4. Mean proper motions over the sky"
+ > }
+ > {
+ > "id": "2H2YRJCKM",
+ > "path": "/Public Examples/5. Working with Gaia XP spectra"
+ > }
+ > {
+ > "id": "2GZME59KY",
+ > "path": "/Public Examples/6. Working with cross-matched surveys"
+ > }
+ > {
+ > "id": "2GQDKZ59J",
+ > "path": "/Public Examples/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > {
+ > "id": "2GVXKC266",
+ > "path": "/Public Examples/9. Tips and tricks"
+ > }
+
+
+# -----------------------------------------------------
+# Run the HealpixSourceCounts notebook
+#[root@ansibler]
+
+ noteid=2GQ6WMH9W
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [20210507-084613_357121151][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-105718_1698521515][Set the resolution level and define the query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-110030_2095441495][Plot up the results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-091244_670006530][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1648610499944_1376690736][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "3. Source counts over the sky",
+ > "id": "2GQ6WMH9W",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/3. Source counts over the sky"
+ > }
+ > }
+
+ > 0:1:11
+
+
+# -----------------------------------------------------
+# Run the MeanProperMotions notebook
+#[root@ansibler]
+
+ noteid=2GSNYBDWB
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [paragraph_1646395441893_1272795891][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111756_391695716][Set HEALPix resolution]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111538_106023214][Define a data frame by SQL query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111939_1386609632][Mean RA proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111943_814907111][Mean Dec proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111956_1822284967][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-132447_1514402898][Tidy-up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20211207-132335_689637194][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "4. Mean proper motions over the sky",
+ > "id": "2GSNYBDWB",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/4. Mean proper motions over the sky"
+ > }
+ > }
+
+ > 0:0:54
+
+
+# -----------------------------------------------------
+# Run the RandomForestClassifier notebook
+#[root@ansibler]
+
+ noteid=2GQDKZ59J
+
+ zepnbclear "${noteid}"
+ zepnbexecstep "${noteid}"
+ zepnbstatus "${noteid}"
+ zepnbtotaltime "${noteid}"
+
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+
+ > Para [20201013-131059_546082898][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-131649_1734629667][Basic catalogue query selections and predicates]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-132418_278702125][Raw catalogue with selected columns]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-094650_221463065][Visualisation (colour / absolute-magnitue diagram) of the raw catalogue]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-110502_1704727157][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-105445_95907042][Define the training samples]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-161110_18118893][Assemble training and reserve test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-152110_1282917873][Train up the Random Forrest]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210504-153521_1591875670][Check feature set for nulls]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-131823_1744793710][Classify the reserved test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201016-154755_24366630][Classification confusion matrix]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-163421_1811049882][Relative importance of the selected features]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-162249_1468741293][Apply the classification model and plot sample results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201124-100512_110153564][Histogram of classification probability]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-103046_1353183691][Sky distribution of good source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-163312_728555601][Sky distribution of bad source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647354647989_1984770159][Tidy up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210428-140519_1288739408][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210506-134212_1741520795][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "7. Good astrometric solutions via ML Random Forrest classifier",
+ > "id": "2GQDKZ59J",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false,
+ > "looknfeel": "default",
+ > "personalizedMode": "false"
+ > },
+ > "info": {},
+ > "path": "/Public Examples/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > }
+
+ > 0:9:5
+
+
+
diff --git a/notes/zrq/20220513-03-create-users.txt b/notes/zrq/20220513-03-create-users.txt
new file mode 100644
index 00000000..7cf965db
--- /dev/null
+++ b/notes/zrq/20220513-03-create-users.txt
@@ -0,0 +1,387 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+    Testing that the create-user shell scripts work.
+
+ Result:
+
+ Success, but required some tweaking to get there.
+
+# -----------------------------------------------------
+# Install the create-user scripts on Zeppelin.
+# TODO Add these steps to the main create-all sequence.
+#[root@ansibler]
+
+ inventory="/deployments/hadoop-yarn/ansible/config/${configname:?}.yml"
+
+ pushd "/deployments/hadoop-yarn/ansible"
+
+ ansible-playbook \
+ --inventory "${inventory:?}" \
+ "39-create-user-scripts.yml"
+
+ popd
+
+
+# -----------------------------------------------------
+# Create ssh key for fedora (Zeppelin) user.
+# TODO This needs to be part of the deployment sequence.
+# TODO Copy the public key to /opt/aglais/keys/.pub
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ sudo mkdir "/opt/aglais/keys"
+ '
+
+ ssh zeppelin \
+ '
+ keypath=${HOME}/.ssh/id_rsa
+ ssh-keygen -t rsa -N "" -f "${keypath}"
+ sudo cp "${keypath}.pub" "/opt/aglais/keys/$(whoami).pub"
+ '
+
+ > Generating public/private rsa key pair.
+ > Your identification has been saved in /home/fedora/.ssh/fedora.rsa.
+ > Your public key has been saved in /home/fedora/.ssh/fedora.rsa.pub.
+ > The key fingerprint is:
+ > SHA256:9Yr0fMxVcOzPqfKip9KwIpqZexOhLXjYAZPOOeWQ0u8 fedora@iris-gaia-blue-20220513-zeppelin
+ > The key's randomart image is:
+ > +---[RSA 3072]----+
+ > | o. ...|
+ > |=o.. o.|
+ > |+o=. . ..|
+ > | =.o. . . ..|
+ > |.o+o. S . ..o|
+ > |ooooE o + + . .o|
+ > | .. . = + + . |
+ > | ++ . o . = . |
+ > | *+ o . .o+ +. |
+ > +----[SHA256]-----+
+
+
+ ssh zeppelin \
+ '
+ ls -al "/opt/aglais/keys"
+ '
+
+
+# -----------------------------------------------------
+# Test the export_users script.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ export_users.sh
+ '
+
+ ssh zeppelin \
+ '
+ head /opt/aglais/tmp/auth.sql
+ echo
+ echo
+ tail /opt/aglais/tmp/auth.sql
+ '
+
+ > -- MySQL dump 10.18 Distrib 10.3.27-MariaDB, for Linux (x86_64)
+ > --
+ > -- Host: localhost Database: shirodata
+ > -- ------------------------------------------------------
+ > -- Server version 10.3.27-MariaDB
+ >
+ > /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+ > /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+ > /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+ > /*!40101 SET NAMES utf8mb4 */;
+
+ > /*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+ > /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+ > /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+ > /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+ > /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+ > /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+
+# -----------------------------------------------------
+# Test the create_unix_user script.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ create_unix_user.sh "marigold"
+ '
+
+ > Generating public/private rsa key pair.
+ > Your identification has been saved in /home/marigold/.ssh/id_rsa.
+ > Your public key has been saved in /home/marigold/.ssh/id_rsa.pub.
+ > The key fingerprint is:
+ > SHA256:49zawBcaVlKjtvLDYnPIn+zctmK4ilvkclRfXCCm1Tw root@iris-gaia-blue-20220513-zeppelin
+ > The key's randomart image is:
+ > +---[RSA 3072]----+
+ > | +o+.. |
+ > | + =Eo |
+ > | o + +. |
+ > | . o = |
+ > | o . S . |
+ > | + . X = . |
+ > | . + *.@ o |
+ > | = ..BoO. |
+ > | o....+Bo+. |
+ > +----[SHA256]-----+
+
+
+ ssh zeppelin \
+ '
+ id "marigold"
+ '
+
+ > uid=1006(marigold) gid=1008(marigold) groups=1008(marigold),1006(zeppelinusers)
+
+
+ ssh zeppelin \
+ '
+ sudo ls -al /home/marigold
+ echo
+ sudo ls -al /home/marigold/.ssh
+ '
+
+ > total 24
+ > drwx------. 3 marigold marigold 4096 May 13 18:56 .
+ > drwxr-xr-x. 9 root root 4096 May 13 18:56 ..
+ > -rw-r--r--. 1 marigold marigold 18 Aug 5 2019 .bash_logout
+ > -rw-r--r--. 1 marigold marigold 141 Aug 5 2019 .bash_profile
+ > -rw-r--r--. 1 marigold marigold 376 Aug 5 2019 .bashrc
+ > drwxr-xr-x. 2 marigold marigold 4096 May 13 18:56 .ssh
+ >
+ > total 20
+ > drwxr-xr-x. 2 marigold marigold 4096 May 13 18:56 .
+ > drwx------. 3 marigold marigold 4096 May 13 18:56 ..
+ > -rw-------. 1 marigold marigold 593 May 13 18:56 authorized_keys
+ > -rw-------. 1 marigold marigold 2635 May 13 18:56 id_rsa
+ > -rw-r--r--. 1 marigold marigold 591 May 13 18:56 id_rsa.pub
+
+
+ ssh zeppelin \
+ '
+ sudo cat /home/marigold/.ssh/authorized_keys
+ '
+
+ > ssh-rsa AAAA.... fedora@iris-gaia-blue-20220513-zeppelin
+
+
+ ssh zeppelin \
+ '
+ date
+ hostname
+ whoami
+ echo
+ ssh -o IdentitiesOnly=yes marigold@localhost \
+ "
+ date
+ hostname
+ whoami
+ "
+ '
+
+ > Fri May 13 19:06:20 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > fedora
+ >
+ > Fri May 13 19:06:20 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > marigold
+
+ #
+ # TODO fedora needs to accept the key for localhost.
+ #
+
+
+# -----------------------------------------------------
+# Test the create_hdfs_user script.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ create_hdfs_user.sh "marigold"
+ '
+
+ ssh zeppelin \
+ '
+ hdfs dfs -ls /user
+ '
+
+ > Found 2 items
+ > drwxr-xr-x - fedora supergroup 0 2022-05-13 15:29 /user/fedora
+ > drwxr-xr-x - marigold supergroup 0 2022-05-13 19:09 /user/marigold
+
+
+
+ ssh zeppelin \
+ '
+ date
+ hostname
+ whoami
+ echo
+ ssh -o IdentitiesOnly=yes marigold@localhost \
+ "
+ date
+ hostname
+ whoami
+ echo
+ hdfs dfs -ls /user
+ "
+ '
+
+ > Fri May 13 19:16:03 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > fedora
+ >
+ > Fri May 13 19:16:03 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > marigold
+ >
+ > Found 2 items
+ > drwxr-xr-x - fedora supergroup 0 2022-05-13 15:29 /user/fedora
+ > drwxr-xr-x - marigold supergroup 0 2022-05-13 19:09 /user/marigold
+
+
+# -----------------------------------------------------
+# Test the create_mysql_user script.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ create_mysql_user.sh "marigold" "secret" "user"
+ '
+
+ ssh zeppelin \
+ '
+ mysql --table --execute \
+ "
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ "
+ '
+
+ > +----------+-----------------+---------------+
+ > | username | password | password_salt |
+ > +----------+-----------------+---------------+
+ > | marigold | $shiro1$SHA.... | NULL |
+ > +----------+-----------------+---------------+
+ > +----------+-----------+
+ > | username | role_name |
+ > +----------+-----------+
+ > | marigold | user |
+ > +----------+-----------+
+
+
+# -----------------------------------------------------
+# Test the create_notebook_clone script.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ create_notebook_clone.sh "marigold" "secret" "http://localhost:8080"
+ '
+
+
+# -----------------------------------------------------
+# Load our curl tests.
+# TODO save the IP address during the build.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.217
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
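+    #
+    # A possible stopgap for the TODO above, rather than hard-coding the address:
+    # read whatever the deployment wrote into the ssh client config for the
+    # 'zeppelin' alias. A sketch only, assuming that alias resolves to the same
+    # floating IP used here; not executed as part of this session.
+    #
+
+    zeppelinhost=$(
+        ssh -G zeppelin \
+        | awk '$1 == "hostname" {print $2}'
+        )
+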
+
+# -----------------------------------------------------
+# Login to Zeppelin.
+#[root@ansibler]
+
+ zeplogin "marigold" "secret"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "marigold",
+ > "ticket": "b2783cea-0c71-4141-ba9b-489b68e50a67",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# List the user's notebooks
+#[root@ansibler]
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq '.body[] | select(.path | startswith("/Users/marigold"))'
+
+ > {
+ > "id": "2H3HF6SFP",
+ > "path": "/Users/marigold/1. Start here"
+ > }
+ > {
+ > "id": "2H3CT4VD1",
+ > "path": "/Users/marigold/2. Data holdings"
+ > }
+ > {
+ > "id": "2H2FN7FN1",
+ > "path": "/Users/marigold/3. Source counts over the sky"
+ > }
+ > {
+ > "id": "2H3AR37SV",
+ > "path": "/Users/marigold/4. Mean proper motions over the sky"
+ > }
+ > {
+ > "id": "2H4D3KYRP",
+ > "path": "/Users/marigold/5. Working with Gaia XP spectra"
+ > }
+ > {
+ > "id": "2H4QM5TX6",
+ > "path": "/Users/marigold/6. Working with cross-matched surveys"
+ > }
+ > {
+ > "id": "2H5XG81VY",
+ > "path": "/Users/marigold/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > {
+ > "id": "2H32WN5RC",
+ > "path": "/Users/marigold/9. Tips and tricks"
+ > }
+
+
diff --git a/notes/zrq/20220514-01-blue-deploy.txt b/notes/zrq/20220514-01-blue-deploy.txt
new file mode 100644
index 00000000..cf87d47f
--- /dev/null
+++ b/notes/zrq/20220514-01-blue-deploy.txt
@@ -0,0 +1,1398 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to check everything works.
+
+ Result:
+
+ Work in progress ....
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 4m23.071s
+ > user 1m49.784s
+ > sys 0m12.289s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 42m43.745s
+ > user 12m56.944s
+ > sys 3m23.966s
+
+
+# -----------------------------------------------------
+# Create our shiro-auth database.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-auth-database.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-auth-database.log
+
+ > real 1m5.732s
+ > user 0m18.383s
+ > sys 0m3.741s
+
+
+# -----------------------------------------------------
+# Create ssh key for fedora (Zeppelin) user.
+# TODO This needs to be part of the deployment sequence.
+# TODO Copy the public key to /opt/aglais/keys/.pub
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ sudo mkdir "/opt/aglais/keys"
+ '
+
+ ssh zeppelin \
+ '
+ keypath=${HOME}/.ssh/id_rsa
+ ssh-keygen -t rsa -N "" -f "${keypath}"
+ sudo cp "${keypath}.pub" "/opt/aglais/keys/$(whoami).pub"
+ '
+
+ > Generating public/private rsa key pair.
+ > Your identification has been saved in /home/fedora/.ssh/id_rsa.
+ > Your public key has been saved in /home/fedora/.ssh/id_rsa.pub.
+ > The key fingerprint is:
+ > SHA256:b0xWwRQM18nh9+K7sFr18AamTSH9+9bGbcxCLuLABR4 fedora@iris-gaia-blue-20220513-zeppelin
+ > The key's randomart image is:
+ > +---[RSA 3072]----+
+ > | .==+.o |
+ > | .o++ |
+ > | E o + .|
+ > | . o . . +.|
+ > | S + B o|
+ > | . * B.*.|
+ > | o + +oo=*|
+ > | o...ooo@|
+ > | .oo..o*.|
+ > +----[SHA256]-----+
+
+
+ ssh zeppelin \
+ '
+ cat "/opt/aglais/keys/$(whoami).pub"
+ '
+
+ > ssh-rsa AAAA.... fedora@iris-gaia-blue-20220513-zeppelin
+
+
+# -----------------------------------------------------
+# Move the built-in notebooks out of the way.
+# TODO Make this part of the deployment.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ mv zeppelin/notebook zeppelin/notebook-old
+ '
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Restore notebooks from backup.
+# TODO Move this into the container.
+# TODO Restore from data project VM.
+#[user@desktop]
+
+ sshuser=fedora
+ sshhost=128.232.222.201
+
+ ssh "${sshuser:?}@${sshhost:?}" \
+ '
+ date
+ hostname
+ '
+
+ > Sat May 14 00:49:38 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+
+
+ pushd /var/local/backups/aglais/2022/20220510/
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+ 'aglais-notebooks/' \
+ "${sshuser:?}@${sshhost:?}:zeppelin/notebook"
+
+ popd
+
+ > ....
+ > ....
+ > Number of files: 656 (reg: 441, dir: 215)
+ > Number of created files: 656 (reg: 441, dir: 215)
+ > Number of deleted files: 0
+ > Number of regular files transferred: 441
+ > Total file size: 141.06M bytes
+ > Total transferred file size: 141.06M bytes
+ > Literal data: 141.06M bytes
+ > Matched data: 0 bytes
+ > File list size: 0
+ > File list generation time: 0.004 seconds
+ > File list transfer time: 0.000 seconds
+ > Total bytes sent: 141.15M
+ > Total bytes received: 9.70K
+ >
+ > sent 141.15M bytes received 9.70K bytes 2.69M bytes/sec
+ > total size is 141.06M speedup is 1.00
+
+
+# -----------------------------------------------------
+# -----------------------------------------------------
+# Restart Zeppelin.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ zeppelin-daemon.sh restart
+ '
+
+ > Zeppelin stop [ OK ]
+ > Zeppelin start [ OK ]
+
+
+# -----------------------------------------------------
+# Add our test account.
+#[root@ansibler]
+
+ testuser='gamestop'
+ testpass=$(pwgen 16 1)
+
+ ssh zeppelin \
+ "
+ create_user.sh '${testuser}' '${testpass}' 'user'
+ "
+
+ > Generating public/private rsa key pair.
+ > Your identification has been saved in /home/gamestop/.ssh/id_rsa.
+ > Your public key has been saved in /home/gamestop/.ssh/id_rsa.pub.
+ > The key fingerprint is:
+ > SHA256:f65i3zjgBUT2UbbLFTdDNnG3CFqBLcK90dOgOfD1O9o root@iris-gaia-blue-20220513-zeppelin
+ > The key's randomart image is:
+ > +---[RSA 3072]----+
+ > | .o+ =B* oB=|
+ > | ++***ooooB|
+ > | ..*= oo.. |
+ > | .... o. |
+ > | S. oo |
+ > | ... o . |
+ > | . o...E |
+ > | + .= |
+ > | . o+oo |
+ > +----[SHA256]-----+
+ > {"status":"OK","message":"","body":{"principal":"gamestop","ticket":"1624d15f-777b-450c-b002-7f786267f566","roles":"[\"user\"]"}} % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 89 100 47 100 42 783 700 --:--:-- --:--:-- --:--:-- 1483{"status":"OK","message":"","body":"2H249GNV1"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 92 100 47 100 45 1516 1451 --:--:-- --:--:-- --:--:-- 2967{"status":"OK","message":"","body":"2H33RZHNZ"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 105 100 47 100 58 1205 1487 --:--:-- --:--:-- --:--:-- 2692{"status":"OK","message":"","body":"2H43GAMR9"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 111 100 47 100 64 1044 1422 --:--:-- --:--:-- --:--:-- 2466{"status":"OK","message":"","body":"2H25YXUAD"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 107 100 47 100 60 2764 3529 --:--:-- --:--:-- --:--:-- 6294{"status":"OK","message":"","body":"2H54VQWAU"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 113 100 47 100 66 1678 2357 --:--:-- --:--:-- --:--:-- 4035{"status":"OK","message":"","body":"2H2ZPFFWK"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 138 100 47 100 91 870 1685 --:--:-- --:--:-- --:--:-- 2555{"status":"OK","message":"","body":"2H267CGY9"}
+ > % Total % Received % Xferd Average Speed Time Time Time Current
+ > Dload Upload Total Spent Left Speed
+ > 100 94 100 47 100 47 1382 1382 --:--:-- --:--:-- --:--:-- 2764{"status":"OK","message":"","body":"2H5ZZZCAV"}
+
+
+# -----------------------------------------------------
+# Check the test user's Unix account.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ echo
+ id '${testuser:?}'
+ "
+
+ > Sat May 14 01:02:29 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ >
+ > uid=1006(gamestop) gid=1008(gamestop) groups=1008(gamestop),1006(zeppelinusers)
+
+
+# -----------------------------------------------------
+# Check the test user's ssh keys.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ echo
+ sudo ls -al '/home/${testuser:?}'
+ echo
+ sudo ls -al '/home/${testuser:?}/.ssh'
+ "
+
+ > Sat May 14 01:03:11 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ >
+ > total 24
+ > drwx------. 3 gamestop gamestop 4096 May 14 01:00 .
+ > drwxr-xr-x. 9 root root 4096 May 14 01:00 ..
+ > -rw-r--r--. 1 gamestop gamestop 18 Aug 5 2019 .bash_logout
+ > -rw-r--r--. 1 gamestop gamestop 141 Aug 5 2019 .bash_profile
+ > -rw-r--r--. 1 gamestop gamestop 376 Aug 5 2019 .bashrc
+ > drwxr-xr-x. 2 gamestop gamestop 4096 May 14 01:00 .ssh
+ >
+ > total 20
+ > drwxr-xr-x. 2 gamestop gamestop 4096 May 14 01:00 .
+ > drwx------. 3 gamestop gamestop 4096 May 14 01:00 ..
+ > -rw-------. 1 gamestop gamestop 593 May 14 01:00 authorized_keys
+ > -rw-------. 1 gamestop gamestop 2635 May 14 01:00 id_rsa
+ > -rw-r--r--. 1 gamestop gamestop 591 May 14 01:00 id_rsa.pub
+
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ echo
+ sudo cat '/home/${testuser:?}/.ssh/authorized_keys'
+ "
+
+ > Sat May 14 01:03:39 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ >
+ > ssh-rsa AAAA.... fedora@iris-gaia-blue-20220513-zeppelin
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ whoami
+ echo
+ ssh -o IdentitiesOnly=yes '${testuser:?}@localhost' \
+ '
+ date
+ hostname
+ whoami
+ '
+ "
+
+ > Sat May 14 01:07:36 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > fedora
+ >
+ > Sat May 14 01:07:37 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > gamestop
+
+# -----------------------------------------------------
+# For that to work, fedora needs to accept the public key for localhost.
+# Which is in /etc/ssh ..
+#[fedora@zeppelin]
+
+ sudo cat /etc/ssh/ssh_host_ecdsa_key.pub
+
+ > ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPFarQQHlgk5rlzX+6+UpVfzDUvz0Jh9Lt+gItvOATfWraIEDZo4KNLhcZ73SXaD/s4f09VxnTusUbamr77d/CM=
+
+ cat ~/.ssh/known_hosts
+
+ > localhost ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPFarQQHlgk5rlzX+6+UpVfzDUvz0Jh9Lt+gItvOATfWraIEDZo4KNLhcZ73SXaD/s4f09VxnTusUbamr77d/CM=
+
+ #
+ # We could probably solve this by copying the public key and adding 'localhost' to the first line.
+    # Note - this will always be the first known_hosts entry because we have only just created the deployment.
+ #
+
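+    #
+    # A sketch of how that could be scripted during the deployment (not run here),
+    # so the fedora user never hits the interactive host-key prompt. Either build
+    # the known_hosts entry from the host key in /etc/ssh, or use ssh-keyscan.
+    #
+
+    printf 'localhost %s\n' "$(sudo cut -d ' ' -f 1,2 /etc/ssh/ssh_host_ecdsa_key.pub)" \
+        >> "${HOME}/.ssh/known_hosts"
+
+    # or
+
+    ssh-keyscan -t ecdsa localhost >> "${HOME}/.ssh/known_hosts"
+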
+
+# -----------------------------------------------------
+# Check the test user's HDFS space.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ whoami
+ echo
+ hdfs dfs -ls '/user'
+ "
+
+ > "
+ > Sat May 14 01:12:46 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > fedora
+ >
+ > Found 1 items
+ > drwxr-xr-x - gamestop supergroup 0 2022-05-14 01:00 /user/gamestop
+
+
+ ssh zeppelin \
+ "
+ date
+ hostname
+ whoami
+ echo
+ ssh -o IdentitiesOnly=yes '${testuser:?}@localhost' \
+ '
+ date
+ hostname
+ whoami
+ echo
+ hdfs dfs -ls '/user'
+ '
+ "
+
+ > Sat May 14 01:13:00 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > fedora
+ >
+ > Sat May 14 01:13:00 UTC 2022
+ > iris-gaia-blue-20220513-zeppelin
+ > gamestop
+ >
+ > Found 1 items
+ > drwxr-xr-x - gamestop supergroup 0 2022-05-14 01:00 /user/gamestop
+
+
+# -----------------------------------------------------
+# Check the test user's Shiro account.
+#[root@ansibler]
+
+ ssh zeppelin \
+ "
+ mysql --table --execute \
+ '
+ SELECT * FROM users ;
+ SELECT * FROM user_roles ;
+ '
+ "
+
+ > +----------+------------------+---------------+
+ > | username | password | password_salt |
+ > +----------+------------------+---------------+
+ > | gamestop | $shiro1$SHA-.... | NULL |
+ > +----------+------------------+---------------+
+ >
+ > +----------+-----------+
+ > | username | role_name |
+ > +----------+-----------+
+ > | gamestop | user |
+ > +----------+-----------+
+
+
+# -----------------------------------------------------
+# Load our curl tests.
+# TODO save the IP address during the build.
+#[root@ansibler]
+
+ zeppelinhost=128.232.222.201
+ zeppelinport=8080
+ zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?}
+
+ source /deployments/zeppelin/test/bin/rest-tests.sh
+
+
+# -----------------------------------------------------
+# Login to Zeppelin as the test user.
+#[root@ansibler]
+
+ zeplogin "${testuser:?}" "${testpass:?}"
+
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "principal": "gamestop",
+ > "ticket": "1624d15f-777b-450c-b002-7f786267f566",
+ > "roles": "[\"user\"]"
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# List the test user's notebooks
+#[root@ansibler]
+
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq ".body[] | select(.path | startswith(\"/Users/${testuser:?}\"))"
+
+ > {
+ > "id": "2H249GNV1",
+ > "path": "/Users/gamestop/1. Start here"
+ > }
+ > {
+ > "id": "2H33RZHNZ",
+ > "path": "/Users/gamestop/2. Data holdings"
+ > }
+ > {
+ > "id": "2H43GAMR9",
+ > "path": "/Users/gamestop/3. Source counts over the sky"
+ > }
+ > {
+ > "id": "2H25YXUAD",
+ > "path": "/Users/gamestop/4. Mean proper motions over the sky"
+ > }
+ > {
+ > "id": "2H54VQWAU",
+ > "path": "/Users/gamestop/5. Working with Gaia XP spectra"
+ > }
+ > {
+ > "id": "2H2ZPFFWK",
+ > "path": "/Users/gamestop/6. Working with cross-matched surveys"
+ > }
+ > {
+ > "id": "2H267CGY9",
+ > "path": "/Users/gamestop/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > {
+ > "id": "2H5ZZZCAV",
+ > "path": "/Users/gamestop/9. Tips and tricks"
+ > }
+
+
+# -----------------------------------------------------
+# Run all the test user's notebooks.
+#[root@ansibler]
+
+ for noteid in $(
+ curl \
+ --silent \
+ --cookie "${zepcookies:?}" \
+ "${zeppelinurl:?}/api/notebook" \
+ | jq -r ".body[] | select(.path | startswith(\"/Users/${testuser:?}\")) | .id"
+ )
+ do
+ echo "-----------------------------------------------------"
+ echo "Notebook [${noteid}]"
+ echo
+ zepnbclear "${noteid}"
+ echo
+ zepnbexecstep "${noteid}"
+ echo
+ zepnbstatus "${noteid}"
+ echo
+ zepnbtotaltime "${noteid}"
+ echo
+ done
+
+ > Notebook [2H249GNV1]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [20210507-152557_21014937][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-152628_446700578][Familiarisation]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-084215_1445060106][Zeppelin notebooks]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-160139_1365676170][PySpark SQL]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-105824_1166659944][Example code from previous cell]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-152746_1171284722][Spark aspects]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-090543_1039599855][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "1. Start here",
+ > "id": "2H249GNV1",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/1. Start here"
+ > }
+ > }
+ >
+ > 0:0:38
+
+ > Notebook [2H33RZHNZ]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [20210507-123801_497865463][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-124436_282498395][Database and table details]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1651056869670_357515407][N.B.]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-124618_1845823957][Description and links]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-133844_1243154051][Column listing for a table]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-140821_1444471628][Querying the main catalogue]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-144257_1004220319][Querying with cross-matched data]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-144728_1143294166][Things to note]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-144934_423944230][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "2. Data holdings",
+ > "id": "2H33RZHNZ",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/2. Data holdings"
+ > }
+ > }
+ >
+ > 0:0:16
+
+ > Notebook [2H43GAMR9]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [20210507-084613_357121151][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-105718_1698521515][Set the resolution level and define the query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20200826-110030_2095441495][Plot up the results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210507-091244_670006530][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1648610499944_1376690736][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "3. Source counts over the sky",
+ > "id": "2H43GAMR9",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/3. Source counts over the sky"
+ > }
+ > }
+ >
+ > 0:0:29
+
+ > Notebook [2H25YXUAD]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [paragraph_1646395441893_1272795891][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111756_391695716][Set HEALPix resolution]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111538_106023214][Define a data frame by SQL query]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111939_1386609632][Mean RA proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111943_814907111][Mean Dec proper motion plot]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-111956_1822284967][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210510-132447_1514402898][Tidy-up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20211207-132335_689637194][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "4. Mean proper motions over the sky",
+ > "id": "2H25YXUAD",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/4. Mean proper motions over the sky"
+ > }
+ > }
+ >
+ > 0:0:39
+
+ > Notebook [2H54VQWAU]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [paragraph_1650981001262_1093264483][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1650981269844_2057939329][Sampling and plotting spectra TODO CHECK FOLLOWING DR3 RELEASE]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "ERROR"
+ > }
+ > }
+ > Result [ERROR]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "5. Working with Gaia XP spectra",
+ > "id": "2H54VQWAU",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/5. Working with Gaia XP spectra"
+ > }
+ > }
+ >
+ > 0:0:0
+
+ > Notebook [2H2ZPFFWK]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [paragraph_1646992513304_653265144][Introduction]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647250636508_2082384671][Standard platform set-up TODO]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646992632242_1534494301][Utility function definitions]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646996320283_812479873][Define a data aggregation TODO tweak]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646996892901_1183551489][Collect the results and process in preparation for visualisation]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646997023626_41851232][Visualise via matplotlib]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647276187713_571053798][Further reading TODO add links]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647339970003_891385563][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "6. Working with cross-matched surveys",
+ > "id": "2H2ZPFFWK",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/6. Working with cross-matched surveys"
+ > }
+ > }
+ >
+ > 0:0:47
+
+ > Notebook [2H267CGY9]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [20201013-131059_546082898][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-131649_1734629667][Basic catalogue query selections and predicates]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-132418_278702125][Raw catalogue with selected columns]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-094650_221463065][Visualisation (colour / absolute-magnitue diagram) of the raw catalogue]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201120-110502_1704727157][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-105445_95907042][Define the training samples]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-161110_18118893][Assemble training and reserve test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201013-152110_1282917873][Train up the Random Forrest]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210504-153521_1591875670][Check feature set for nulls]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201015-131823_1744793710][Classify the reserved test sets]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201016-154755_24366630][Classification confusion matrix]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-163421_1811049882][Relative importance of the selected features]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201123-162249_1468741293][Apply the classification model and plot sample results]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201124-100512_110153564][Histogram of classification probability]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-103046_1353183691][Sky distribution of good source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20201125-163312_728555601][Sky distribution of bad source sample]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1647354647989_1984770159][Tidy up]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210428-140519_1288739408][Further reading and resources]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [20210506-134212_1741520795][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "7. Good astrometric solutions via ML Random Forrest classifier",
+ > "id": "2H267CGY9",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/7. Good astrometric solutions via ML Random Forrest classifier"
+ > }
+ > }
+ >
+ > 0:9:10
+
+ > Notebook [2H5ZZZCAV]
+ >
+ > {
+ > "status": "OK",
+ > "message": ""
+ > }
+ >
+ >
+ > Para [paragraph_1646302979387_1967819699][null]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1650969130797_24418076][Resetting the Spark context]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646303142140_1348941331][Interpreters]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646303435709_963866048][Getting Python help (PySpark)]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ >
+ > Para [paragraph_1646307170102_2048767015][Getting Python help (IPython)]
+ > {
+ > "status": "OK",
+ > "body": {
+ > "code": "SUCCESS"
+ > }
+ > }
+ > Result [SUCCESS]
+ > parse error: Invalid numeric literal at line 1, column 83
+ > parse error: Invalid numeric literal at line 1, column 83
+ > parse error: Invalid numeric literal at line 1, column 83
+ >
+ > Para [][]
+ > bash: paraid: parameter null or not set
+ > Result []
+ >
+ > {
+ > "status": "OK",
+ > "message": "",
+ > "body": {
+ > "paragraphs": [],
+ > "name": "9. Tips and tricks",
+ > "id": "2H5ZZZCAV",
+ > "defaultInterpreterGroup": "spark",
+ > "version": "0.10.0",
+ > "noteParams": {},
+ > "noteForms": {},
+ > "angularObjects": {},
+ > "config": {
+ > "personalizedMode": "false",
+ > "looknfeel": "default",
+ > "isZeppelinNotebookCronEnable": false
+ > },
+ > "info": {},
+ > "path": "/Users/gamestop/9. Tips and tricks"
+ > }
+ > }
+ >
+ > 0:0:6
+
+
diff --git a/notes/zrq/20220517-01-create-users.txt b/notes/zrq/20220517-01-create-users.txt
new file mode 100644
index 00000000..b1b5d3e3
--- /dev/null
+++ b/notes/zrq/20220517-01-create-users.txt
@@ -0,0 +1,586 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+    Develop the create-user functions.
+
+ Result:
+
+ Success ..
+ A set of create-user scripts accept YAML and generate JSON.
+ TODO Need to wire up the functions to create the user shares.
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+ configname=zeppelin-54.86-spark-6.26.43
+
+
+# -----------------------------------------------------
+# Install the hash generator in our client container.
+#[root@ansibler]
+
+ dnf install -y java-latest-openjdk-headless
+
+ mkdir "${HOME}/lib"
+ pushd "${HOME}/lib"
+
+ wget https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
+ ln -s shiro-tools-hasher-1.9.0-cli.jar shiro-tools-hasher.jar
+
+ popd
+
+
+# -----------------------------------------------------
+# Hash some passwords.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ newpasshash "Nelia's password"
+ newpasshash "Ghoria's password"
+ newpasshash "Nalla's password"
+
+ > $shiro1$SHA-256$500000$pFPrHD2aadhp8H2DRvUTiw==$BfvnN68VUze19UdSZ5ak5dUB+dcj99OKCy0PAmtl/tM=
+ > $shiro1$SHA-256$500000$jImCE+te8lXR5UytB4JSTA==$0M+TqzV/1yU+1MLCjsQLMbjCJYp1or455uh0UlrVEjE=
+ > $shiro1$SHA-256$500000$OLl6J+tHHPbvMV1f602fBA==$IwKL74ftVxBvh0LUzt0VGn2DWuKUTDs6D1xuB4v6xIc=
+
+
+
+# -----------------------------------------------------
+# Install the hash lookup tools on our data node.
+#[root@ansibler]
+
+ datahostname='iris-gaia-data.duckdns.org'
+ datahostuser='fedora'
+
+ ssh "${datahostuser}@${datahostname}"
+
+ sudo dnf install -y wget
+ sudo wget -O '/usr/bin/yq' 'https://github.com/mikefarah/yq/releases/download/v4.25.1/yq_linux_amd64'
+ sudo chmod a+x '/usr/bin/yq'
+
+ cat > '/home/fedora/passhashes' << 'EOF'
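+# Pre-computed Shiro password hashes, keyed by user name (generated with newpasshash above).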
+users:
+ passhash:
+ "Nelia": "$shiro1$SHA-256$500000$pFPrHD2aadhp8H2DRvUTiw==$BfvnN68VUze19UdSZ5ak5dUB+dcj99OKCy0PAmtl/tM="
+ "Ghoria": "$shiro1$SHA-256$500000$jImCE+te8lXR5UytB4JSTA==$0M+TqzV/1yU+1MLCjsQLMbjCJYp1or455uh0UlrVEjE="
+ "Nalla": "$shiro1$SHA-256$500000$OLl6J+tHHPbvMV1f602fBA==$IwKL74ftVxBvh0LUzt0VGn2DWuKUTDs6D1xuB4v6xIc="
+EOF
+
+
+ mkdir /home/fedora/bin
+
+ cat > /home/fedora/bin/getpasshash << 'EOF'
+#!/bin/sh
+key=${1:?}
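+# Look up the Shiro password hash for this user name; yq prints an empty string if the key is missing.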
+yq '
+ .users.passhash.'${key}' // ""
+ ' '/home/fedora/passhashes'
+EOF
+
+ chmod u+x,g+x /home/fedora/bin/getpasshash
+
+ getpasshash 'Nelia'
+
+ > $shiro1$SHA-256$500000$pFPrHD2aadhp8H2DRvUTiw==$BfvnN68VUze19UdSZ5ak5dUB+dcj99OKCy0PAmtl/tM=
+
+ getpasshash 'Frog'
+
+ >
+
+
+# -----------------------------------------------------
+# Test creating single users.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ createusermain \
+ "albert" \
+ "2000" \
+ "2000" \
+ "home/path" \
+ "data/path" \
+ "5" \
+ | jq '.'
+
+ > {
+ > "linux": {
+ > "name": "albert",
+ > "uid": 2000,
+ > "gid": 2000,
+ > "home": "home/path"
+ > },
+ > "shiro": {
+ > "pass": "ech1waG0oodiev5foogiem6ub7chai",
+ > "hash": "$shiro1$SHA-256$500000$jNEAbbLcWlaNQ19kpR2Mtw==$yBULtdnnns74qO1lU5UK3zsWd0YhBmGX8It3P8Lzdm4="
+ > },
+ > "share": {
+ > "name": "user-data-albert",
+ > "uuid": "5432ad28-c0f9-4d7f-b40a-1c1bb2bf3293",
+ > "path": "data/path",
+ > "size": 5
+ > }
+ > }
+
+
+ createusermain \
+ "albert" \
+ "2000" \
+ "2000" \
+ | jq '.'
+
+ > {
+ > "linux": {
+ > "name": "albert",
+ > "uid": 2000,
+ > "gid": 2000,
+ > "home": "/home/albert"
+ > },
+ > "shiro": {
+ > "pass": "uga4Quahm2Ohch6Feih7Eequee4ohR",
+ > "hash": "$shiro1$SHA-256$500000$dZVo5N4wI87mhXPeSeMsNQ==$0xLaEfTV1ssRK+hAYYpe1PqmclptbnYJ3eQ9n/r2/Rc="
+ > },
+ > "share": {
+ > "name": "user-data-albert",
+ > "uuid": "f9ffd966-2d58-46ae-aa0f-e49ac5407ae1",
+ > "path": "/user/albert",
+ > "size": 10
+ > }
+ > }
+
+
+ createusermain \
+ "albert" \
+ "2000" \
+ | jq '.'
+
+ > {
+ > "linux": {
+ > "name": "albert",
+ > "uid": 2000,
+ > "gid": 2000,
+ > "home": "/home/albert"
+ > },
+ > "shiro": {
+ > "pass": "jook5na0Quoop0eiK0sai2quu9aibu",
+ > "hash": "$shiro1$SHA-256$500000$lf+oWfPdBZqtNyIe82Y5GQ==$ZTYkZAV/px7emfSRGrw7pRJxVi/G+DpuIKt/cUANbSQ="
+ > },
+ > "share": {
+ > "name": "user-data-albert",
+ > "uuid": "e6d3622d-4c17-4dad-9a86-f4cbdff04024",
+ > "path": "/user/albert",
+ > "size": 10
+ > }
+ > }
+
+
+ createusermain \
+ "albert" \
+ | jq '.'
+
+ > {
+ > "linux": {
+ > "name": "albert",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/albert"
+ > },
+ > "shiro": {
+ > "pass": "ahrohT1raeK4agheyohxuK3Quohn7H",
+ > "hash": "$shiro1$SHA-256$500000$htBBfOVEv2faNEEjJGU5YA==$1bM9zvSvb3GcIyw5UEjoJAR6FDetdoojeCNnWo4l35M="
+ > },
+ > "share": {
+ > "name": "user-data-albert",
+ > "uuid": "b2ebd562-1787-4e05-81ae-5ed0fdbd3577",
+ > "path": "/user/albert",
+ > "size": 10
+ > }
+ > }
+
+
+# -----------------------------------------------------
+# Test creating a list of users.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ testers=(
+ "Chavezlafia"
+ "Ellisiri"
+ "Gonzatt"
+ "Crawobby"
+ "Owenorty"
+ "Stepnemony"
+ "Kendmit"
+ "Clarpulff"
+ "Kokakhan"
+ )
+
+ createarrayusers \
+ ${testers[@]} \
+ | jq '.'
+
+ > {
+ > "users": [
+ > {
+ > "linux": {
+ > "name": "Chavezlafia",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Chavezlafia"
+ > },
+ > "shiro": {
+ > "pass": "loh1vairush8ooshieth8railah9Yi",
+ > "hash": "$shiro1$SHA-256$500000$2HWRbhGI84fhXXD2s3GDTQ==$6rd2+1kB2XYJKdeMq2vZoSMRI/CF6h9lsEry25ojQw0="
+ > },
+ > "share": {
+ > "name": "user-data-Chavezlafia",
+ > "uuid": "5ba31d43-0ebd-4196-a532-c518648d86ce",
+ > "path": "/user/Chavezlafia",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Ellisiri",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Ellisiri"
+ > },
+ > "shiro": {
+ > "pass": "keib9keiWeeghuZiexoocohBohsh2c",
+ > "hash": "$shiro1$SHA-256$500000$gARUvJ+QxuHcecmtdh1XXg==$yGdNuqzLGUd90OjwRiIIcfT9SCAxiEMGVnNQR52UJ5I="
+ > },
+ > "share": {
+ > "name": "user-data-Ellisiri",
+ > "uuid": "d4f897fa-adea-4a6e-b3d9-245b03e5e4dd",
+ > "path": "/user/Ellisiri",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Gonzatt",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Gonzatt"
+ > },
+ > "shiro": {
+ > "pass": "sah0iexotoop4Biku4Ohfuod0uch5O",
+ > "hash": "$shiro1$SHA-256$500000$8R3/GZj5Va6hxSjxnPlaTw==$zfhthb+m889DzmEQyPg/OTkKCxMxfdKKNeKGAweJ+Gs="
+ > },
+ > "share": {
+ > "name": "user-data-Gonzatt",
+ > "uuid": "c7e4d165-859f-40fa-ae2f-b7156be65400",
+ > "path": "/user/Gonzatt",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Crawobby",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Crawobby"
+ > },
+ > "shiro": {
+ > "pass": "tholohp9ee5sahjie3Aicip1thueyu",
+ > "hash": "$shiro1$SHA-256$500000$/rj+Hk13hhO/Yw7ewwzOfg==$kXGqD1DJEfw/pOnKxWcWFF/WPSg1jT5cfXZIzYMAtwU="
+ > },
+ > "share": {
+ > "name": "user-data-Crawobby",
+ > "uuid": "e19b635c-bf8c-4672-bd21-8fe4ed3974a6",
+ > "path": "/user/Crawobby",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Owenorty",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Owenorty"
+ > },
+ > "shiro": {
+ > "pass": "aeyuL3ooRusoon2Feeru3xu4ajiong",
+ > "hash": "$shiro1$SHA-256$500000$pOPFczQTM5oUclvngZLgRg==$SpVLRdNxpzV/hJEYFUKWKOxTrcmrbASOpImXAOKzQxc="
+ > },
+ > "share": {
+ > "name": "user-data-Owenorty",
+ > "uuid": "c313f862-1763-4eff-8372-0869a6d6f03b",
+ > "path": "/user/Owenorty",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Stepnemony",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Stepnemony"
+ > },
+ > "shiro": {
+ > "pass": "AneeriF0Ohk0lai3vietaip3eiBee7",
+ > "hash": "$shiro1$SHA-256$500000$FkYUET1Bfyw2GrbB9dRI1w==$7npMOVJuxhOwy38eDPknGBk7nYm0oqRYxhJQSHtMDY0="
+ > },
+ > "share": {
+ > "name": "user-data-Stepnemony",
+ > "uuid": "27d0c8b2-0ed8-4d52-9af1-c4f303316f13",
+ > "path": "/user/Stepnemony",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Kendmit",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Kendmit"
+ > },
+ > "shiro": {
+ > "pass": "oothoigoh9Eng0eij3Hie2ieYoe2su",
+ > "hash": "$shiro1$SHA-256$500000$Z/eUEsiRbkggf+aOk1cY5A==$yRxWWuBZ34DhoShukUFiysIElcvJ1euUXiHEvihYgkk="
+ > },
+ > "share": {
+ > "name": "user-data-Kendmit",
+ > "uuid": "04784058-c74b-46fb-adb6-431f7f1efaa6",
+ > "path": "/user/Kendmit",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Clarpulff",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Clarpulff"
+ > },
+ > "shiro": {
+ > "pass": "ee0yahgh8iej4ieN1Eet1eequoo5oi",
+ > "hash": "$shiro1$SHA-256$500000$qVREM7NmahWlyT0xgIIqmg==$SCF4kIkr6hCQ4QXV7wXDLp/FQ30QbLuCnQr7mZgF6DQ="
+ > },
+ > "share": {
+ > "name": "user-data-Clarpulff",
+ > "uuid": "1ef39e22-96f6-4358-8e9d-4b82ae236b3d",
+ > "path": "/user/Clarpulff",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Kokakhan",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Kokakhan"
+ > },
+ > "shiro": {
+ > "pass": "ooPhoghoCheenui6thu1Hoo5Fei3ak",
+ > "hash": "$shiro1$SHA-256$500000$8vpDgsFj3rUynuC4kipfQw==$AsZkaCMipl7evs4NJfKzV0O/W24Yyz4fF0DvS1FzY5Q="
+ > },
+ > "share": {
+ > "name": "user-data-Kokakhan",
+ > "uuid": "7b5f712d-3021-46e2-9850-ff8fe38c3408",
+ > "path": "/user/Kokakhan",
+ > "size": 10
+ > }
+ > }
+ > ]
+ > }
+
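+    #
+    # createarrayusers wraps the individual user documents in a single {users: [...]}
+    # object. A commented-out sketch of one way to get the same wrapping with jq,
+    # offered as a guess rather than how the function is actually written:
+    #
+    #   for username in "${testers[@]}"
+    #   do
+    #       createusermain "${username}"
+    #   done \
+    #   | jq --slurp '{users: .}'
+    #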
+
+# -----------------------------------------------------
+# Test creating users from a list in a YAML file.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ createyamlusers \
+ '/deployments/common/users/test-users.yml' \
+ 'test-users' \
+ | jq '.'
+
+ > {
+ > "users": [
+ > {
+ > "linux": {
+ > "name": "Nelia",
+ > "uid": 2050,
+ > "gid": 2050,
+ > "home": "/home/Nelia"
+ > },
+ > "shiro": {
+ > "pass": "",
+ > "hash": "$shiro1$SHA-256$500000$pFPrHD2aadhp8H2DRvUTiw==$BfvnN68VUze19UdSZ5ak5dUB+dcj99OKCy0PAmtl/tM="
+ > },
+ > "share": {
+ > "name": "user-data-Nelia",
+ > "uuid": "f68600f7-f3d0-4cc0-afd0-e4fe43501de9",
+ > "path": "/user/Nelia",
+ > "size": 20
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Ghoria",
+ > "uid": 2051,
+ > "gid": 2051,
+ > "home": "/home/Ghoria"
+ > },
+ > "shiro": {
+ > "pass": "",
+ > "hash": "$shiro1$SHA-256$500000$jImCE+te8lXR5UytB4JSTA==$0M+TqzV/1yU+1MLCjsQLMbjCJYp1or455uh0UlrVEjE="
+ > },
+ > "share": {
+ > "name": "user-data-Ghoria",
+ > "uuid": "2bd701b4-c727-4ebd-8fbe-549a2e01fdb9",
+ > "path": "/user/Ghoria",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Nalla",
+ > "uid": 2052,
+ > "gid": 2052,
+ > "home": "/home/Nalla"
+ > },
+ > "shiro": {
+ > "pass": "",
+ > "hash": "$shiro1$SHA-256$500000$OLl6J+tHHPbvMV1f602fBA==$IwKL74ftVxBvh0LUzt0VGn2DWuKUTDs6D1xuB4v6xIc="
+ > },
+ > "share": {
+ > "name": "user-data-Nalla",
+ > "uuid": "f91916b3-cdfb-46a0-9a22-900ab9f7855a",
+ > "path": "/user/Nalla",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Wenia",
+ > "uid": 2053,
+ > "gid": 2053,
+ > "home": "/home/Wenia"
+ > },
+ > "shiro": {
+ > "pass": "pooS7eeliepeeshaiba4hee7ohghee",
+ > "hash": "$shiro1$SHA-256$500000$kMLq4j6B/Vd0O9mYFRAjRw==$RRNKvYy5d7eiURZouGbz0qaBRh6Q8gja4nqbLz6KmSg="
+ > },
+ > "share": {
+ > "name": "user-data-Wenia",
+ > "uuid": "f032ad97-2db6-4e50-a9ca-f4b87f3dabbc",
+ > "path": "/user/Wenia",
+ > "size": 10
+ > }
+ > },
+ > {
+ > "linux": {
+ > "name": "Ava",
+ > "uid": 1,
+ > "gid": 1,
+ > "home": "/home/Ava"
+ > },
+ > "shiro": {
+ > "pass": "Yaibil5Aequiequ5eRuzigeedou0oy",
+ > "hash": "$shiro1$SHA-256$500000$WOCrAmFM1obz2N+rL2XPUA==$dPXzMVdZyom+M6jr6QKA8O6AcUgeEpW9Ya3mL9GRd0Q="
+ > },
+ > "share": {
+ > "name": "user-data-Ava",
+ > "uuid": "444d7f86-2953-468d-942b-65ec1df92950",
+ > "path": "/user/Ava",
+ > "size": 10
+ > }
+ > }
+ > ]
+ > }
+
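+    #
+    # Judging by the output, some entries in test-users.yml carry a fixed uid and a
+    # pre-computed Shiro hash (hence the empty 'pass'), while others give only a name
+    # and get a generated password, the next free uid and the default share size.
+    # A guessed example of the shape, not copied from the real file:
+    #
+    #   test-users:
+    #     - name: "Nelia"
+    #       uid:  2050
+    #       hash: "$shiro1$SHA-256$500000$...."
+    #       size: 20
+    #     - name: "Ava"
+    #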
+
+# -----------------------------------------------------
+# Create a set of users and generate a JSON array of their usernames and passwords.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ testers=(
+ "Owenorty"
+ "Stepnemony"
+ "Kendmit"
+ )
+
+ tempfile=$(mktemp)
+
+ createarrayusers \
+        "${testers[@]}" \
+ > "${tempfile}"
+
+ jq '[ .users[] | {name: .linux.name, pass: .shiro.pass} ]' "${tempfile}"
+
+
+ > [
+ > {
+ > "name": "Owenorty",
+ > "pass": "eexei7faiguS9ieMengais5ohSow0y"
+ > },
+ > {
+ > "name": "Stepnemony",
+ > "pass": "pierohnoopha0eephahghejeiCae7v"
+ > },
+ > {
+ > "name": "Kendmit",
+ > "pass": "aush2fahhie2Niengem0eequ5Nouko"
+ > }
+ > ]
+
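+    #
+    # The same temp file can be flattened further if plain text is easier to hand out,
+    # e.g. one "name password" pair per line (jq -r drops the JSON quoting):
+    #
+    jq -r '.users[] | "\(.linux.name) \(.shiro.pass)"' "${tempfile}"
+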
+
diff --git a/notes/zrq/20220517-02-backups.txt b/notes/zrq/20220517-02-backups.txt
new file mode 100644
index 00000000..9dfa0d02
--- /dev/null
+++ b/notes/zrq/20220517-02-backups.txt
@@ -0,0 +1,106 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Simple backup script to start with.
+
+ Result:
+
+ Success, backup to local desktop works.
+
+
+# -----------------------------------------------------------------------------------------
+# Backup our Zeppelin notebooks.
+#[user@desktop]
+
+ sshuser=fedora
+ sshhost=zeppelin.aglais.uk
+
+ mkdir -p /var/local/backups/aglais/$(date '+%Y')/$(date '+%Y%m%d')
+ pushd /var/local/backups/aglais/$(date '+%Y')/$(date '+%Y%m%d')
+
+ datetime=$(date '+%Y%m%d%H%M%S')
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+        "${sshuser:?}@${sshhost:?}:/home/fedora/zeppelin/notebook/" \
+ 'aglais-notebooks'
+
+ > ....
+ > aglais-notebooks/.git/
+ > ....
+ > aglais-notebooks/Public Examples/
+ > aglais-notebooks/Public Examples/1. Start here_2GRTQZFUM.zpln
+ > aglais-notebooks/Public Examples/2. Data holdings_2GRA39HCN.zpln
+ > aglais-notebooks/Public Examples/3. Source counts over the sky_2GQ6WMH9W.zpln
+ > aglais-notebooks/Public Examples/4. Mean proper motions over the sky_2GSNYBDWB.zpln
+ > aglais-notebooks/Public Examples/5. Working with Gaia XP spectra_2H2YRJCKM.zpln
+ > aglais-notebooks/Public Examples/6. Working with cross-matched surveys_2GZME59KY.zpln
+ > aglais-notebooks/Public Examples/7. Good astrometric solutions via ML Random Forrest classifier_2GQDKZ59J.zpln
+ > aglais-notebooks/Public Examples/9. Tips and tricks_2GVXKC266.zpln
+ > aglais-notebooks/Users/
+ > aglais-notebooks/Users/dcr/
+ > ....
+ > aglais-notebooks/Users/nch/
+ > ....
+ > aglais-notebooks/Users/zrq/
+ > ....
+
+
+ tar --xz \
+ -cvf "aglais-notebooks-${datetime:?}.tar.xz" \
+ 'aglais-notebooks'
+
+ du -h -d 2 .
+
+ > 0 ./aglais-notebooks/.git/branches
+ > 32K ./aglais-notebooks/.git/hooks
+ > 4.0K ./aglais-notebooks/.git/info
+ > 36K ./aglais-notebooks/.git/logs
+ > 103M ./aglais-notebooks/.git/objects
+ > 20K ./aglais-notebooks/.git/refs
+ > 103M ./aglais-notebooks/.git
+ > 3.2M ./aglais-notebooks/Public Examples
+ > 27M ./aglais-notebooks/Users/dcr
+ > 3.7M ./aglais-notebooks/Users/nch
+ > 12K ./aglais-notebooks/Users/zrq
+ > 31M ./aglais-notebooks/Users
+ > 136M ./aglais-notebooks
+ > 253M .
+
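+
+    #
+    # Once the steps above settle down they could be folded into a small script.
+    # A commented-out sketch using the same paths and ssh account as above;
+    # rsync --archive covers the individual permission/ownership/recursion flags
+    # used earlier.
+    #
+    #   #!/bin/sh
+    #   set -eu
+    #
+    #   sshuser='fedora'
+    #   sshhost='zeppelin.aglais.uk'
+    #
+    #   backupdir="/var/local/backups/aglais/$(date '+%Y')/$(date '+%Y%m%d')"
+    #   datetime="$(date '+%Y%m%d%H%M%S')"
+    #
+    #   mkdir -p "${backupdir}"
+    #   cd "${backupdir}"
+    #
+    #   rsync \
+    #       --archive \
+    #       --checksum \
+    #       --stats \
+    #       "${sshuser}@${sshhost}:/home/fedora/zeppelin/notebook/" \
+    #       'aglais-notebooks'
+    #
+    #   tar --xz \
+    #       -cf "aglais-notebooks-${datetime}.tar.xz" \
+    #       'aglais-notebooks'
+    #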
+
+
diff --git a/notes/zrq/20220517-03-blue-deploy.txt b/notes/zrq/20220517-03-blue-deploy.txt
new file mode 100644
index 00000000..59795b9e
--- /dev/null
+++ b/notes/zrq/20220517-03-blue-deploy.txt
@@ -0,0 +1,309 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Test deployment to check everything works.
+
+ Result:
+
+ Work in progress ....
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+
+ podman run \
+ --rm \
+ --tty \
+ --interactive \
+ --name ansibler \
+ --hostname ansibler \
+ --publish 3000:3000 \
+ --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+ --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+ --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+ --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+ ghcr.io/wfau/atolmis/ansible-client:2022.03.19 \
+ bash
+
+
+# -----------------------------------------------------
+# Set the target configuration.
+#[root@ansibler]
+
+ cloudbase='arcus'
+ cloudname='iris-gaia-blue'
+    configname='zeppelin-54.86-spark-6.26.43'
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+ time \
+ /deployments/openstack/bin/delete-all.sh \
+ "${cloudname:?}"
+
+ > real 3m58.815s
+ > user 1m38.677s
+ > sys 0m10.503s
+
+
+# -----------------------------------------------------
+# Create everything.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-all.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-all.log
+
+ > real 34m48.518s
+ > user 10m9.402s
+ > sys 2m34.735s
+
+
+# -----------------------------------------------------
+# Debug the ssh keys.
+#[root@ansibler]
+
+#
+# inventory="/deployments/hadoop-yarn/ansible/config/${configname:?}.yml"
+#
+# pushd "/deployments/hadoop-yarn/ansible"
+# ansible-playbook \
+# --inventory "${inventory:?}" \
+# "41-config-zeppelin-ssh.yml"
+# popd
+#
+#
+# ssh zeppelin \
+# '
+# hostname
+# date
+# echo
+# ssh -o IdentitiesOnly=yes -i ~/.ssh/id_ssh_rsa fedora@localhost \
+# "
+# hostname
+# date
+# "
+# '
+#
+
+ > iris-gaia-blue-20220517-zeppelin
+ > Tue 17 May 2022 02:19:35 PM UTC
+ >
+ > iris-gaia-blue-20220517-zeppelin
+ > Tue 17 May 2022 02:19:35 PM UTC
+
+
+# -----------------------------------------------------
+# Create our shiro-auth database.
+#[root@ansibler]
+
+ time \
+ /deployments/hadoop-yarn/bin/create-auth-database.sh \
+ "${cloudname:?}" \
+ "${configname:?}" \
+ | tee /tmp/create-auth-database.log
+
+ > ....
+ > ....
+
+
+# -----------------------------------------------------
+# Check the ssh keys for fedora (Zeppelin) user.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ hostname
+ date
+ echo
+ sudo ls -al "/home/fedora/.ssh"
+ echo
+ sudo ls -al "/opt/aglais/ssh"
+ echo
+ cat "/opt/aglais/ssh/ssh-fedora.pub"
+ '
+
+ > iris-gaia-blue-20220517-zeppelin
+ > Tue 17 May 2022 02:08:59 PM UTC
+ >
+ > total 32
+ > drwx------. 2 fedora fedora 4096 May 17 14:02 .
+ > drwx------. 5 fedora fedora 4096 May 17 14:07 ..
+ > -rw-------. 1 fedora fedora 3252 May 17 13:37 authorized_keys
+ > -rw-------. 1 fedora fedora 3357 May 17 13:37 id_ssh_rsa
+ > -rw-r--r--. 1 fedora fedora 726 May 17 13:37 id_ssh_rsa.pub
+ > -rw-r--r--. 1 fedora fedora 171 May 17 14:02 known_hosts
+ >
+ > total 12
+ > drwxrwxr-x. 2 root root 4096 May 17 13:37 .
+ > drwxrwxr-x. 6 root root 4096 May 17 13:37 ..
+ > -r--r--r--. 1 root root 726 May 17 13:37 ssh-fedora.pub
+ >
+ > ssh-rsa AAAA....mfWw==
+
+
+# -----------------------------------------------------
+# Copy notebooks from the live server.
+#[root@ansibler]
+
+ ssh zeppelin \
+ '
+ sshuser=fedora
+ sshhost=zeppelin.aglais.uk
+
+ mv "/home/fedora/zeppelin/notebook" \
+ "/home/fedora/zeppelin/notebook-old"
+
+ rsync \
+ --perms \
+ --times \
+ --group \
+ --owner \
+ --stats \
+ --progress \
+ --human-readable \
+ --checksum \
+ --recursive \
+ "${sshuser:?}@${sshhost:?}:zeppelin/notebook/" \
+            "/home/fedora/zeppelin/notebook"
+        '
+
+ > ....
+ > ....
+ > Number of files: 657 (reg: 442, dir: 215)
+ > Number of created files: 657 (reg: 442, dir: 215)
+ > Number of deleted files: 0
+ > Number of regular files transferred: 442
+ > Total file size: 141.09M bytes
+ > Total transferred file size: 141.09M bytes
+ > Literal data: 141.09M bytes
+ > Matched data: 0 bytes
+ > File list size: 65.52K
+ > File list generation time: 0.001 seconds
+ > File list transfer time: 0.000 seconds
+ > Total bytes sent: 9.66K
+ > Total bytes received: 141.18M
+ >
+ > sent 9.66K bytes received 141.18M bytes 56.48M bytes/sec
+ > total size is 141.09M speedup is 1.00
+
+
+# -----------------------------------------------------
+# Start Zeppelin.
+#[root@ansibler]
+
+ # TODO Need to create logs and run directories.
+
+ ssh zeppelin \
+ '
+ zeppelin-daemon.sh start
+ '
+
+ > Zeppelin start [ OK ]
+
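+    #
+    # Sketch for the TODO above: by default zeppelin-daemon.sh writes its log and pid
+    # files into logs/ and run/ under the Zeppelin install directory, so creating those
+    # directories first should be enough. The install path here is an assumption.
+    #
+    #   ssh zeppelin \
+    #       '
+    #       mkdir -p "/home/fedora/zeppelin/logs" \
+    #                "/home/fedora/zeppelin/run"
+    #       '
+    #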
+
+# -----------------------------------------------------
+# Install the hash generator in our client container.
+# TODO This could be done using Ansible ?
+# TODO This could be done using a remote call to Zeppelin ?
+#[root@ansibler]
+
+ dnf install -y java-latest-openjdk-headless
+
+ mkdir "${HOME}/lib"
+ pushd "${HOME}/lib"
+
+ wget https://repo1.maven.org/maven2/org/apache/shiro/tools/shiro-tools-hasher/1.9.0/shiro-tools-hasher-1.9.0-cli.jar
+ ln -s shiro-tools-hasher-1.9.0-cli.jar shiro-tools-hasher.jar
+
+ popd
+
+
+# -----------------------------------------------------
+# Add the SSH host key for the data node.
+#[root@ansibler]
+
+    ssh-keyscan 'data.aglais.uk' >> "${HOME}/.ssh/known_hosts"
+
+ > # data.aglais.uk:22 SSH-2.0-OpenSSH_8.0
+ > # data.aglais.uk:22 SSH-2.0-OpenSSH_8.0
+ > # data.aglais.uk:22 SSH-2.0-OpenSSH_8.0
+ > # data.aglais.uk:22 SSH-2.0-OpenSSH_8.0
+ > # data.aglais.uk:22 SSH-2.0-OpenSSH_8.0
+
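+    #
+    # Quick check that the host key actually landed in the file
+    # (ssh-keygen -F looks a host up in a known_hosts file).
+    #
+    ssh-keygen -F 'data.aglais.uk' -f "${HOME}/.ssh/known_hosts"
+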
+
+# -----------------------------------------------------
+# Test creating a single user.
+#[root@ansibler]
+
+ source /deployments/zeppelin/bin/create-user-tools.sh
+
+ createusermain \
+ "albert" \
+ "2000" \
+ "2000" \
+ "/home/path" \
+ "/data/path" \
+ "5" \
+ | jq '.'
+
+ > {
+ > "linux": {
+ > "name": "albert",
+ > "uid": 2000,
+ > "gid": 2000,
+ > "home": "/home/path"
+ > },
+ > "shiro": {
+ > "pass": "eiyeiqu1vieCiefahJium2thaeBief",
+ > "hash": "$shiro1$SHA-256$500000$9EBUsVA+CxYE74Dk0MU1fg==$789yvOMemR/5DExULJUi447Sc6NCCoagWGLFfsvhp5g="
+ > },
+ > "share": {
+ > "name": "user-data-albert",
+ > "uuid": "003c78c0-e28e-46db-aab9-6cc6bae3b034",
+ > "path": "/data/path",
+ > "size": 5
+ > }
+ > }
+
+ #
+ # TODO - wire it up to Stelios's create user functions.
+ # TODO - wire it up to the create share functions.
+ #
+
+
diff --git a/notes/zrq/20220517-04-git-branch.txt b/notes/zrq/20220517-04-git-branch.txt
new file mode 100644
index 00000000..29cb4eba
--- /dev/null
+++ b/notes/zrq/20220517-04-git-branch.txt
@@ -0,0 +1,127 @@
+#
+#
+#
+# Copyright (c) 2022, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+#zrq-notes-time
+#zrq-notes-indent
+#zrq-notes-crypto
+#zrq-notes-ansible
+#zrq-notes-osformat
+#zrq-notes-zeppelin
+#
+
+ Target:
+
+ Create a new branch and pull request.
+
+ Result:
+
+ Work in progress ...
+
+# -----------------------------------------------------
+# Update from upstream.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git checkout master
+
+ git pull
+
+ git fetch upstream
+
+ git merge upstream/master
+
+ git status
+
+ git push
+
+ popd
+
+ > Switched to branch 'master'
+ >
+ > Already up to date.
+ >
+ > Your branch is up to date with 'origin/master'.
+
+
+# -----------------------------------------------------
+# Create a new branch.
+#[user@desktop]
+
+ branchname=user-accounts
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ branchprev=$(git branch --show-current)
+ branchnext=$(date '+%Y%m%d')-zrq-${branchname:?}
+
+ git checkout master
+ git checkout -b "${branchnext:?}"
+
+ git push --set-upstream 'origin' "$(git branch --show-current)"
+
+ popd
+
+ > ....
+ > ....
+ > * [new branch] 20220517-zrq-user-accounts -> 20220517-zrq-user-accounts
+ > branch '20220517-zrq-user-accounts' set up to track 'origin/20220517-zrq-user-accounts'.
+
+
+# -----------------------------------------------------
+# Use meld to transfer all the changes from our previous branch.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_HOME}"
+
+ cp -a \
+ github-zrq \
+ github-prev
+
+ pushd github-prev
+ git checkout 20220505-zrq-user-accounts
+ popd
+
+ meld \
+ github-prev \
+ github-zrq \
+ &
+
+ popd
+
+
+# -----------------------------------------------------
+# Push our changes and create a PR.
+#[user@desktop]
+
+ source "${HOME:?}/aglais.env"
+ pushd "${AGLAIS_CODE}"
+
+ git add .
+ git commit -m "Top level create user scripts, reading from YAML and writing JSON"
+ git push
+
+ popd
+
+
+