diff --git a/deployments/common/zeppelin/interpreter.json b/deployments/common/zeppelin/interpreter.json index 08da52f3..eff92dc7 100644 --- a/deployments/common/zeppelin/interpreter.json +++ b/deployments/common/zeppelin/interpreter.json @@ -1,6 +1,6 @@ { "interpreterSettings": { - "python": { + "python": { "id": "python", "name": "python", "group": "python", @@ -8,27 +8,32 @@ "zeppelin.python": { "name": "zeppelin.python", "value": "python", - "type": "string" - }, - "zeppelin.ipython.launch.timeout": { - "name": "zeppelin.ipython.launch.timeout", - "value": "30000", - "type": "number" + "type": "string", + "description": "Python binary executable path. It is set to python by default.(assume python is in your $PATH)" }, "zeppelin.python.maxResult": { "name": "zeppelin.python.maxResult", "value": "1000", - "type": "number" + "type": "number", + "description": "Max number of dataframe rows to display." }, "zeppelin.python.useIPython": { "name": "zeppelin.python.useIPython", - "value": true, - "type": "checkbox" + "value": false, + "type": "checkbox", + "description": "Whether use IPython when it is available in `%python`" + }, + "zeppelin.ipython.launch.timeout": { + "name": "zeppelin.ipython.launch.timeout", + "value": "30000", + "type": "number", + "description": "Time out for ipython launch" }, "zeppelin.ipython.grpc.message_size": { "name": "zeppelin.ipython.grpc.message_size", "value": "33554432", - "type": "number" + "type": "number", + "description": "grpc message size, default is 32M" } }, "status": "READY", @@ -96,231 +101,58 @@ "isUserImpersonate": false } }, - "livy": { - "id": "livy", - "name": "livy", - "group": "livy", - "properties": { - "livy.spark.executor.instances": { - "name": "livy.spark.executor.instances", - "value": "", - "type": "number" - }, - "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": { - "name": "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout", - "value": "", - "type": "string" - }, - "zeppelin.livy.concurrentSQL": { - "name": "zeppelin.livy.concurrentSQL", - "value": false, - "type": "checkbox" - }, - "zeppelin.livy.url": { - "name": "zeppelin.livy.url", - "value": "http://localhost:8998", - "type": "url" - }, - "zeppelin.livy.pull_status.interval.millis": { - "name": "zeppelin.livy.pull_status.interval.millis", - "value": "1000", - "type": "number" - }, - "livy.spark.executor.memory": { - "name": "livy.spark.executor.memory", - "value": "", - "type": "string" - }, - "zeppelin.livy.restart_dead_session": { - "name": "zeppelin.livy.restart_dead_session", - "value": false, - "type": "checkbox" - }, - "livy.spark.dynamicAllocation.enabled": { - "name": "livy.spark.dynamicAllocation.enabled", - "value": false, - "type": "checkbox" - }, - "zeppelin.livy.maxLogLines": { - "name": "zeppelin.livy.maxLogLines", - "value": "1000", - "type": "number" - }, - "livy.spark.dynamicAllocation.minExecutors": { - "name": "livy.spark.dynamicAllocation.minExecutors", - "value": "", - "type": "number" - }, - "livy.spark.executor.cores": { - "name": "livy.spark.executor.cores", - "value": "", - "type": "number" - }, - "zeppelin.livy.session.create_timeout": { - "name": "zeppelin.livy.session.create_timeout", - "value": "120", - "type": "number" - }, - "zeppelin.livy.spark.sql.maxResult": { - "name": "zeppelin.livy.spark.sql.maxResult", - "value": "1000", - "type": "number" - }, - "livy.spark.driver.cores": { - "name": "livy.spark.driver.cores", - "value": "", - "type": "number" - }, - "livy.spark.jars.packages": { - "name": "livy.spark.jars.packages", - 
"value": "", - "type": "textarea" - }, - "zeppelin.livy.spark.sql.field.truncate": { - "name": "zeppelin.livy.spark.sql.field.truncate", - "value": true, - "type": "checkbox" - }, - "livy.spark.driver.memory": { - "name": "livy.spark.driver.memory", - "value": "", - "type": "string" - }, - "zeppelin.livy.displayAppInfo": { - "name": "zeppelin.livy.displayAppInfo", - "value": true, - "type": "checkbox" - }, - "zeppelin.livy.principal": { - "name": "zeppelin.livy.principal", - "value": "", - "type": "string" - }, - "zeppelin.livy.keytab": { - "name": "zeppelin.livy.keytab", - "value": "", - "type": "textarea" - }, - "livy.spark.dynamicAllocation.maxExecutors": { - "name": "livy.spark.dynamicAllocation.maxExecutors", - "value": "", - "type": "number" - }, - "livy.spark.dynamicAllocation.initialExecutors": { - "name": "livy.spark.dynamicAllocation.initialExecutors", - "value": "", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "spark", - "class": "org.apache.zeppelin.livy.LivySparkInterpreter", - "defaultInterpreter": true, - "editor": { - "language": "scala", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sql", - "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "pyspark", - "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "pyspark3", - "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sparkr", - "class": "org.apache.zeppelin.livy.LivySparkRInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "r", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "shared", - "class": "org.apache.zeppelin.livy.LivySharedInterpreter", - "defaultInterpreter": false - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "perNote": "shared", - "perUser": "isolated", - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, "sh": { "id": "sh", "name": "sh", "group": "sh", "properties": { - "zeppelin.shell.interpolation": { - "name": "zeppelin.shell.interpolation", - "value": false, - "type": "checkbox" + "shell.command.timeout.millisecs": { + "name": "shell.command.timeout.millisecs", + "value": "60000", + "type": "number", + "description": "Shell command time out in millisecs. Default \u003d 60000" }, - "zeppelin.shell.principal": { - "name": "zeppelin.shell.principal", - "value": "", - "type": "string" + "shell.command.timeout.check.interval": { + "name": "shell.command.timeout.check.interval", + "value": "60000", + "type": "number", + "description": "Shell command output check interval in millisecs. 
Default \u003d 10000" }, "shell.working.directory.user.home": { "name": "shell.working.directory.user.home", "value": false, - "type": "checkbox" + "type": "checkbox", + "description": "If this set to true, the shell\u0027s working directory will be set to user home" }, "zeppelin.shell.auth.type": { "name": "zeppelin.shell.auth.type", "value": "", - "type": "string" + "type": "string", + "description": "If auth type is needed, Example: KERBEROS" }, "zeppelin.shell.keytab.location": { "name": "zeppelin.shell.keytab.location", "value": "", - "type": "string" + "type": "string", + "description": "Kerberos keytab location" }, - "shell.command.timeout.millisecs": { - "name": "shell.command.timeout.millisecs", - "value": "60000", - "type": "number" + "zeppelin.shell.principal": { + "name": "zeppelin.shell.principal", + "value": "", + "type": "string", + "description": "Kerberos principal" + }, + "zeppelin.shell.interpolation": { + "name": "zeppelin.shell.interpolation", + "value": false, + "type": "checkbox", + "description": "Enable ZeppelinContext variable interpolation into paragraph text" + }, + "zeppelin.terminal.ip.mapping": { + "name": "zeppelin.terminal.ip.mapping", + "value": "", + "type": "string", + "description": "Internal and external IP mapping of zeppelin server" } }, "status": "READY", @@ -334,6 +166,19 @@ "editOnDblClick": false, "completionSupport": false } + }, + { + "name": "terminal", + "class": "org.apache.zeppelin.shell.TerminalInterpreter", + "defaultInterpreter": false, + "editor": { + "language": "sh", + "editOnDblClick": false, + "completionSupport": false + }, + "config": { + "checkEmpty": false + } } ], "dependencies": [], @@ -351,120 +196,215 @@ "name": "spark", "group": "spark", "properties": { - "zeppelin.spark.concurrentSQL": { - "name": "zeppelin.spark.concurrentSQL", - "value": false, - "type": "checkbox" + "SPARK_HOME": { + "name": "SPARK_HOME", + "value": "/opt/spark", + "type": "string", + "description": "Location of spark distribution" + }, + "spark.master": { + "name": "spark.master", + "value": "yarn", + "type": "string", + "description": "Spark master uri. local | yarn-client | yarn-cluster | spark master address of standalone mode, ex) spark://master_host:7077" + }, + "spark.submit.deployMode": { + "name": "spark.submit.deployMode", + "value": "client", + "type": "string", + "description": "The deploy mode of Spark driver program, either \"client\" or \"cluster\", Which means to launch driver program locally (\"client\") or remotely (\"cluster\") on one of the nodes inside the cluster." }, - "zeppelin.R.knitr": { - "name": "zeppelin.R.knitr", - "value": true, - "type": "checkbox" - }, - "zeppelin.R.cmd": { - "name": "zeppelin.R.cmd", - "value": "R", - "type": "string" + "spark.app.name": { + "name": "spark.app.name", + "value": "", + "type": "string", + "description": "The name of spark application." }, - "zeppelin.R.image.width": { - "name": "zeppelin.R.image.width", - "value": "100%", - "type": "number" + "spark.driver.cores": { + "name": "spark.driver.cores", + "value": "", + "type": "number", + "description": "Number of cores to use for the driver process, only in cluster mode." }, - "zeppelin.spark.importImplicit": { - "name": "zeppelin.spark.importImplicit", - "value": true, - "type": "checkbox" + "spark.driver.memory": { + "name": "spark.driver.memory", + "value": "", + "type": "string", + "description": "Amount of memory to use for the driver process, i.e. 
where SparkContext is initialized, in the same format as JVM memory strings with a size unit suffix (\"k\", \"m\", \"g\" or \"t\") (e.g. 512m, 2g)." }, - "zeppelin.dep.localrepo": { - "name": "zeppelin.dep.localrepo", - "value": "local-repo", - "type": "string" + "spark.executor.cores": { + "name": "spark.executor.cores", + "value": "", + "type": "number", + "description": "The number of cores to use on each executor" }, - "zeppelin.spark.sql.stacktrace": { - "name": "zeppelin.spark.sql.stacktrace", - "value": false, - "type": "checkbox" + "spark.executor.memory": { + "name": "spark.executor.memory", + "value": "", + "type": "string", + "description": "Executor memory per worker instance. ex) 512m, 32g" }, - "zeppelin.spark.useHiveContext": { - "name": "zeppelin.spark.useHiveContext", - "value": true, - "type": "checkbox" + "spark.executor.instances": { + "name": "spark.executor.instances", + "value": "", + "type": "number", + "description": "The number of executors for static allocation." }, - "zeppelin.spark.printREPLOutput": { - "name": "zeppelin.spark.printREPLOutput", - "value": true, - "type": "checkbox" + "spark.files": { + "name": "spark.files", + "value": "", + "type": "string", + "description": "Comma-separated list of files to be placed in the working directory of each executor. Globs are allowed." }, - "spark.cores.max": { - "name": "spark.cores.max", + "spark.jars": { + "name": "spark.jars", "value": "", - "type": "number" + "type": "string", + "description": "Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed." }, - "spark.executor.memory": { - "name": "spark.executor.memory", + "spark.jars.packages": { + "name": "spark.jars.packages", "value": "", - "type": "string" + "type": "string", + "description": "Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. The coordinates should be groupId:artifactId:version. If spark.jars.ivySettings is given artifacts will be resolved according to the configuration in the file, otherwise artifacts will be searched for in the local maven repo, then maven central and finally any additional remote repositories given by the command-line option --repositories." }, - "zeppelin.spark.sql.interpolation": { - "name": "zeppelin.spark.sql.interpolation", + "zeppelin.spark.useHiveContext": { + "name": "zeppelin.spark.useHiveContext", + "value": true, + "type": "checkbox", + "description": "Use HiveContext instead of SQLContext if it is true. Enable hive for SparkSession." + }, + "zeppelin.spark.run.asLoginUser": { + "name": "zeppelin.spark.run.asLoginUser", "value": false, - "type": "checkbox" + "type": "checkbox", + "description": "Whether run spark job as the zeppelin login user, it is only applied when running spark job in hadoop yarn cluster and shiro is enabled" }, - "spark.app.name": { - "name": "spark.app.name", - "value": "Zeppelin", - "type": "string" + "zeppelin.spark.printREPLOutput": { + "name": "zeppelin.spark.printREPLOutput", + "value": true, + "type": "checkbox", + "description": "Print REPL output" }, "zeppelin.spark.maxResult": { "name": "zeppelin.spark.maxResult", "value": "1000", - "type": "number" + "type": "number", + "description": "Max number of result to display." + }, + "zeppelin.spark.enableSupportedVersionCheck": { + "name": "zeppelin.spark.enableSupportedVersionCheck", + "value": true, + "type": "checkbox", + "description": "Whether checking supported spark version. 
Developer only setting, not for production use" + }, + "zeppelin.spark.uiWebUrl": { + "name": "zeppelin.spark.uiWebUrl", + "value": "", + "type": "string", + "description": "Override Spark UI default URL. In Kubernetes mode, value can be Jinja template string with 3 template variables \u0027PORT\u0027, \u0027SERVICE_NAME\u0027 and \u0027SERVICE_DOMAIN\u0027. (ex: http://{{PORT}}-{{SERVICE_NAME}}.{{SERVICE_DOMAIN}})" }, "zeppelin.spark.ui.hidden": { "name": "zeppelin.spark.ui.hidden", "value": false, - "type": "checkbox" + "type": "checkbox", + "description": "Whether hide spark ui in zeppelin ui" }, - "master": { - "name": "master", - "value": "yarn-client", - "type": "string" + "spark.webui.yarn.useProxy": { + "name": "spark.webui.yarn.useProxy", + "value": false, + "type": "checkbox", + "description": "whether use yarn proxy url as spark weburl, e.g. http://localhost:8088/proxy/application_1583396598068_0004" }, - "zeppelin.pyspark.python": { - "name": "zeppelin.pyspark.python", - "value": "python", - "type": "string" + "zeppelin.spark.scala.color": { + "name": "zeppelin.spark.scala.color", + "value": true, + "type": "checkbox", + "description": "Whether enable color output of spark scala interpreter" }, - "args": { - "name": "args", - "value": "", - "type": "textarea" + "zeppelin.spark.deprecatedMsg.show": { + "name": "zeppelin.spark.deprecatedMsg.show", + "value": true, + "type": "checkbox", + "description": "Whether show the spark deprecated message, spark 2.2 and before are deprecated. Zeppelin will display warning message by default" }, - "zeppelin.spark.enableSupportedVersionCheck": { - "name": "zeppelin.spark.enableSupportedVersionCheck", + "zeppelin.spark.concurrentSQL": { + "name": "zeppelin.spark.concurrentSQL", "value": true, - "type": "checkbox" + "type": "checkbox", + "description": "Execute multiple SQL concurrently if set true." }, - "zeppelin.spark.useNew": { - "name": "zeppelin.spark.useNew", + "zeppelin.spark.concurrentSQL.max": { + "name": "zeppelin.spark.concurrentSQL.max", + "value": "10", + "type": "number", + "description": "Max number of SQL concurrently executed" + }, + "zeppelin.spark.sql.stacktrace": { + "name": "zeppelin.spark.sql.stacktrace", "value": true, - "type": "checkbox" + "type": "checkbox", + "description": "Show full exception stacktrace for SQL queries if set to true." + }, + "zeppelin.spark.sql.interpolation": { + "name": "zeppelin.spark.sql.interpolation", + "value": false, + "type": "checkbox", + "description": "Enable ZeppelinContext variable interpolation into spark sql" + }, + "PYSPARK_PYTHON": { + "name": "PYSPARK_PYTHON", + "value": "python", + "type": "string", + "description": "Python binary executable to use for PySpark in both driver and workers (default is python2.7 if available, otherwise python). Property `spark.pyspark.python` take precedence if it is set" + }, + "PYSPARK_DRIVER_PYTHON": { + "name": "PYSPARK_DRIVER_PYTHON", + "value": "python", + "type": "string", + "description": "Python binary executable to use for PySpark in driver only (default is `PYSPARK_PYTHON`). 
Property `spark.pyspark.driver.python` take precedence if it is set" }, "zeppelin.pyspark.useIPython": { "name": "zeppelin.pyspark.useIPython", + "value": false, + "type": "checkbox", + "description": "Whether use IPython when it is available" + }, + "zeppelin.R.knitr": { + "name": "zeppelin.R.knitr", "value": true, - "type": "checkbox" + "type": "checkbox", + "description": "Whether use knitr or not" }, - "zeppelin.spark.uiWebUrl": { - "name": "zeppelin.spark.uiWebUrl", - "value": "", - "type": "string" + "zeppelin.R.cmd": { + "name": "zeppelin.R.cmd", + "value": "R", + "type": "string", + "description": "R binary executable path" + }, + "zeppelin.R.image.width": { + "name": "zeppelin.R.image.width", + "value": "100%", + "type": "number", + "description": "Image width of R plotting" }, "zeppelin.R.render.options": { "name": "zeppelin.R.render.options", "value": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F, fig.retina \u003d 2", - "type": "textarea" + "type": "textarea", + "description": "" + }, + "zeppelin.R.shiny.portRange": { + "name": "zeppelin.R.shiny.portRange", + "value": ":", + "type": "string", + "description": "Shiny app would launch a web app at some port, this property is to specify the portRange via format \u0027\u003cstart\u003e:\u003cend\u003e\u0027, e.g. \u00275000:5001\u0027. By default it is \u0027:\u0027 which means any port" + }, + "zeppelin.kotlin.shortenTypes": { + "name": "zeppelin.kotlin.shortenTypes", + "value": true, + "type": "checkbox", + "description": "Show short types instead of full, e.g. List\u003cString\u003e or kotlin.collections.List\u003ckotlin.String\u003e" } }, "status": "READY", @@ -491,17 +431,6 @@ "completionSupport": true } }, - { - "name": "dep", - "class": "org.apache.zeppelin.spark.DepInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "scala", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, { "name": "pyspark", "class": "org.apache.zeppelin.spark.PySparkInterpreter", @@ -520,7 +449,8 @@ "editor": { "language": "python", "editOnDblClick": false, - "completionSupport": true + "completionSupport": true, + "completionKey": "TAB" } }, { @@ -530,13 +460,42 @@ "editor": { "language": "r", "editOnDblClick": false, - "completionSupport": false + "completionSupport": false, + "completionKey": "TAB" } }, { - "name": "isolated", - "class": "org.apache.zeppelin.spark.SparkInterpreter", - "defaultInterpreter": false + "name": "ir", + "class": "org.apache.zeppelin.spark.SparkIRInterpreter", + "defaultInterpreter": false, + "editor": { + "language": "r", + "editOnDblClick": false, + "completionSupport": true, + "completionKey": "TAB" + } + }, + { + "name": "shiny", + "class": "org.apache.zeppelin.spark.SparkShinyInterpreter", + "defaultInterpreter": false, + "editor": { + "language": "r", + "editOnDblClick": false, + "completionSupport": true, + "completionKey": "TAB" + } + }, + { + "name": "kotlin", + "class": "org.apache.zeppelin.spark.KotlinSparkInterpreter", + "defaultInterpreter": false, + "editor": { + "language": "kotlin", + "editOnDblClick": false, + "completionKey": "TAB", + "completionSupport": false + } } ], "dependencies": [], @@ -558,11 +517,12 @@ "properties": { "markdown.parser.type": { "name": "markdown.parser.type", - "value": "pegdown", - "type": "string" + "value": "flexmark", + "type": "string", + "description": "Markdown Parser Type. 
Available values: pegdown, markdown4j, flexmark. Default \u003d flexmark" } }, - "status": "DOWNLOADING_DEPENDENCIES", + "status": "READY", "interpreterGroup": [ { "name": "md", @@ -586,12 +546,13 @@ } } }, - "interpreterBindings": {}, "interpreterRepositories": [ { "id": "central", "type": "default", "url": "http://repo1.maven.org/maven2/", + "host": "repo1.maven.org", + "protocol": "http", "releasePolicy": { "enabled": true, "updatePolicy": "daily", @@ -609,6 +570,8 @@ "id": "local", "type": "default", "url": "file:///var/local/zeppelin/maven", + "host": "", + "protocol": "file", "releasePolicy": { "enabled": true, "updatePolicy": "daily", diff --git a/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml b/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml index fa21c38c..cb427698 100644 --- a/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml +++ b/deployments/hadoop-yarn/ansible/27-install-zeppelin.yml @@ -200,6 +200,18 @@ Enable directory listings on server. + + zeppelin.interpreter.exclude + angular,livy,alluxio,file,psql,flink,ignite,lens,cassandra,geode,kylin,elasticsearch,scalding,jdbc,hbase,bigquery,beam,groovy,flink-cmd,hazelcastjet,influxdb,java,jupyter,kotlin,ksql,mongodb,neo4j,pig,r,sap,spark-submit,sparql,submarine + All the inteprreters that you would like to exclude. You can only specify either 'zeppelin.interpreter.include' or 'zeppelin.interpreter.exclude'. Specifying them together is not allowed. + + + + zeppelin.jobmanager.enable + true + The Job tab in zeppelin page seems not so useful instead it cost lots of memory and affect the performance. Disable it can save lots of memory + + zeppelinshiro: | diff --git a/deployments/hadoop-yarn/ansible/29-install-pip-libs.yml b/deployments/hadoop-yarn/ansible/29-install-pip-libs.yml index 7a0e7eff..0c5d3269 100644 --- a/deployments/hadoop-yarn/ansible/29-install-pip-libs.yml +++ b/deployments/hadoop-yarn/ansible/29-install-pip-libs.yml @@ -28,9 +28,16 @@ - /tmp/ansible-vars.yml - tasks: + - name: "Install required system libraries" + become: yes + yum: + name: libtiff-devel,libjpeg-devel,libzip-devel,freetype-devel,lcms2-devel,libwebp-devel,tcl-devel,tk-devel + update_cache: yes + state: present + + - name: Copy pip requirements file into tmp become: true copy: @@ -42,4 +49,3 @@ become: true pip: requirements: "/tmp/requirements.txt" - diff --git a/deployments/hadoop-yarn/ansible/34-setup-shuffler.yml b/deployments/hadoop-yarn/ansible/34-setup-shuffler.yml index d846e772..6d5b1549 100644 --- a/deployments/hadoop-yarn/ansible/34-setup-shuffler.yml +++ b/deployments/hadoop-yarn/ansible/34-setup-shuffler.yml @@ -22,6 +22,7 @@ - name: "Fetch the spark-yarn-shuffle jar from one of the master nodes and store it in our /tmp directory" hosts: master01 + become: yes tasks: - fetch: src: /opt/spark/yarn/{{spname}}-yarn-shuffle.jar @@ -30,6 +31,7 @@ - name: "Copy Shuffle jar to Hadoop directory on worker & master nodes" hosts: workers:masters + become: yes tasks: - copy: src: /tmp/{{spname}}-yarn-shuffle.jar diff --git a/deployments/hadoop-yarn/ansible/36-run-benchmark.yml b/deployments/hadoop-yarn/ansible/36-run-benchmark.yml new file mode 100644 index 00000000..d41ee7b0 --- /dev/null +++ b/deployments/hadoop-yarn/ansible/36-run-benchmark.yml @@ -0,0 +1,83 @@ +# +# +# +# Copyright (c) 2020, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the 
License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + +- name: "Get Zeppelin IP Address" + hosts: localhost + vars_files: + - config/ansible.yml + - /tmp/ansible-vars.yml + - config/openstack.yml + + tasks: + + - name: "Discover our Zeppelin node and store IP address in temp file" + os_server_info: + cloud: "{{ cloudname }}" + server: "{{ deployname }}-zeppelin" + register: + zeppelinnode + + - local_action: copy content={{ zeppelinnode.openstack_servers[0].accessIPv4 }} dest=/tmp/zeppelin_ip.txt + + +- name: "Install and run Python benchmark suite" + hosts: localhost + gather_facts: yes + become: yes + become_method: sudo + vars_files: + - config/ansible.yml + - /tmp/ansible-vars.yml + vars: + zepipaddress: "{{ lookup('file', '/tmp/zeppelin_ip.txt') | trim }}" + + tasks: + + - name: "Creating our Zeppelin config file" + copy: + dest: "/tmp/user.yml" + content: | + zeppelin_url: http://{{ zepipaddress }}:8080 + zeppelin_auth: true + zeppelin_user: gaiauser + zeppelin_password: gaiapass + + - name: "Install git" + yum: + name: git + update_cache: yes + state: present + + - pip: + name: 'git+https://github.com/wfau/aglais-testing@v0.1.2' + executable: pip + + - name: "Creating our Benchmarking script" + copy: + dest: "/tmp/run-test.py" + content: | + import sys + from aglais_benchmark import AglaisBenchmarker + AglaisBenchmarker("/deployments/zeppelin/test/config/notebooks.json", "/tmp/").run(concurrent=False, users=1) + + - name: "Run benchmarker" + command: python3 /tmp/run-test.py + diff --git a/deployments/hadoop-yarn/ansible/config/cclake-medium-04.yml b/deployments/hadoop-yarn/ansible/config/zeppelin-13.22-spark-4.13.22.yml similarity index 97% rename from deployments/hadoop-yarn/ansible/config/cclake-medium-04.yml rename to deployments/hadoop-yarn/ansible/config/zeppelin-13.22-spark-4.13.22.yml index 17787abc..4ff29235 100644 --- a/deployments/hadoop-yarn/ansible/config/cclake-medium-04.yml +++ b/deployments/hadoop-yarn/ansible/config/zeppelin-13.22-spark-4.13.22.yml @@ -26,7 +26,7 @@ all: # Hadoop vars - hdname: "hadoop-3.1.3" + hdname: "hadoop-3.2.1" hdbase: "/opt" hdhome: "/opt/hadoop" @@ -39,10 +39,9 @@ all: hdfsconf: "/var/hdfs/conf" hdfsuser: "fedora" - # Spark vars - - spname: "spark-2.4.7" - spfull: "spark-2.4.7-bin-hadoop2.7" + # Spark vars + spname: "spark-3.1.2" + spfull: "spark-3.1.2-bin-hadoop3.2" spbase: "/opt" sphome: "/opt/spark" sphost: "master01" @@ -174,9 +173,9 @@ all: #mapreduce.reduce.memory.mb = (multiple of yarn.scheduler.minimum-allocation-mb) # Zeppelin vars - zepname: "zeppelin-0.8.2" + zepname: "zeppelin-0.10.0" zepbase: "/home/fedora" - zephome: "/home/fedora/zeppelin-0.8.2-bin-all" + zephome: "/home/fedora/zeppelin-0.10.0-bin-all" zephost: "zeppelin" zepuser: "fedora" zepmavendest: "/var/local/zeppelin/maven" diff --git a/deployments/hadoop-yarn/ansible/config/medium-04.yml b/deployments/hadoop-yarn/ansible/config/zeppelin-14.45-spark-4.14.45.yml similarity index 97% rename from deployments/hadoop-yarn/ansible/config/medium-04.yml rename to deployments/hadoop-yarn/ansible/config/zeppelin-14.45-spark-4.14.45.yml index a60b4bbe..b4057b4c 100644 --- 
a/deployments/hadoop-yarn/ansible/config/medium-04.yml +++ b/deployments/hadoop-yarn/ansible/config/zeppelin-14.45-spark-4.14.45.yml @@ -26,7 +26,7 @@ all: # Hadoop vars - hdname: "hadoop-3.1.3" + hdname: "hadoop-3.2.1" hdbase: "/opt" hdhome: "/opt/hadoop" @@ -41,8 +41,8 @@ all: # Spark vars - spname: "spark-2.4.7" - spfull: "spark-2.4.7-bin-hadoop2.7" + spname: "spark-3.1.2" + spfull: "spark-3.1.2-bin-hadoop3.2" spbase: "/opt" sphome: "/opt/spark" sphost: "master01" @@ -174,9 +174,9 @@ all: #mapreduce.reduce.memory.mb = (multiple of yarn.scheduler.minimum-allocation-mb) # Zeppelin vars - zepname: "zeppelin-0.8.2" + zepname: "zeppelin-0.10.0" zepbase: "/home/fedora" - zephome: "/home/fedora/zeppelin-0.8.2-bin-all" + zephome: "/home/fedora/zeppelin-0.10.0-bin-all" zephost: "zeppelin" zepuser: "fedora" zepmavendest: "/var/local/zeppelin/maven" diff --git a/deployments/hadoop-yarn/ansible/config/cclake-large-06.yml b/deployments/hadoop-yarn/ansible/config/zeppelin-27.45-spark-6.27.45.yml similarity index 98% rename from deployments/hadoop-yarn/ansible/config/cclake-large-06.yml rename to deployments/hadoop-yarn/ansible/config/zeppelin-27.45-spark-6.27.45.yml index e2d3d5b3..053d6f04 100644 --- a/deployments/hadoop-yarn/ansible/config/cclake-large-06.yml +++ b/deployments/hadoop-yarn/ansible/config/zeppelin-27.45-spark-6.27.45.yml @@ -26,7 +26,7 @@ all: # Hadoop vars - hdname: "hadoop-3.1.3" + hdname: "hadoop-3.2.1" hdbase: "/opt" hdhome: "/opt/hadoop" @@ -41,8 +41,8 @@ all: # Spark vars - spname: "spark-2.4.7" - spfull: "spark-2.4.7-bin-hadoop2.7" + spname: "spark-3.1.2" + spfull: "spark-3.1.2-bin-hadoop3.2" spbase: "/opt" sphome: "/opt/spark" sphost: "master01" @@ -176,9 +176,9 @@ all: #mapreduce.reduce.memory.mb = (multiple of yarn.scheduler.minimum-allocation-mb) # Zeppelin vars - zepname: "zeppelin-0.8.2" + zepname: "zeppelin-0.10.0" zepbase: "/home/fedora" - zephome: "/home/fedora/zeppelin-0.8.2-bin-all" + zephome: "/home/fedora/zeppelin-0.10.0-bin-all" zephost: "zeppelin" zepuser: "fedora" zepmavendest: "/var/local/zeppelin/maven" diff --git a/deployments/hadoop-yarn/ansible/config/zeppelin-55.90-spark-6.27.45.yml b/deployments/hadoop-yarn/ansible/config/zeppelin-55.90-spark-6.27.45.yml new file mode 100644 index 00000000..3d0c3546 --- /dev/null +++ b/deployments/hadoop-yarn/ansible/config/zeppelin-55.90-spark-6.27.45.yml @@ -0,0 +1,280 @@ +# +# +# +# Copyright (c) 2020, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# +# +# + +all: + + vars: + + # Hadoop vars + + hdname: "hadoop-3.1.3" + hdbase: "/opt" + hdhome: "/opt/hadoop" + + hdconf: "{{hdhome}}/etc/hadoop" + hdhost: "master01" + hduser: "fedora" + + # HDFS vars + + hdfsconf: "/var/hdfs/conf" + hdfsuser: "fedora" + + # Spark vars + + spname: "spark-2.4.7" + spfull: "spark-2.4.7-bin-hadoop2.7" + spbase: "/opt" + sphome: "/opt/spark" + sphost: "master01" + spuser: "fedora" + + # Flavor sizes + + zeppelinflavor: 'gaia.cclake.55vcpu' + masterflavor: 'gaia.cclake.2vcpu' + workerflavor: 'gaia.cclake.27vcpu' + + # Flavour values + + zeppelinmemory: 92160 + zeppelincores: 55 + + workermemory: 46080 + workercores: 27 + workercount: 6 + +# Calculated limits + + spminmem: 1024 + spmaxmem: "{{workermemory - 1024}}" + + spmincores: 1 + spmaxcores: "{{workercores}}" + + + sparkconfig: | + + # https://spark.apache.org/docs/latest/configuration.html + # https://spark.apache.org/docs/latest/running-on-yarn.html + # https://stackoverflow.com/questions/37871194/how-to-tune-spark-executor-number-cores-and-executor-memory + + spark.master yarn + + # Spark config settings calculated using Cheatsheet.xlsx + # https://www.c2fo.io/img/apache-spark-config-cheatsheet/C2FO-Spark-Config-Cheatsheet.xlsx + + # https://www.c2fo.io/c2fo/spark/aws/emr/2016/07/06/apache-spark-config-cheatsheet/ + # https://github.com/AndresNamm/SparkDebugging/tree/master/ExecutorSizing + + # Calculated using Cheatsheet.xlsx + spark.driver.memory 58982m + spark.driver.memoryOverhead 9216 + spark.driver.cores 5 + spark.driver.maxResultSize 40960m + + spark.executor.memory 7168m + spark.executor.memoryOverhead 1024 + spark.executor.cores 5 + #spark.executor.instances 30 + + spark.default.parallelism 300 + #spark.sql.shuffle.partitions 300 + + # YARN Application Master settings + spark.yarn.am.memory 2048m + spark.yarn.am.cores 1 + + spark.dynamicAllocation.enabled true + spark.shuffle.service.enabled true + spark.dynamicAllocation.minExecutors 1 + # spark.executor.instances from Cheatsheet + spark.dynamicAllocation.maxExecutors 30 + # maxExecutors / 2 + spark.dynamicAllocation.initialExecutors 15 + spark.dynamicAllocation.cachedExecutorIdleTimeout 60s + spark.dynamicAllocation.executorIdleTimeout 60s + + yarnconfig: | + + + yarn.scheduler.maximum-allocation-mb + {{spmaxmem}} + + + + + yarn.scheduler.minimum-allocation-mb + {{spminmem}} + + + + yarn.scheduler.minimum-allocation-vcores + {{spmincores}} + + + + yarn.scheduler.maximum-allocation-vcores + {{spmaxcores}} + + + + yarn.nodemanager.resource.memory-mb + {{spmaxmem}} + + + + + yarn.nodemanager.resource.cpu-vcores + {{spmaxcores}} + + + + + yarn.nodemanager.pmem-check-enabled + false + + + + yarn.nodemanager.vmem-check-enabled + false + + + #yarn.app.mapreduce.am.resource.mb = (yarn.scheduler.minimum-allocation-mb) + #mapreduce.map.memory.mb = (multiple of yarn.scheduler.minimum-allocation-mb) + #mapreduce.reduce.memory.mb = (multiple of yarn.scheduler.minimum-allocation-mb) + + # Zeppelin vars + zepname: "zeppelin-0.8.2" + zepbase: "/home/fedora" + zephome: "/home/fedora/zeppelin-0.8.2-bin-all" + zephost: "zeppelin" + zepuser: "fedora" + zepmavendest: "/var/local/zeppelin/maven" + + hosts: + + zeppelin: + login: 'fedora' + image: 'Fedora-30-1.2' + flavor: "{{zeppelinflavor}}" + discs: + - type: 'local' + format: 'ext4' + mntpath: "/mnt/local/vdb" + devname: 'vdb' + - type: 'cinder' + size: 1024 + format: 'btrfs' + mntpath: "/mnt/cinder/vdc" + devname: 'vdc' + paths: + # Empty on Zeppelin + hddatalink: "/var/hadoop/data" + hddatadest: 
"/mnt/local/vdb/hadoop/data" + # Empty on Zeppelin + hdtemplink: "/var/hadoop/temp" + hdtempdest: "/mnt/local/vdb/hadoop/temp" + # Empty on Zeppelin + hdlogslink: "/var/hadoop/logs" + hdlogsdest: "/mnt/local/vdb/hadoop/logs" + # Used on Zeppelin + sptemplink: "/var/spark/temp" + sptempdest: "/mnt/cinder/vdc/spark/temp" + + monitor: + login: 'fedora' + image: 'Fedora-30-1.2' + flavor: 'gaia.cclake.2vcpu' + discs: [] + + children: + + masters: + hosts: + master[01:01]: + vars: + login: 'fedora' + image: 'Fedora-30-1.2' + flavor: "{{masterflavor}}" + discs: [] + paths: + # Empty on master + hddatalink: "/var/hadoop/data" + hddatadest: "/mnt/local/vda/hadoop/data" + # Used on master + # /var/hadoop/temp/dfs/namesecondary/current/ + hdtemplink: "/var/hadoop/temp" + hdtempdest: "/mnt/local/vda/hadoop/temp" + # Used on master + hdlogslink: "/var/hadoop/logs" + hdlogsdest: "/mnt/local/vda/hadoop/logs" + # Used on master + # /var/hdfs/meta/namenode/fsimage/current/ + hdfsmetalink: "/var/hdfs/meta" + hdfsmetadest: "/mnt/local/vda/hadoop/meta" + + workers: + hosts: + worker[01:06]: + vars: + login: 'fedora' + image: 'Fedora-30-1.2' + flavor: "{{workerflavor}}" + discs: + - type: 'local' + format: 'ext4' + mntpath: "/mnt/local/vdb" + devname: 'vdb' + - type: 'cinder' + size: 1024 + format: 'btrfs' + mntpath: "/mnt/cinder/vdc" + devname: 'vdc' + paths: + # Used on workers + hddatalink: "/var/hadoop/data" + hddatadest: "/mnt/local/vdb/hadoop/data" + # Used on workers + # /var/hadoop/temp/nm-local-dir/ + hdtemplink: "/var/hadoop/temp" + hdtempdest: "/mnt/local/vdb/hadoop/temp" + # Used on workers + hdlogslink: "/var/hadoop/logs" + hdlogsdest: "/mnt/local/vdb/hadoop/logs" + # Empty on workers + hdfslogslink: "/var/hdfs/logs" + hdfslogsdest: "/mnt/local/vdb/hdfs/logs" + # Empty on workers + hdfsdatalink: "/var/hdfs/data" + hdfsdatadest: "/mnt/cinder/vdc/hdfs/data" + diff --git a/deployments/hadoop-yarn/bin/create-all.sh b/deployments/hadoop-yarn/bin/create-all.sh index fe7ac4a9..91512a93 100755 --- a/deployments/hadoop-yarn/bin/create-all.sh +++ b/deployments/hadoop-yarn/bin/create-all.sh @@ -45,6 +45,8 @@ deployname="${cloudname:?}-$(date '+%Y%m%d')" deploydate=$(date '+%Y%m%dT%H%M%S') + deploytype="${3:-prod}" + configyml='/tmp/aglais-config.yml' statusyml='/tmp/aglais-status.yml' touch "${statusyml:?}" @@ -305,3 +307,19 @@ done +# ----------------------------------------------------- +# Run Benchmarks + +if [[ "$deploytype" == "test" ]] +then + + pushd "/deployments/hadoop-yarn/ansible" + + ansible-playbook \ + --verbose \ + --inventory "${inventory:?}" \ + "36-run-benchmark.yml" + + popd + +fi diff --git a/deployments/hadoop-yarn/bin/start-zeppelin.sh b/deployments/hadoop-yarn/bin/start-zeppelin.sh index 919f1a6c..8eae502d 100755 --- a/deployments/hadoop-yarn/bin/start-zeppelin.sh +++ b/deployments/hadoop-yarn/bin/start-zeppelin.sh @@ -20,6 +20,6 @@ ssh zeppelin \ ' - /home/fedora/zeppelin-0.8.2-bin-all/bin/zeppelin-daemon.sh start + /home/fedora/zeppelin-0.10.0-bin-all/bin/zeppelin-daemon.sh start ' diff --git a/deployments/zeppelin/test/config/notebooks.json b/deployments/zeppelin/test/config/notebooks.json new file mode 100644 index 00000000..25823ae3 --- /dev/null +++ b/deployments/zeppelin/test/config/notebooks.json @@ -0,0 +1,41 @@ +{ +"notebooks" : [ + { + "name" : "SetUp", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/SetUp.json", + "totaltime" : 45, + "results" : [] + }, + { + "name" : "Mean_proper_motions_over_the_sky", + "filepath" : 
"https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Mean_proper_motions_over_the_sky.json", + "totaltime" : 55, + "results" : [] + }, + { + "name" : "Source_counts_over_the_sky.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Source_counts_over_the_sky.json", + "totaltime" : 22, + "results" : [] + }, + { + "name" : "Good_astrometric_solutions_via_ML_Random_Forrest_classifier", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Good_astrometric_solutions_via_ML_Random_Forrest_classifier.json", + "totaltime" : 500, + "results" : [] + }, + { + "name" : "QC_cuts_dev.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/QC_cuts_dev.json", + "totaltime" : 4700, + "results" : [] + }, + { + "name" : "WD_detection_dev.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/WD_detection_dev.json", + "totaltime" : 3750, + "results" : [] + } + +] +} diff --git a/deployments/zeppelin/test/config/notebooks_pi.json b/deployments/zeppelin/test/config/notebooks_pi.json new file mode 100644 index 00000000..c96c332c --- /dev/null +++ b/deployments/zeppelin/test/config/notebooks_pi.json @@ -0,0 +1,12 @@ +{ +"notebooks" : [ + { + "name" : "pi_calculation", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/pi_calculation.json", + "totaltime" : 160, + "results" : [ "Pi is roughly 3.141854"] + + } + +] +} diff --git a/notes/stv/20210918-zeppelin-benchmarking-01.txt b/notes/stv/20210918-zeppelin-benchmarking-01.txt new file mode 100644 index 00000000..9fe22d2b --- /dev/null +++ b/notes/stv/20210918-zeppelin-benchmarking-01.txt @@ -0,0 +1,198 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# +# + +# Benchmarking the Zeppelin REST API +# ------------------------------------ + +# Summary of benchmarking suit: + We use the "zdairi" Zeppelin rest client to run our benchmarks in the following way: + - Create secrets file + - Create notebooks.json config where we store the list of our curated notebooks. Each entry has a link to the raw json file of the Zeppelin notebook + + - For each notebook: + Fetch notebook json file store in temp folder on local machine + Create notebook in Zeppelin instance (Create under tmp directory in Zeppelin, i.e. change name to /tmp/notebook..) + Run notebook, and store the execution duration + Delete notebook from Zeppelin + Delete temporary notebook json file in local machine + + - Return a dictionary of the Notebook ID's and the execution duration for each + - As a one off, run the benchmark suite, to get the execution time for each notebook using our test prototype + + + +# zdairi is zeppelin CLI tool which wrapper zeppelin REST API for control notebook and interpreter. 
+# https://pypi.org/project/zdairi/ +# Benchmarking suite can be found here: https://github.com/wfau/aglais-testing + + + +# The following was run a local machine (Ubuntu). +# [Update] I have also tested on a remote VM which was also however an Ubuntu machine +# For the concurrent test, we need to create the users before hand in Zeppelin + + +# Install Python2.7 (Required by zdairi) +# user@local +# ----------------------------------- +apt install python-minimal + + + +# Clone Aglais-testing Github project +# user@local +# ----------------------------------- +git clone https://github.com/wfau/aglais-testing +pushd aglais-testing + + + +# Setup Virtualenv +# user@local +# -------------------- + +virtualenv --python=python2.7 mypython +source mypython/bin/activate + + + +# Install zdairi +# (mypython) user@local +# ----------------------------- + +pip install zdairi + + + +# Edit our secrets yaml files +# For a single user benchmark, we need to edit "user.yml" +# For a multi user test, we need to setup a yml for each concurrent user, numbered as: "user1.yml", "user2.yml ..." +# (mypython) user@local +# -------------------------------- + +nano config/zeppelin/user.yml +.. +zeppelin_url: http://128.232.227.178:8080 +zeppelin_auth: true +zeppelin_user: user +zeppelin_password: pass +.. + + + + + +# Optional: Edit the notebooks we want to test +# By default the project comes with two notebook configuration files, one containing a single notebook for a quick test, and one with the full list of notebooks +# (mypython) user@local +# ---------------------------------------------------------------------------- + + +nano config/notebooks/notebook.json +.. +{ +"notebooks" : [ + { + "name" : "SetUp", + "filepath" : "https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/notebooks/SetUp.json", + "totaltime" : 400, + "results" : [] + }, + { + "name" : "Good_astrometric_solutions_via_ML_Random_Forrest_classifier", + "filepath" : "https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/notebooks/Good_astrometric_solutions_via_ML_Random_Forrest_classifier.json", + "totaltime" : 900, + "results" : [] + }, + { + "name" : "Mean_proper_motions_over_the_sky", + "filepath" : "https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/notebooks/Mean_proper_motions_over_the_sky.json", + "totaltime" : 400, + "results" : [] + }, + { + "name" : "Source_counts_over_the_sky.json", + "filepath" : "https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/notebooks/Source_counts_over_the_sky.json", + "totaltime" : 1200, + "results" : [] + } + +] +} + + + + + +.. 
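# -----------------------------------------------------
# Optional: script the single-user run instead of typing it into the REPL.
# This is a minimal sketch, not part of the suite: it assumes the same
# Benchmarker class and run(concurrent=..., users=...) call used in the tests
# below, placed alongside benchmark.py in src/, and it assumes run() returns
# the per-notebook results dictionary that it prints (entries with 'totaltime',
# 'status', 'valid', 'msg'). If run() only prints, capture stdout instead.
# (mypython) user@local
# -----------------------------------------------------

nano src/run-benchmark.py
..
from benchmark import Benchmarker

# Run the public example notebooks once, as a single user, sequentially
# (same call as the interactive tests below).
results = Benchmarker("../config/notebooks/notebooks.json", "../config/zeppelin/").run(concurrent=False, users=1)

# One line per notebook is easier to scan than the raw dictionary.
for name, result in results.items():
    print("%-65s %10ss %-8s valid=%s" % (name, result.get("totaltime"), result.get("status"), result.get("valid")))
..

pushd src
python3 run-benchmark.py
popd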
+ + +# Navigate to src/ +pushd src + + +# -- Test #1 -- +# Run Single (Quick) pi calculation test +# (mypython) user@local +# -------------------------------------------------- + +python3 +>>> from benchmark import Benchmarker +>>> Benchmarker("../config/notebooks/notebooks_quick_pi.json", "../config/zeppelin/").run(concurrent=False, users=1) + + + Expected Output: ['Pi is roughly 3.141854'] + Actual output: ['Pi is roughly 3.141210'] + ----------- + Test completed after: 6.19 seconds + ----------- + {'pi_quick': {'totaltime': '6.19', 'status': 'SUCCESS', 'valid': 'FALSE'}} + + + +# -- Test #2 -- +# Run Public examples tests +# (mypython) user@local +>>> from benchmark import Benchmarker +>>> Benchmarker("../config/notebooks/notebooks.json", "../config/zeppelin/").run(concurrent=False, users=1) +Test completed after: 929.94 seconds +----------- +{'SetUp': {'totaltime': '42.66', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Mean_proper_motions_over_the_sky': {'totaltime': '107.13', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Source_counts_over_the_sky.json': {'totaltime': '36.21', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '743.94', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}} + + + +# -- Test #3 -- +# Repeat Public examples tests +# (mypython) user@local +Test completed after: 845.26 seconds +----------- +{'SetUp': {'totaltime': '41.15', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Mean_proper_motions_over_the_sky': {'totaltime': '87.54', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Source_counts_over_the_sky.json': {'totaltime': '29.30', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '687.27', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}} + + +# Duration looks about right: + 743.94 / 60 = 12 minutes + 687.27 / 60 = 11.45 minutes + +# Let's set the template values for these notebooks to slightly above the max of the two +# Update the timing values in the configuration for these notebooks + +# https://github.com/stvoutsin/aglais-testing/tree/main/notebooks/public_examples diff --git a/notes/stv/20210920-zeppelin-benchmarking-01.txt b/notes/stv/20210920-zeppelin-benchmarking-01.txt new file mode 100644 index 00000000..b54a601e --- /dev/null +++ b/notes/stv/20210920-zeppelin-benchmarking-01.txt @@ -0,0 +1,118 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# +# + +# Benchmarking the Zeppelin REST API +# ------------------------------------ + +# Target: + # Test a deployed version of Aglais using the benchmarking suite at: + # https://github.com/stvoutsin/aglais-testing + +# Result: + # Success + +# ------------------------------------- + + + +# The following was run a local machine (Ubuntu). 
+# [Update] I have also tested on a remote VM which was also however an Ubuntu machine +# For the concurrent test, we need to create the users before hand in Zeppelin + + +# Install Python2.7 (Required by zdairi) +# user@local +# ----------------------------------- +apt install python-minimal + + + +# Clone Aglais-testing Github project +# user@local +# ----------------------------------- +git clone https://github.com/stvoutsin/aglais-testing +pushd aglais-testing + + + +# Setup Virtualenv +# user@local +# -------------------- + +virtualenv --python=python2.7 mypython +source mypython/bin/activate + + + +# Install Benchmarking Suite +# user@local +# -------------------------- + +python setup.py install + + + +# Edit our secrets yaml files +# For a single user benchmark, we need to edit "user.yml" +# For a multi user test, we need to setup a yml for each concurrent user, numbered as: "user1.yml", "user2.yml ..." +# (mypython) user@local +# -------------------------------- + +nano config/zeppelin/user.yml +.. +zeppelin_url: http://128.232.227.178:8080 +zeppelin_auth: true +zeppelin_user: user +zeppelin_password: pass +.. + + + + + +# -- Test #1 -- +# Run Public examples tests +# (mypython) user@local +>>> from aglais_benchmark import AglaisBenchmarker +>>> AglaisBenchmarker("https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/config/notebooks/notebooks.json", "./config/zeppelin/").run(concurrent=False, users=1) + + +Test completed after: 914.43 seconds +----------- +{u'SetUp': {'totaltime': '42.07', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '33.23', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '732.92', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Mean_proper_motions_over_the_sky': {'totaltime': '106.21', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}} + + +# Restart Spark Context,and try again just to double check + + +# -- Test #2 -- +# Run Public examples tests second time +# (mypython) user@local +>>> from aglais_benchmark import AglaisBenchmarker +>>> AglaisBenchmarker("https://raw.githubusercontent.com/stvoutsin/aglais-testing/main/config/notebooks/notebooks.json", "./config/zeppelin/").run(concurrent=False, users=1) + + +Test completed after: 1027.47 seconds +----------- +{'SetUp': {'totaltime': '44.70', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Mean_proper_motions_over_the_sky': {'totaltime': '132.56', 'status': 'SLOW', 'msg': '', 'valid': 'TRUE'}, 'Source_counts_over_the_sky.json': {'totaltime': '36.93', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '813.28', 'status': 'SLOW', 'msg': '', 'valid': 'TRUE'}} + + diff --git a/notes/stv/20211010-ansible-deploy-with-benchmarks.txt b/notes/stv/20211010-ansible-deploy-with-benchmarks.txt new file mode 100644 index 00000000..06b07eb0 --- /dev/null +++ b/notes/stv/20211010-ansible-deploy-with-benchmarks.txt @@ -0,0 +1,119 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Run benchmark tests via Ansible + + + Result: + + Success + + +# ----------------------------------------------------- +# Fetch target branch +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + git checkout 'issue-benchmarking' + popd + + + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2020.12.02 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-medium-04' \ + 'test' + + > real 68m23.293s + > user 15m49.227s + > sys 3m38.770s + + +.. 
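# -----------------------------------------------------
# Because 'test' was passed as the third argument, create-all.sh now runs the
# 36-run-benchmark.yml playbook automatically at the end of the deployment
# (see the create-all.sh change above). A sketch of re-running just the
# benchmark step by hand, assuming the same ansible inventory file that
# create-all.sh used is still referenced by "${inventory}" (placeholder,
# not a value taken from this log):
#[root@ansibler]

    pushd "/deployments/hadoop-yarn/ansible"

        ansible-playbook \
            --verbose \
            --inventory "${inventory:?}" \
            "36-run-benchmark.yml"

    popd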
+ + +TASK [Run benchmarker] ************************************************************************************************************************************************************************************** +changed: [monitor] => {"changed": true, "cmd": ["python", "/tmp/run-test.py"], "delta": "0:15:03.798760", "end": "2021-10-11 21:10:02.242377", "rc": 0, "start": "2021-10-11 20:54:58.443617", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 903.48 seconds\n{u'SetUp': {'totaltime': '44.02', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '33.37', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '721.65', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Mean_proper_motions_over_the_sky': {'totaltime': '104.43', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 903.48 seconds", "{u'SetUp': {'totaltime': '44.02', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '33.37', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '721.65', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Mean_proper_motions_over_the_sky': {'totaltime': '104.43', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}"]} + +PLAY RECAP ************************************************************************************************************************************************************************************************** +localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +/ + + + +# Is there a better way to output the results for readability? (Probably..) + diff --git a/notes/stv/20211011-Spark3-zeppelin-0.9.2.txt b/notes/stv/20211011-Spark3-zeppelin-0.9.2.txt new file mode 100644 index 00000000..cb9a5b88 --- /dev/null +++ b/notes/stv/20211011-Spark3-zeppelin-0.9.2.txt @@ -0,0 +1,220 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Deploy with more recent version of components: + Zeppelin 0.9.2 + Spark 3.0.3 + Hadoop 3.1.2 + + + Result: + + + + + +# ----------------------------------------------------- +# Create a container to work with. 
+#[user@desktop] + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2020.12.02 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-medium-04' + + + > real 68m23.293s + > user 15m49.227s + > sys 3m38.770s + +# ----------------------------------------------------- +# Run Notebook tests + +# From Zeppelin UI + +# Run Set Up: + +# Cell #1 +java.lang.RuntimeException: Interpreter Setting 'md' is not ready, its status is DOWNLOADING_DEPENDENCIES + at org.apache.zeppelin.notebook.Paragraph.jobRun(Paragraph.java:428) + at org.apache.zeppelin.notebook.Paragraph.jobRun(Paragraph.java:72) + at org.apache.zeppelin.scheduler.Job.run(Job.java:172) + at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:132) + at org.apache.zeppelin.scheduler.RemoteScheduler$JobRunner.run(RemoteScheduler.java:182) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) + at java.util.concurrent.FutureTask.run(FutureTask.java:266) + at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) + at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) + + +# Run a Spark Cell (Cell #2) + +org.apache.zeppelin.interpreter.InterpreterException: java.io.IOException: Fail to launch interpreter process: +SLF4J: Class path contains multiple SLF4J bindings. +SLF4J: Found binding in [jar:file:/home/fedora/zeppelin-0.9.0-bin-all/interpreter/spark/spark-interpreter-0.9.0.jar!/org/slf4j/impl/StaticLoggerBinder.class] +SLF4J: Found binding in [jar:file:/opt/spark-3.0.3-bin-hadoop2.7/jars/slf4j-log4j12-1.7.30.jar!/org/slf4j/impl/StaticLoggerBinder.class] +SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. 
+SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] +Exception in thread "main" org.apache.spark.SparkException: Master must either be yarn or start with spark, mesos, k8s, or local + at org.apache.spark.deploy.SparkSubmit.error(SparkSubmit.scala:936) + at org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:238) + at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:871) + at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) + at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) + at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016) + + +# Something wrong with configuration + + +# ----------------------------------------------------- +# Stop Zeppelin, Remove Zeppelin folder, and install latest manually + +wget https://dlcdn.apache.org/zeppelin/zeppelin-0.10.0/zeppelin-0.10.0-bin-all.tgz +tar -xzvf zeppelin-0.10.0-bin-all.tgz + + +# ----------------------------------------------------- +# Set Configurations +cd zeppelin-0.10.0-bin-all/ + + +# ----------------------------------------------------- +# Create Shiro conf file, and our user: gaiauser +cp conf/shiro.ini.template conf/shiro.ini + + + +# ----------------------------------------------------- +# Create zeppelin-site file, and set Zeppelin IP Address +cp conf/zeppelin-site.xml.template conf/zeppelin-site.xml + + +# ----------------------------------------------------- +# Setup Hadoop / Spark settings in Spark Interpreter + + # In Zeppelin UI / Spark Interpreter: + + SPARK_HOME /opt/spark + spark.master yarn + spark.submit.deployMode client + + +# ---------------------------------------------------------------- +# Start Zeppelin +/home/fedora/zeppelin-0.10.0-bin-all/bin/zeppelin-daemon.sh start + + +# ------------------------------------------------------------------------------------------------ +# Import Notebooks from https://github.com/wfau/aglais-testing/tree/main/notebooks/public_examples + + + +# ----------------------------------------------------------------------------------------------------------------------------------------- +# Run notebook https://github.com/wfau/aglais-testing/blob/main/notebooks/public_examples/SetUp.json + +Exception... + +Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user= , access=WRITE, inode="/":fedora:supergroup:drwxr-xr-x zeppelin + + +[Failed] + + + +# Temp fix: +# Set hdfs-site.xml settings to (Zeppelin & Master): + + + dfs.permissions.enabled + false + + + +# ----------------------------------------------------------------------------------------------------------------------------------------- +# Run notebook https://github.com/wfau/aglais-testing/blob/main/notebooks/public_examples/SetUp.json + +[Success] + + +# ----------------------------------------------------------------------------------------------------------------------------------------- +# Run notebook https://github.com/wfau/aglais-testing/blob/main/notebooks/public_examples/Source_counts_over_the_sky.json + +# Plot up the results +> Took 2 min 31 sec. Last updated by gaiauser at October 11 2021, 11:31:04 AM. 
+ +[Success] + + + +# Start again with newer version of Hadoop & Spark + diff --git a/notes/stv/20211013-Spark3-zeppelin-0.10.0-permission-issue.txt b/notes/stv/20211013-Spark3-zeppelin-0.10.0-permission-issue.txt new file mode 100644 index 00000000..4447c29c --- /dev/null +++ b/notes/stv/20211013-Spark3-zeppelin-0.10.0-permission-issue.txt @@ -0,0 +1,253 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Deploy with more recent version of components: + Zeppelin 0.10.0 + Spark 3.1.2 + Hadoop 3.2.1 + + + Result: + + + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2020.12.02 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-medium-04' \ + 'test' + + + + +TASK [Gathering Facts] ************************************************************************************************************************************************************************************** +task path: /deployments/hadoop-yarn/ansible/34-setup-shuffler.yml:31 +ok: [master01] +ok: [worker01] +ok: [worker04] +ok: [worker02] +ok: [worker03] +META: ran handlers + +TASK [copy] ************************************************************************************************************************************************************************************************* +task path: /deployments/hadoop-yarn/ansible/34-setup-shuffler.yml:34 +fatal: [master01]: FAILED! => {"changed": false, "checksum": "fff5e02293bf041ae281f42d1885e0867130dd8e", "msg": "Destination /opt/hadoop/share/hadoop/yarn not writable"} +fatal: [worker02]: FAILED! => {"changed": false, "checksum": "fff5e02293bf041ae281f42d1885e0867130dd8e", "msg": "Destination /opt/hadoop/share/hadoop/yarn not writable"} +fatal: [worker03]: FAILED! 
=> {"changed": false, "checksum": "fff5e02293bf041ae281f42d1885e0867130dd8e", "msg": "Destination /opt/hadoop/share/hadoop/yarn not writable"} +fatal: [worker01]: FAILED! => {"changed": false, "checksum": "fff5e02293bf041ae281f42d1885e0867130dd8e", "msg": "Destination /opt/hadoop/share/hadoop/yarn not writable"} +fatal: [worker04]: FAILED! => {"changed": false, "checksum": "fff5e02293bf041ae281f42d1885e0867130dd8e", "msg": "Destination /opt/hadoop/share/hadoop/yarn not writable"} + +PLAY RECAP ************************************************************************************************************************************************************************************************** +localhost : ok=78 changed=61 unreachable=0 failed=0 skipped=5 rescued=0 ignored=0 +master01 : ok=66 changed=36 unreachable=0 failed=1 skipped=6 rescued=0 ignored=0 +monitor : ok=12 changed=7 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0 +worker01 : ok=71 changed=39 unreachable=0 failed=1 skipped=5 rescued=0 ignored=0 +worker02 : ok=71 changed=39 unreachable=0 failed=1 skipped=5 rescued=0 ignored=0 +worker03 : ok=71 changed=39 unreachable=0 failed=1 skipped=5 rescued=0 ignored=0 +worker04 : ok=71 changed=39 unreachable=0 failed=1 skipped=5 rescued=0 ignored=0 +zeppelin : ok=74 changed=48 unreachable=0 failed=0 skipped=5 rescued=0 ignored=0 + + +real 22m14.802s +user 5m42.895s +sys 1m20.065s + + + +org.apache.zeppelin.interpreter.InterpreterException: org.apache.zeppelin.interpreter.InterpreterException: org.apache.zeppelin.interpreter.InterpreterException: Fail to open SparkInterpreter + at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:76) + at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:833) + at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:741) + at org.apache.zeppelin.scheduler.Job.run(Job.java:172) + at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:132) + at org.apache.zeppelin.scheduler.FIFOScheduler.lambda$runJobInScheduler$0(FIFOScheduler.java:42) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +Caused by: org.apache.zeppelin.interpreter.InterpreterException: org.apache.zeppelin.interpreter.InterpreterException: Fail to open SparkInterpreter + at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:76) + at org.apache.zeppelin.interpreter.Interpreter.getInterpreterInTheSameSessionByClassName(Interpreter.java:322) + at org.apache.zeppelin.interpreter.Interpreter.getInterpreterInTheSameSessionByClassName(Interpreter.java:333) + at org.apache.zeppelin.spark.PySparkInterpreter.open(PySparkInterpreter.java:90) + at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:70) + ... 8 more +Caused by: org.apache.zeppelin.interpreter.InterpreterException: Fail to open SparkInterpreter + at org.apache.zeppelin.spark.SparkInterpreter.open(SparkInterpreter.java:137) + at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:70) + ... 
12 more +Caused by: java.lang.reflect.InvocationTargetException + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.zeppelin.spark.BaseSparkScalaInterpreter.spark2CreateContext(BaseSparkScalaInterpreter.scala:299) + at org.apache.zeppelin.spark.BaseSparkScalaInterpreter.createSparkContext(BaseSparkScalaInterpreter.scala:228) + at org.apache.zeppelin.spark.SparkScala212Interpreter.open(SparkScala212Interpreter.scala:88) + at org.apache.zeppelin.spark.SparkInterpreter.open(SparkInterpreter.java:121) + ... 13 more +Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=gaiauser, access=WRITE, inode="/":fedora:supergroup:drwxr-xr-x + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399) + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:255) + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1879) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1863) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1822) + at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:59) + at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3233) + at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1145) + at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:720) + at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) + at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:528) + at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070) + at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:999) + at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:927) + at java.security.AccessController.doPrivileged(Native Method) + at javax.security.auth.Subject.doAs(Subject.java:422) + at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730) + at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2915) + + at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) + at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) + at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) + at java.lang.reflect.Constructor.newInstance(Constructor.java:423) + at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) + at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) + at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2426) + at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2400) + at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1324) + at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1321) + at 
org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) + at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1338) + at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1313) + at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:2275) + at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:674) + at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:447) + at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:887) + at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:202) + at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62) + at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220) + at org.apache.spark.SparkContext.(SparkContext.scala:579) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2672) + at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:945) + at scala.Option.getOrElse(Option.scala:189) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:939) + ... 21 more +Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=gaiauser, access=WRITE, inode="/":fedora:supergroup:drwxr-xr-x + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399) + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:255) + at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1879) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1863) + at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1822) + at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:59) + at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3233) + at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1145) + at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:720) + at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) + at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:528) + at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070) + at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:999) + at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:927) + at java.security.AccessController.doPrivileged(Native Method) + at javax.security.auth.Subject.doAs(Subject.java:422) + at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730) + at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2915) + + at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511) + at org.apache.hadoop.ipc.Client.call(Client.java:1457) + at org.apache.hadoop.ipc.Client.call(Client.java:1367) + at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228) + at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116) + at com.sun.proxy.$Proxy20.mkdirs(Unknown Source) + 
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:656) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422) + at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165) + at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157) + at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95) + at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359) + at com.sun.proxy.$Proxy21.mkdirs(Unknown Source) + at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2424) + ... 39 more + + + + +# After some investigation, it looks like the new Zeppelin runs Spark jobs as the logged in Zeppelin user, and fails because it lacks permission. +# Turn this off for now, so that everything is sent as the main Zeppelin user (After this change Spark notebooks work) + + + diff --git a/notes/stv/20211014-ansible-deploy-with-benchmarks-01.txt b/notes/stv/20211014-ansible-deploy-with-benchmarks-01.txt new file mode 100644 index 00000000..e48ad281 --- /dev/null +++ b/notes/stv/20211014-ansible-deploy-with-benchmarks-01.txt @@ -0,0 +1,133 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Run benchmark tests via Ansible on medium cluster + + + Result: + + SUCCESS + + +# ----------------------------------------------------- +# Fetch target branch +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + git checkout 'issue-benchmarking' + + popd + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. 
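# (Optional, a minimal sketch assuming the openstack CLI inside the client container reads
#  /etc/openstack/clouds.yaml: list what is currently running on the target cloud before
#  wiping it, so anything left behind after the delete is easy to spot.)
#[root@ansibler]

    openstack --os-cloud "${cloudname:?}" server list
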
+#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-medium-04' \ + 'test' + +TASK [Run benchmarker] ************************************************************************************************************************************************************************************** +changed: [monitor] => {"changed": true, "cmd": ["python", "/tmp/run-test.py"], "delta": "4:46:47.879708", "end": "2021-10-13 21:15:25.260592", "rc": 0, "start": "2021-10-13 16:28:37.380884", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 17207.59 seconds\n{u'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 17207.59 seconds", "{u'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}"]} + +PLAY RECAP ************************************************************************************************************************************************************************************************** +localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +/ + + > real 329m17.950s + > user 60m8.987s + > sys 8m6.424s + +{u'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 17207.59 seconds", "{u'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 
'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}} + + +# Is there a better way to output the results for readability? (Definitely..) + + +# ----------------------------------------------------- +# Display results for medium deploy + +# Results: + + 'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, + 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + + +# First time QC_cuts_dev & WD_detection_dev work, so keep track of duration +# Try the same tests with the large deploy, and take the lowest value between the two + diff --git a/notes/stv/20211014-ansible-deploy-with-benchmarks-02.txt b/notes/stv/20211014-ansible-deploy-with-benchmarks-02.txt new file mode 100644 index 00000000..c8f32de2 --- /dev/null +++ b/notes/stv/20211014-ansible-deploy-with-benchmarks-02.txt @@ -0,0 +1,190 @@ +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Run benchmark tests via Ansible + + + Result: + + SUCCESS + + +# ----------------------------------------------------- +# Fetch target branch +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + git checkout 'issue-benchmarking' + + popd + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. 
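# (Optional, a minimal sketch assuming the same clouds.yaml credentials: check that the
#  target cloud still authenticates before committing to another multi-hour
#  delete/create/benchmark cycle.)
#[root@ansibler]

    openstack --os-cloud "${cloudname:?}" token issue
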
+#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-large-06' \ + 'test' + + TASK [Run benchmarker] *********************************************************************************************************************************************** + changed: [monitor] => {"changed": true, "cmd": ["python", "/tmp/run-test.py"], "delta": "2:28:39.781959", "end": "2021-10-14 14:38:18.988792", "rc": 0, "start": "2021-10-14 12:09:39.206833", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 8919.50 seconds\n{u'Mean_proper_motions_over_the_sky': {'totaltime': '51.68', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '40.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '20.24', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4628.10', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3697.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '481.30', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 8919.50 seconds", "{u'Mean_proper_motions_over_the_sky': {'totaltime': '51.68', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '40.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '20.24', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4628.10', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3697.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '481.30', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}"]} + + PLAY RECAP ************************************************************************************************************************************************************ + localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + + + +> real 198m16.792s +> user 41m22.781s +> sys 6m42.860s + + +# Results: + +'Mean_proper_motions_over_the_sky': {'totaltime': '51.68', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +'SetUp': {'totaltime': '40.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +'Source_counts_over_the_sky.json': {'totaltime': '20.24', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +'QC_cuts_dev.json': {'totaltime': '4628.10', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +'WD_detection_dev.json': {'totaltime': '3697.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '481.30', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}} + +# ----------------------------------------------------- +# We are also writing results to a file on monitor +# Check output file on monitor +#[root@ansibler] + +ssh monitor +cat output.json +{"Mean_proper_motions_over_the_sky": {"totaltime": "51.68", "status": "SUCCESS", "valid": "TRUE", "msg": ""}, 
"SetUp": {"totaltime": "40.79", "status": "SUCCESS", "valid": "TRUE", "msg": ""}, "Source_counts_over_the_sky.json": {"totaltime": "20.24", "status": "SUCCESS", "valid": "TRUE", "msg": ""}, "QC_cuts_dev.json": {"totaltime": "4628.10", "status": "SUCCESS", "valid": "TRUE", "msg": ""}, "WD_detection_dev.json": {"totaltime": "3697.39", "status": "SUCCESS", "valid": "TRUE", "msg": ""}, "Good_astrometric_solutions_via_ML_Random_Forrest_classifier": {"totaltime": "481.30", "status": "SUCCESS", "valid": "TRUE", "msg": ""}}[ + + + +# Previous resuls from medium deploy for referece: + + + 'Mean_proper_motions_over_the_sky': {'totaltime': '63.75', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'SetUp': {'totaltime': '42.83', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'Source_counts_over_the_sky.json': {'totaltime': '21.17', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'QC_cuts_dev.json': {'totaltime': '6572.31', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, + 'WD_detection_dev.json': {'totaltime': '9976.49', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, + 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '531.04', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + + +# Take smallest of the two and update our expected runtimes in: + https://github.com/wfau/aglais-testing/blob/main/config/notebooks/notebooks.json + +# Take values and add a few seconds as a buffer, otherwise we'll get [SLOW] as the status for timing that might be close enought to the expected + + +... + { + "name" : "SetUp", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/SetUp.json", + "totaltime" : 45, + "results" : [] + }, + { + "name" : "Mean_proper_motions_over_the_sky", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Mean_proper_motions_over_the_sky.json", + "totaltime" : 55, + "results" : [] + }, + { + "name" : "Source_counts_over_the_sky.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Source_counts_over_the_sky.json", + "totaltime" : 22, + "results" : [] + }, + { + "name" : "Good_astrometric_solutions_via_ML_Random_Forrest_classifier", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Good_astrometric_solutions_via_ML_Random_Forrest_classifier.json", + "totaltime" : 500, + "results" : [] + }, + { + "name" : "QC_cuts_dev.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/QC_cuts_dev.json", + "totaltime" : 4700, + "results" : [] + }, + { + "name" : "WD_detection_dev.json", + "filepath" : "https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/WD_detection_dev.json", + "totaltime" : 3750, + "results" : [] + } + +... + + + diff --git a/notes/stv/20211015-Spark3-zeppelin-0.10.0.txt b/notes/stv/20211015-Spark3-zeppelin-0.10.0.txt new file mode 100644 index 00000000..09638513 --- /dev/null +++ b/notes/stv/20211015-Spark3-zeppelin-0.10.0.txt @@ -0,0 +1,151 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Deploy latest versions of Zeppelin / Spark / Hadoop + + + Result: + + SUCCESS + + +# ----------------------------------------------------- +# Fetch target branch +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + git checkout 'issue-upgrade-spark-3' + + popd + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + > real 3m48.394s + > user 0m46.416s + > sys 0m4.422s + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-large-06' \ + 'test' + +.. + +TASK [Install the required Python packages] ***************************************************************************************************************************************************************** +task path: /deployments/hadoop-yarn/ansible/29-install-pip-libs.yml:41 + +... + + +/pip-record-2dwvespp/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-x6volqvn/pillow/\n"} +fatal: [master01]: FAILED! 
=> {"changed": false, "cmd": ["/usr/bin/pip3", "install", "-r", "/tmp/requirements.txt"], "msg": "stdout: Collecting numpy==1.20.3 (from -r /tmp/requirements.txt (line 1))\n Downloading https://files.pythonhosted.org/packages/a5/42/560d269f604d3e186a57c21a363e77e199358d054884e61b73e405dd217c/numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.3MB)\nCollecting scipy==1.6.3 (from -r /tmp/requirements.txt (line 2))\n Downloading https://files.pythonhosted.org/packages/7d/e8/43ffca541d2f208d516296950b25fe1084b35c2881f4d444c1346ca75815/scipy-1.6.3-cp37-cp37m-manylinux1_x86_64.whl (27.4MB)\nCollecting matplotlib==3.4.2 (from -r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/24/33/5568d443ba438d95d4db635dd69958056f087e57e1026bee56f959d53f9d/matplotlib-3.4.2-cp37-cp37m-manylinux1_x86_64.whl (10.3MB)\nCollecting grpcio==1.37.1 (from -r /tmp/requirements.txt (line 4))\n Downloading https://files.pythonhosted.org/packages/13/73/4d5d3dd3c3e31161283e4e94a098983e84de61af6bed25a2b71ab4d280b7/grpcio-1.37.1-cp37-cp37m-manylinux2010_x86_64.whl (4.1MB)\nCollecting jupyter==1.0.0 (from -r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl\nCollecting Cython==0.29.23 (from -r /tmp/requirements.txt (line 6))\n Downloading https://files.pythonhosted.org/packages/0c/15/cca3ac44776df9ee27286941315dd8b14a598e8d80970200d05f720b9274/Cython-0.29.23-cp37-cp37m-manylinux1_x86_64.whl (2.0MB)\nCollecting protobuf==3.16.0 (from -r /tmp/requirements.txt (line 7))\n Downloading https://files.pythonhosted.org/packages/cd/4d/b5088b78457f4b4b729313fa4a6c67481fe3d2c4cd0a2e5bb7c873b6bb25/protobuf-3.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0MB)\nCollecting pandas==1.2.4 (from -r /tmp/requirements.txt (line 8))\n Downloading https://files.pythonhosted.org/packages/51/51/48f3fc47c4e2144da2806dfb6629c4dd1fa3d5a143f9652b141e979a8ca9/pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl (9.9MB)\nCollecting healpy==1.14.0 (from -r /tmp/requirements.txt (line 9))\n Downloading https://files.pythonhosted.org/packages/39/66/db489e95df3091afb79289680badac1def7f7b13090f0255c1b0c750b889/healpy-1.14.0-cp37-cp37m-manylinux1_x86_64.whl (15.8MB)\nCollecting astropy==4.2.1 (from -r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/27/0c/c946f63b0a6cf4c385a96de9bffc92abc0ec4e131405d2daa7f11668086b/astropy-4.2.1-cp37-cp37m-manylinux1_x86_64.whl (9.7MB)\nCollecting astroquery==0.4.1 (from -r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/1b/f8/4690523783691ed816b3469c3ec611af3798594d37ade510dd918d59f57e/astroquery-0.4.1.tar.gz (6.5MB)\nCollecting scikit-learn==0.24.2 (from -r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/a8/eb/a48f25c967526b66d5f1fa7a984594f0bf0a5afafa94a8c4dbc317744620/scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl (22.3MB)\nCollecting hdbscan==0.8.27 (from -r /tmp/requirements.txt (line 13))\n Downloading https://files.pythonhosted.org/packages/32/bb/59a75bc5ac66a9b4f9b8f979e4545af0e98bb1ca4e6ae96b3b956b554223/hdbscan-0.8.27.tar.gz (6.4MB)\n Installing build dependencies: started\n Installing build dependencies: finished with status 'done'\n Getting requirements to build wheel: started\n Getting requirements to build wheel: finished with status 'done'\n Preparing wheel metadata: started\n Preparing 
wheel metadata: finished with status 'done'\nCollecting pyvo==1.1 (from -r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/cf/8d/cdef5613bb450495d6fbef2e0408062f2d11f078b045987718936498204b/pyvo-1.1-py3-none-any.whl (802kB)\nRequirement already satisfied: python-dateutil>=2.7 in /usr/lib/python3.7/site-packages (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3)) (2.8.0)\nCollecting kiwisolver>=1.0.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/09/6b/6e567cb2e86d4e5939a9233f8734e26021b6a9c1bc4b1edccba236a84cc2/kiwisolver-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting cycler>=0.10 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/f7/d2/e07d3ebb2bd7af696440ce7e754c59dd546ffe1bbe732c8ab68b9c834e61/cycler-0.10.0-py2.py3-none-any.whl\nCollecting pillow>=6.2.0 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/7d/2a/2fc11b54e2742db06297f7fa7f420a0e3069fdcf0e4b57dfec33f0b08622/Pillow-8.4.0.tar.gz (49.4MB)\nCollecting pyparsing>=2.2.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/8a/bb/488841f56197b13700afd5658fc279a2025a39e22449b7cf29864669b15d/pyparsing-2.4.7-py2.py3-none-any.whl (67kB)\nRequirement already satisfied: six>=1.5.2 in /usr/lib/python3.7/site-packages (from grpcio==1.37.1->-r /tmp/requirements.txt (line 4)) (1.12.0)\nCollecting ipykernel (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4a/c8/2a8a5cb1afdecfa92c000e3a5d63a9fdd1b7fe77570f65536b3f05a05f14/ipykernel-6.4.1-py3-none-any.whl (124kB)\nCollecting notebook (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/25/83/c711332a3531afcc1a76e523bc1ceec309497d5faa99260fd50e920e7686/notebook-6.4.4-py3-none-any.whl (9.9MB)\nCollecting qtconsole (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3a/57/c8fc1fc6fb6bc03caca20ace9cd0ac0e16cc052b51cbe3acbeeb53abcb18/qtconsole-5.1.1-py3-none-any.whl (119kB)\nCollecting jupyter-console (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/cd/aa2670ffc99eb3e5bbe2294c71e4bf46a9804af4f378d09d7a8950996c9b/jupyter_console-6.4.0-py3-none-any.whl\nCollecting ipywidgets (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6b/bb/285066ddd710779cb69f03d42fa72fbfe4352b4895eb6abab551eae1535a/ipywidgets-7.6.5-py2.py3-none-any.whl (121kB)\nCollecting nbconvert (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/19/c7/f7d49d1347b87a6c9324688ead2f02e1c119b20e0cc0474e69edfe63ff11/nbconvert-6.2.0-py3-none-any.whl (553kB)\nRequirement already satisfied: pytz>=2017.3 in /usr/lib/python3.7/site-packages (from pandas==1.2.4->-r /tmp/requirements.txt (line 8)) (2018.5)\nCollecting pyerfa (from astropy==4.2.1->-r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/7e/0d/9afb1d671a41f89411987042cd7fc3fb090478380955cf6359bcd16a1b73/pyerfa-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl (746kB)\nRequirement already satisfied: requests>=2.4.3 in /usr/lib/python3.7/site-packages (from astroquery==0.4.1->-r 
/tmp/requirements.txt (line 11)) (2.21.0)\nCollecting keyring>=4.0 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/58/b7/cc5a5321a6119e23ee85745ba204a67d646835e8882ba36eece32ee2b4e1/keyring-23.2.1-py3-none-any.whl\nCollecting beautifulsoup4>=4.3.2 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/69/bf/f0f194d3379d3f3347478bd267f754fc68c11cbf2fe302a6ab69447b1417/beautifulsoup4-4.10.0-py3-none-any.whl (97kB)\nCollecting html5lib>=0.999 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl (112kB)\nCollecting threadpoolctl>=2.0.0 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/ff/fe/8aaca2a0db7fd80f0b2cf8a16a034d3eea8102d58ff9331d2aaf1f06766a/threadpoolctl-3.0.0-py3-none-any.whl\nCollecting joblib>=0.11 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/3e/d5/0163eb0cfa0b673aa4fe1cd3ea9d8a81ea0f32e50807b0c295871e4aab2e/joblib-1.1.0-py2.py3-none-any.whl (306kB)\nCollecting mimeparse (from pyvo==1.1->-r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/38/0c/7b02c30765658744acc51876781c580234cb1110296b231a3a524722f9c7/mimeparse-0.1.3.tar.gz\nCollecting traitlets<6.0,>=4.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/38/4c/466298b114eea62f300dbef98cc2c33c6cbc439f1f71bc199c674ae23c2c/traitlets-5.1.0-py3-none-any.whl (101kB)\nCollecting importlib-metadata<5; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/71/c2/cb1855f0b2a0ae9ccc9b69f150a7aebd4a8d815bd951e74621c4154c52a8/importlib_metadata-4.8.1-py3-none-any.whl\nCollecting debugpy<2.0,>=1.0.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5d/db/2e47db1dc6e25741fe910bce1b7aad15b5ab53a8c683bf51108c9f9e07a3/debugpy-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.9MB)\nCollecting matplotlib-inline<0.2.0,>=0.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a6/2d/2230afd570c70074e80fd06857ba2bdc5f10c055bd9125665fe276fadb67/matplotlib_inline-0.1.3-py3-none-any.whl\nCollecting argcomplete>=1.12.3; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/b7/9e/9dc74d330c07866d72f62d553fe8bdbe32786ff247a14e68b5659963e6bd/argcomplete-1.12.3-py2.py3-none-any.whl\nCollecting ipython<8.0,>=7.23.1 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/76/d1/e6166fc278a0aab9c2997ae241346837368fc9aa0c6eea9b0dbe2d727004/ipython-7.28.0-py3-none-any.whl (788kB)\nCollecting jupyter-client<8.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/51/06efe08a819c36215e02750b50ac1e5e322303a8369ec1bc4e915d485ad4/jupyter_client-7.0.6-py3-none-any.whl (125kB)\nCollecting tornado<7.0,>=4.2 (from 
ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/91/a8/9c5902233fa3c2e6a889cbd164333ddda5009669f494e3fadbeee2c03af5/tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl (428kB)\nCollecting ipython-genutils (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/fa/bc/9bd3b5c2b4774d5f33b2d544f1460be9df7df2fe42f352135381c347c69a/ipython_genutils-0.2.0-py2.py3-none-any.whl\nCollecting prometheus-client (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/da/4e8471ff825769581593b5b84769d32f58e5373b59fccaf355d3529ad530/prometheus_client-0.11.0-py2.py3-none-any.whl (56kB)\nCollecting Send2Trash>=1.5.0 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/47/26/3435896d757335ea53dce5abf8d658ca80757a7a06258451b358f10232be/Send2Trash-1.8.0-py3-none-any.whl\nCollecting pyzmq>=17 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/1e/cc/fb6b935a6c046be4b7728fea1f41998644dfaa25dab7837cf933bc4f7db9/pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting argon2-cffi (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/50/85/fa444619ba3709b8969a75bf051375261801d267bb69d6bd1764dabe528f/argon2_cffi-21.1.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl (96kB)\nRequirement already satisfied: jinja2 in /usr/lib/python3.7/site-packages (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.10)\nCollecting jupyter-core>=4.6.1 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ad/b3/160e578a3bcee2c3b2c60990f249bc84c56862757a7d2be1d6b55d66b2d3/jupyter_core-4.8.1-py3-none-any.whl (86kB)\nCollecting nbformat (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/e7/c7/dd50978c637a7af8234909277c4e7ec1b71310c13fb3135f3c8f5b6e045f/nbformat-5.1.3-py3-none-any.whl (178kB)\nCollecting terminado>=0.8.3 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/cb/17/b1162b39786c44e14d30ee557fbf41276c4a966dab01106c15fb70f5c27a/terminado-0.12.1-py3-none-any.whl\nCollecting pygments (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/78/c8/8d9be2f72d8f465461f22b5f199c04f7ada933add4dae6e2468133c17471/Pygments-2.10.0-py3-none-any.whl (1.0MB)\nCollecting qtpy (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/73/47/cc42c2b4fe4ddb7e289ef8f098c7249903ad09cd3f6ee8ec17c63de2b728/QtPy-1.11.2-py2.py3-none-any.whl (58kB)\nCollecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 (from jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/c6/37/ec72228971dbaf191243b8ee383c6a3834b5cde23daab066dfbfbbd5438b/prompt_toolkit-3.0.20-py3-none-any.whl (370kB)\nCollecting jupyterlab-widgets>=1.0.0; python_version >= \"3.6\" (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/18/4d/22a93473bca99c80f2d23f867ebbfee2f6c8e186bf678864eec641500910/jupyterlab_widgets-1.0.2-py3-none-any.whl (243kB)\nCollecting widgetsnbextension~=3.5.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6c/7b/7ac231c20d2d33c445eaacf8a433f4e22c60677eb9776c7c5262d7ddee2d/widgetsnbextension-3.5.1-py2.py3-none-any.whl (2.2MB)\nCollecting testpath (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/87/5422f6d056bfbded920ccf380a65de3713a3b95a95ba2255be2a3fb4f464/testpath-0.5.0-py3-none-any.whl (84kB)\nCollecting defusedxml (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl\nCollecting nbclient<0.6.0,>=0.5.0 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a7/ed/b764fa931614cb7ed9bebbc42532daecef405d6bef660eeda882f6c23b98/nbclient-0.5.4-py3-none-any.whl (66kB)\nCollecting jupyterlab-pygments (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a8/6f/c34288766797193b512c6508f5994b830fb06134fdc4ca8214daba0aa443/jupyterlab_pygments-0.1.2-py2.py3-none-any.whl\nCollecting pandocfilters>=1.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5e/a8/878258cffd53202a6cc1903c226cf09e58ae3df6b09f8ddfa98033286637/pandocfilters-1.5.0-py2.py3-none-any.whl\nCollecting bleach (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/64/cc/74d634e1e5659742973a23bb441404c53a7bedb6cd3962109ca5efb703e8/bleach-4.1.0-py2.py3-none-any.whl (157kB)\nCollecting entrypoints>=0.2.2 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/c6/44694103f8c221443ee6b0041f69e2740d89a25641e62fb4f2ee568f2f9c/entrypoints-0.3-py2.py3-none-any.whl\nCollecting mistune<2,>=0.8.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/ec/4b43dae793655b7d8a25f76119624350b4d65eb663459eb9603d7f1f0345/mistune-0.8.4-py2.py3-none-any.whl\nRequirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (3.0.4)\nRequirement already satisfied: idna<2.9,>=2.5 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.7)\nRequirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (1.24.1)\nCollecting SecretStorage>=3.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/d9/1e/29cd69fdac7391aa51510dfd42aa70b4e6a826c8cd019ee2a8ab9ec0777f/SecretStorage-3.3.1-py3-none-any.whl\nCollecting jeepney>=0.4.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading 
https://files.pythonhosted.org/packages/14/b8/bb3e34d71472140f9bfdf5d77cd063e2cc964b72b1bb0b70fe3c1e7db932/jeepney-0.7.1-py3-none-any.whl (54kB)\nCollecting soupsieve>1.2 (from beautifulsoup4>=4.3.2->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/36/69/d82d04022f02733bf9a72bc3b96332d360c0c5307096d76f6bb7489f7e57/soupsieve-2.2.1-py3-none-any.whl\nCollecting webencodings (from html5lib>=0.999->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl\nCollecting zipp>=0.5 (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/bd/df/d4a4974a3e3957fd1c1fa3082366d7fff6e428ddb55f074bf64876f8e8ad/zipp-3.6.0-py3-none-any.whl\nCollecting typing-extensions>=3.6.4; python_version < \"3.8\" (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/74/60/18783336cc7fcdd95dae91d73477830aa53f5d3181ae4fe20491d7fc3199/typing_extensions-3.10.0.2-py3-none-any.whl\nCollecting pexpect>4.3; sys_platform != \"win32\" (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/39/7b/88dbb785881c28a102619d46423cb853b46dbccc70d3ac362d99773a78ce/pexpect-4.8.0-py2.py3-none-any.whl (59kB)\nCollecting backcall (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4c/1c/ff6546b6c12603d8dd1070aa3c3d273ad4c07f5771689a7b69a550e8c951/backcall-0.2.0-py2.py3-none-any.whl\nCollecting decorator (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3d/cc/d7b758e54779f7e465179427de7e78c601d3330d6c411ea7ba9ae2f38102/decorator-5.1.0-py3-none-any.whl\nCollecting pickleshare (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/9a/41/220f49aaea88bc6fa6cba8d05ecf24676326156c23b991e80b3f2fc24c77/pickleshare-0.7.5-py2.py3-none-any.whl\nCollecting jedi>=0.16 (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/f9/36/7aa67ae2663025b49e8426ead0bad983fee1b73f472536e9790655da0277/jedi-0.18.0-py2.py3-none-any.whl (1.4MB)\nRequirement already satisfied: setuptools>=18.5 in /usr/lib/python3.7/site-packages (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (40.8.0)\nCollecting nest-asyncio>=1.5 (from jupyter-client<8.0->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/52/e2/9b37da54e6e9094d2f558ae643d1954a0fa8215dfee4fa261f31c5439796/nest_asyncio-1.5.1-py3-none-any.whl\nRequirement already satisfied: cffi>=1.0.0 in /usr/lib64/python3.7/site-packages (from argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.11.5)\nRequirement already satisfied: MarkupSafe>=0.23 in /usr/lib64/python3.7/site-packages (from jinja2->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.1.1)\nRequirement already satisfied: jsonschema!=2.5.0,>=2.4 in 
/usr/lib/python3.7/site-packages (from nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (3.0.1)\nCollecting ptyprocess; os_name != \"nt\" (from terminado>=0.8.3->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl\nCollecting wcwidth (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/7c/e39aca596badaf1b78e8f547c807b04dae603a433d3e7a7e04d67f2ef3e5/wcwidth-0.2.5-py2.py3-none-any.whl\nCollecting packaging (from bleach->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/77/e2362b676dc5008d81be423070dd9577fa03be5da2ba1105811900fda546/packaging-21.0-py3-none-any.whl (40kB)\nRequirement already satisfied: cryptography>=2.0 in /usr/lib64/python3.7/site-packages (from SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.6.1)\nCollecting parso<0.9.0,>=0.8.0 (from jedi>=0.16->ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a9/c4/d5476373088c120ffed82f34c74b266ccae31a68d665b837354d4d8dc8be/parso-0.8.2-py2.py3-none-any.whl (94kB)\nRequirement already satisfied: pycparser in /usr/lib/python3.7/site-packages (from cffi>=1.0.0->argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.14)\nRequirement already satisfied: attrs>=17.4.0 in /usr/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (18.2.0)\nRequirement already satisfied: pyrsistent>=0.14.0 in /usr/lib64/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (0.14.11)\nRequirement already satisfied: asn1crypto>=0.21.0 in /usr/lib/python3.7/site-packages (from cryptography>=2.0->SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (0.24.0)\nBuilding wheels for collected packages: hdbscan\n Building wheel for hdbscan (PEP 517): started\n Building wheel for hdbscan (PEP 517): finished with status 'done'\n Stored in directory: /root/.cache/pip/wheels/42/63/fb/314ad6c3b270887a3ecb588b8e5aac50b0fad38ff89bb6dff2\nSuccessfully built hdbscan\nInstalling collected packages: numpy, scipy, kiwisolver, cycler, pillow, pyparsing, matplotlib, grpcio, traitlets, zipp, typing-extensions, importlib-metadata, debugpy, matplotlib-inline, argcomplete, ptyprocess, pexpect, pygments, backcall, decorator, pickleshare, parso, jedi, wcwidth, prompt-toolkit, ipython, jupyter-core, tornado, entrypoints, nest-asyncio, pyzmq, jupyter-client, ipython-genutils, ipykernel, prometheus-client, Send2Trash, argon2-cffi, nbformat, testpath, defusedxml, nbclient, jupyterlab-pygments, pandocfilters, webencodings, packaging, bleach, mistune, nbconvert, terminado, notebook, qtpy, qtconsole, jupyter-console, jupyterlab-widgets, widgetsnbextension, ipywidgets, jupyter, Cython, protobuf, pandas, pyerfa, astropy, healpy, jeepney, SecretStorage, keyring, soupsieve, beautifulsoup4, html5lib, astroquery, threadpoolctl, joblib, scikit-learn, hdbscan, mimeparse, pyvo\n Running setup.py install for pillow: started\n Running setup.py 
install for pillow: finished with status 'error'\n Complete output from command /usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-jruklut2/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-4kpto90b/install-record.txt --single-version-externally-managed --compile:\n running install\n running build\n running build_py\n creating build\n creating build/lib.linux-x86_64-3.7\n creating build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegPresets.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FliImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ContainerIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GifImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageWin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XbmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_version.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePalette.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_tkinter_finder.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Image.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SgiImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BdfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw2.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MicImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMode.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcnsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageOps.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/features.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GribStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XVThumbImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_binary.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTransform.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageSequence.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageStat.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageGrab.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__init__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfParser.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/EpsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WebPImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageChops.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TarIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ExifTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpPaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SunImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/McIdasImagePlugin.py -> 
build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SpiderImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IptcImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTk.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/CurImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BmpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFilter.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PyAccess.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageShow.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageEnhance.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FpxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FtexImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WalImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImtImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PalmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PixarImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Hdf5StubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageQt.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GbrImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpGradientFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PngImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MspImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FitsStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PsdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GdImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFont.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Jpeg2KImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__main__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BlpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageColor.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DdsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageCms.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TgaImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WmfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_util.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMorph.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PSDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BufrStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n running egg_info\n writing src/Pillow.egg-info/PKG-INFO\n writing dependency_links to src/Pillow.egg-info/dependency_links.txt\n writing top-level names to src/Pillow.egg-info/top_level.txt\n reading manifest file 
'src/Pillow.egg-info/SOURCES.txt'\n reading manifest template 'MANIFEST.in'\n warning: no files found matching '*.c'\n warning: no files found matching '*.h'\n warning: no files found matching '*.sh'\n warning: no previously-included files found matching '.appveyor.yml'\n warning: no previously-included files found matching '.clang-format'\n warning: no previously-included files found matching '.coveragerc'\n warning: no previously-included files found matching '.editorconfig'\n warning: no previously-included files found matching '.readthedocs.yml'\n warning: no previously-included files found matching 'codecov.yml'\n warning: no previously-included files matching '.git*' found anywhere in distribution\n warning: no previously-included files matching '*.pyc' found anywhere in distribution\n warning: no previously-included files matching '*.so' found anywhere in distribution\n no previously-included directories found matching '.ci'\n writing manifest file 'src/Pillow.egg-info/SOURCES.txt'\n running build_ext\n \n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n Traceback (most recent call last):\n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 1024, in \n zip_safe=not (debug_build() or PLATFORM_MINGW),\n File \"/usr/lib/python3.7/site-packages/setuptools/__init__.py\", line 145, in setup\n return distutils.core.setup(**attrs)\n File \"/usr/lib64/python3.7/distutils/core.py\", line 148, in setup\n dist.run_commands()\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 966, in run_commands\n self.run_command(cmd)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/install.py\", line 61, in run\n return orig.install.run(self)\n File \"/usr/lib64/python3.7/distutils/command/install.py\", line 556, in run\n self.run_command('build')\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib64/python3.7/distutils/command/build.py\", line 135, in run\n self.run_command(cmd_name)\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/build_ext.py\", line 78, in run\n _build_ext.run(self)\n File \"/usr/lib64/python3.7/distutils/command/build_ext.py\", line 340, in run\n self.build_extensions()\n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 790, in build_extensions\n raise RequiredDependencyException(f)\n __main__.RequiredDependencyException: zlib\n \n During handling of the above exception, another exception occurred:\n \n Traceback (most recent call last):\n File \"\", line 1, in \n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 1037, in \n raise RequiredDependencyException(msg)\n __main__.RequiredDependencyException:\n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n \n \n ----------------------------------------\n\n:stderr: WARNING: 
Running pip install with root privileges is generally not a good idea. Try `pip3 install --user` instead.\nCommand \"/usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-jruklut2/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-4kpto90b/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-jruklut2/pillow/\n"} +fatal: [zeppelin]: FAILED! => {"changed": false, "cmd": ["/usr/bin/pip3", "install", "-r", "/tmp/requirements.txt"], "msg": "stdout: Collecting numpy==1.20.3 (from -r /tmp/requirements.txt (line 1))\n Downloading https://files.pythonhosted.org/packages/a5/42/560d269f604d3e186a57c21a363e77e199358d054884e61b73e405dd217c/numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.3MB)\nCollecting scipy==1.6.3 (from -r /tmp/requirements.txt (line 2))\n Downloading https://files.pythonhosted.org/packages/7d/e8/43ffca541d2f208d516296950b25fe1084b35c2881f4d444c1346ca75815/scipy-1.6.3-cp37-cp37m-manylinux1_x86_64.whl (27.4MB)\nCollecting matplotlib==3.4.2 (from -r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/24/33/5568d443ba438d95d4db635dd69958056f087e57e1026bee56f959d53f9d/matplotlib-3.4.2-cp37-cp37m-manylinux1_x86_64.whl (10.3MB)\nCollecting grpcio==1.37.1 (from -r /tmp/requirements.txt (line 4))\n Downloading https://files.pythonhosted.org/packages/13/73/4d5d3dd3c3e31161283e4e94a098983e84de61af6bed25a2b71ab4d280b7/grpcio-1.37.1-cp37-cp37m-manylinux2010_x86_64.whl (4.1MB)\nCollecting jupyter==1.0.0 (from -r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl\nCollecting Cython==0.29.23 (from -r /tmp/requirements.txt (line 6))\n Downloading https://files.pythonhosted.org/packages/0c/15/cca3ac44776df9ee27286941315dd8b14a598e8d80970200d05f720b9274/Cython-0.29.23-cp37-cp37m-manylinux1_x86_64.whl (2.0MB)\nCollecting protobuf==3.16.0 (from -r /tmp/requirements.txt (line 7))\n Downloading https://files.pythonhosted.org/packages/cd/4d/b5088b78457f4b4b729313fa4a6c67481fe3d2c4cd0a2e5bb7c873b6bb25/protobuf-3.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0MB)\nCollecting pandas==1.2.4 (from -r /tmp/requirements.txt (line 8))\n Downloading https://files.pythonhosted.org/packages/51/51/48f3fc47c4e2144da2806dfb6629c4dd1fa3d5a143f9652b141e979a8ca9/pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl (9.9MB)\nCollecting healpy==1.14.0 (from -r /tmp/requirements.txt (line 9))\n Downloading https://files.pythonhosted.org/packages/39/66/db489e95df3091afb79289680badac1def7f7b13090f0255c1b0c750b889/healpy-1.14.0-cp37-cp37m-manylinux1_x86_64.whl (15.8MB)\nCollecting astropy==4.2.1 (from -r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/27/0c/c946f63b0a6cf4c385a96de9bffc92abc0ec4e131405d2daa7f11668086b/astropy-4.2.1-cp37-cp37m-manylinux1_x86_64.whl (9.7MB)\nCollecting astroquery==0.4.1 (from -r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/1b/f8/4690523783691ed816b3469c3ec611af3798594d37ade510dd918d59f57e/astroquery-0.4.1.tar.gz (6.5MB)\nCollecting scikit-learn==0.24.2 (from -r /tmp/requirements.txt (line 12))\n Downloading 
https://files.pythonhosted.org/packages/a8/eb/a48f25c967526b66d5f1fa7a984594f0bf0a5afafa94a8c4dbc317744620/scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl (22.3MB)\nCollecting hdbscan==0.8.27 (from -r /tmp/requirements.txt (line 13))\n Downloading https://files.pythonhosted.org/packages/32/bb/59a75bc5ac66a9b4f9b8f979e4545af0e98bb1ca4e6ae96b3b956b554223/hdbscan-0.8.27.tar.gz (6.4MB)\n Installing build dependencies: started\n Installing build dependencies: finished with status 'done'\n Getting requirements to build wheel: started\n Getting requirements to build wheel: finished with status 'done'\n Preparing wheel metadata: started\n Preparing wheel metadata: finished with status 'done'\nCollecting pyvo==1.1 (from -r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/cf/8d/cdef5613bb450495d6fbef2e0408062f2d11f078b045987718936498204b/pyvo-1.1-py3-none-any.whl (802kB)\nRequirement already satisfied: python-dateutil>=2.7 in /usr/lib/python3.7/site-packages (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3)) (2.8.0)\nCollecting pillow>=6.2.0 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/7d/2a/2fc11b54e2742db06297f7fa7f420a0e3069fdcf0e4b57dfec33f0b08622/Pillow-8.4.0.tar.gz (49.4MB)\nCollecting pyparsing>=2.2.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/8a/bb/488841f56197b13700afd5658fc279a2025a39e22449b7cf29864669b15d/pyparsing-2.4.7-py2.py3-none-any.whl (67kB)\nCollecting kiwisolver>=1.0.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/09/6b/6e567cb2e86d4e5939a9233f8734e26021b6a9c1bc4b1edccba236a84cc2/kiwisolver-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting cycler>=0.10 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/f7/d2/e07d3ebb2bd7af696440ce7e754c59dd546ffe1bbe732c8ab68b9c834e61/cycler-0.10.0-py2.py3-none-any.whl\nRequirement already satisfied: six>=1.5.2 in /usr/lib/python3.7/site-packages (from grpcio==1.37.1->-r /tmp/requirements.txt (line 4)) (1.12.0)\nCollecting ipykernel (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4a/c8/2a8a5cb1afdecfa92c000e3a5d63a9fdd1b7fe77570f65536b3f05a05f14/ipykernel-6.4.1-py3-none-any.whl (124kB)\nCollecting ipywidgets (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6b/bb/285066ddd710779cb69f03d42fa72fbfe4352b4895eb6abab551eae1535a/ipywidgets-7.6.5-py2.py3-none-any.whl (121kB)\nCollecting nbconvert (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/19/c7/f7d49d1347b87a6c9324688ead2f02e1c119b20e0cc0474e69edfe63ff11/nbconvert-6.2.0-py3-none-any.whl (553kB)\nCollecting qtconsole (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3a/57/c8fc1fc6fb6bc03caca20ace9cd0ac0e16cc052b51cbe3acbeeb53abcb18/qtconsole-5.1.1-py3-none-any.whl (119kB)\nCollecting notebook (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/25/83/c711332a3531afcc1a76e523bc1ceec309497d5faa99260fd50e920e7686/notebook-6.4.4-py3-none-any.whl (9.9MB)\nCollecting jupyter-console (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/59/cd/aa2670ffc99eb3e5bbe2294c71e4bf46a9804af4f378d09d7a8950996c9b/jupyter_console-6.4.0-py3-none-any.whl\nRequirement already satisfied: pytz>=2017.3 in /usr/lib/python3.7/site-packages (from pandas==1.2.4->-r /tmp/requirements.txt (line 8)) (2018.5)\nCollecting pyerfa (from astropy==4.2.1->-r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/7e/0d/9afb1d671a41f89411987042cd7fc3fb090478380955cf6359bcd16a1b73/pyerfa-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl (746kB)\nRequirement already satisfied: requests>=2.4.3 in /usr/lib/python3.7/site-packages (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.21.0)\nCollecting keyring>=4.0 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/58/b7/cc5a5321a6119e23ee85745ba204a67d646835e8882ba36eece32ee2b4e1/keyring-23.2.1-py3-none-any.whl\nCollecting beautifulsoup4>=4.3.2 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/69/bf/f0f194d3379d3f3347478bd267f754fc68c11cbf2fe302a6ab69447b1417/beautifulsoup4-4.10.0-py3-none-any.whl (97kB)\nCollecting html5lib>=0.999 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl (112kB)\nCollecting joblib>=0.11 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/3e/d5/0163eb0cfa0b673aa4fe1cd3ea9d8a81ea0f32e50807b0c295871e4aab2e/joblib-1.1.0-py2.py3-none-any.whl (306kB)\nCollecting threadpoolctl>=2.0.0 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/ff/fe/8aaca2a0db7fd80f0b2cf8a16a034d3eea8102d58ff9331d2aaf1f06766a/threadpoolctl-3.0.0-py3-none-any.whl\nCollecting mimeparse (from pyvo==1.1->-r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/38/0c/7b02c30765658744acc51876781c580234cb1110296b231a3a524722f9c7/mimeparse-0.1.3.tar.gz\nCollecting ipython-genutils (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/fa/bc/9bd3b5c2b4774d5f33b2d544f1460be9df7df2fe42f352135381c347c69a/ipython_genutils-0.2.0-py2.py3-none-any.whl\nCollecting ipython<8.0,>=7.23.1 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/76/d1/e6166fc278a0aab9c2997ae241346837368fc9aa0c6eea9b0dbe2d727004/ipython-7.28.0-py3-none-any.whl (788kB)\nCollecting debugpy<2.0,>=1.0.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5d/db/2e47db1dc6e25741fe910bce1b7aad15b5ab53a8c683bf51108c9f9e07a3/debugpy-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.9MB)\nCollecting importlib-metadata<5; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/71/c2/cb1855f0b2a0ae9ccc9b69f150a7aebd4a8d815bd951e74621c4154c52a8/importlib_metadata-4.8.1-py3-none-any.whl\nCollecting argcomplete>=1.12.3; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/b7/9e/9dc74d330c07866d72f62d553fe8bdbe32786ff247a14e68b5659963e6bd/argcomplete-1.12.3-py2.py3-none-any.whl\nCollecting tornado<7.0,>=4.2 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/91/a8/9c5902233fa3c2e6a889cbd164333ddda5009669f494e3fadbeee2c03af5/tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl (428kB)\nCollecting matplotlib-inline<0.2.0,>=0.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a6/2d/2230afd570c70074e80fd06857ba2bdc5f10c055bd9125665fe276fadb67/matplotlib_inline-0.1.3-py3-none-any.whl\nCollecting jupyter-client<8.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/51/06efe08a819c36215e02750b50ac1e5e322303a8369ec1bc4e915d485ad4/jupyter_client-7.0.6-py3-none-any.whl (125kB)\nCollecting traitlets<6.0,>=4.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/38/4c/466298b114eea62f300dbef98cc2c33c6cbc439f1f71bc199c674ae23c2c/traitlets-5.1.0-py3-none-any.whl (101kB)\nCollecting nbformat>=4.2.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/e7/c7/dd50978c637a7af8234909277c4e7ec1b71310c13fb3135f3c8f5b6e045f/nbformat-5.1.3-py3-none-any.whl (178kB)\nCollecting jupyterlab-widgets>=1.0.0; python_version >= \"3.6\" (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/18/4d/22a93473bca99c80f2d23f867ebbfee2f6c8e186bf678864eec641500910/jupyterlab_widgets-1.0.2-py3-none-any.whl (243kB)\nCollecting widgetsnbextension~=3.5.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6c/7b/7ac231c20d2d33c445eaacf8a433f4e22c60677eb9776c7c5262d7ddee2d/widgetsnbextension-3.5.1-py2.py3-none-any.whl (2.2MB)\nCollecting jupyterlab-pygments (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a8/6f/c34288766797193b512c6508f5994b830fb06134fdc4ca8214daba0aa443/jupyterlab_pygments-0.1.2-py2.py3-none-any.whl\nRequirement already satisfied: jinja2>=2.4 in /usr/lib/python3.7/site-packages (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.10)\nCollecting bleach (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/64/cc/74d634e1e5659742973a23bb441404c53a7bedb6cd3962109ca5efb703e8/bleach-4.1.0-py2.py3-none-any.whl (157kB)\nCollecting jupyter-core (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ad/b3/160e578a3bcee2c3b2c60990f249bc84c56862757a7d2be1d6b55d66b2d3/jupyter_core-4.8.1-py3-none-any.whl (86kB)\nCollecting mistune<2,>=0.8.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/ec/4b43dae793655b7d8a25f76119624350b4d65eb663459eb9603d7f1f0345/mistune-0.8.4-py2.py3-none-any.whl\nCollecting pandocfilters>=1.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5e/a8/878258cffd53202a6cc1903c226cf09e58ae3df6b09f8ddfa98033286637/pandocfilters-1.5.0-py2.py3-none-any.whl\nCollecting testpath (from 
nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/87/5422f6d056bfbded920ccf380a65de3713a3b95a95ba2255be2a3fb4f464/testpath-0.5.0-py3-none-any.whl (84kB)\nCollecting defusedxml (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl\nCollecting entrypoints>=0.2.2 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/c6/44694103f8c221443ee6b0041f69e2740d89a25641e62fb4f2ee568f2f9c/entrypoints-0.3-py2.py3-none-any.whl\nCollecting nbclient<0.6.0,>=0.5.0 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a7/ed/b764fa931614cb7ed9bebbc42532daecef405d6bef660eeda882f6c23b98/nbclient-0.5.4-py3-none-any.whl (66kB)\nCollecting pygments>=2.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/78/c8/8d9be2f72d8f465461f22b5f199c04f7ada933add4dae6e2468133c17471/Pygments-2.10.0-py3-none-any.whl (1.0MB)\nCollecting qtpy (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/73/47/cc42c2b4fe4ddb7e289ef8f098c7249903ad09cd3f6ee8ec17c63de2b728/QtPy-1.11.2-py2.py3-none-any.whl (58kB)\nCollecting pyzmq>=17.1 (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/1e/cc/fb6b935a6c046be4b7728fea1f41998644dfaa25dab7837cf933bc4f7db9/pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting prometheus-client (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/da/4e8471ff825769581593b5b84769d32f58e5373b59fccaf355d3529ad530/prometheus_client-0.11.0-py2.py3-none-any.whl (56kB)\nCollecting Send2Trash>=1.5.0 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/47/26/3435896d757335ea53dce5abf8d658ca80757a7a06258451b358f10232be/Send2Trash-1.8.0-py3-none-any.whl\nCollecting terminado>=0.8.3 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/cb/17/b1162b39786c44e14d30ee557fbf41276c4a966dab01106c15fb70f5c27a/terminado-0.12.1-py3-none-any.whl\nCollecting argon2-cffi (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/50/85/fa444619ba3709b8969a75bf051375261801d267bb69d6bd1764dabe528f/argon2_cffi-21.1.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl (96kB)\nCollecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 (from jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/c6/37/ec72228971dbaf191243b8ee383c6a3834b5cde23daab066dfbfbbd5438b/prompt_toolkit-3.0.20-py3-none-any.whl (370kB)\nRequirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (3.0.4)\nRequirement already satisfied: idna<2.9,>=2.5 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.7)\nRequirement already satisfied: urllib3<1.25,>=1.21.1 in 
/usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (1.24.1)\nCollecting jeepney>=0.4.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/14/b8/bb3e34d71472140f9bfdf5d77cd063e2cc964b72b1bb0b70fe3c1e7db932/jeepney-0.7.1-py3-none-any.whl (54kB)\nCollecting SecretStorage>=3.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/d9/1e/29cd69fdac7391aa51510dfd42aa70b4e6a826c8cd019ee2a8ab9ec0777f/SecretStorage-3.3.1-py3-none-any.whl\nCollecting soupsieve>1.2 (from beautifulsoup4>=4.3.2->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/36/69/d82d04022f02733bf9a72bc3b96332d360c0c5307096d76f6bb7489f7e57/soupsieve-2.2.1-py3-none-any.whl\nCollecting webencodings (from html5lib>=0.999->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl\nRequirement already satisfied: setuptools>=18.5 in /usr/lib/python3.7/site-packages (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (40.8.0)\nCollecting pickleshare (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/9a/41/220f49aaea88bc6fa6cba8d05ecf24676326156c23b991e80b3f2fc24c77/pickleshare-0.7.5-py2.py3-none-any.whl\nCollecting pexpect>4.3; sys_platform != \"win32\" (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/39/7b/88dbb785881c28a102619d46423cb853b46dbccc70d3ac362d99773a78ce/pexpect-4.8.0-py2.py3-none-any.whl (59kB)\nCollecting backcall (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4c/1c/ff6546b6c12603d8dd1070aa3c3d273ad4c07f5771689a7b69a550e8c951/backcall-0.2.0-py2.py3-none-any.whl\nCollecting decorator (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3d/cc/d7b758e54779f7e465179427de7e78c601d3330d6c411ea7ba9ae2f38102/decorator-5.1.0-py3-none-any.whl\nCollecting jedi>=0.16 (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/f9/36/7aa67ae2663025b49e8426ead0bad983fee1b73f472536e9790655da0277/jedi-0.18.0-py2.py3-none-any.whl (1.4MB)\nCollecting zipp>=0.5 (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/bd/df/d4a4974a3e3957fd1c1fa3082366d7fff6e428ddb55f074bf64876f8e8ad/zipp-3.6.0-py3-none-any.whl\nCollecting typing-extensions>=3.6.4; python_version < \"3.8\" (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/74/60/18783336cc7fcdd95dae91d73477830aa53f5d3181ae4fe20491d7fc3199/typing_extensions-3.10.0.2-py3-none-any.whl\nCollecting nest-asyncio>=1.5 (from jupyter-client<8.0->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n 
Downloading https://files.pythonhosted.org/packages/52/e2/9b37da54e6e9094d2f558ae643d1954a0fa8215dfee4fa261f31c5439796/nest_asyncio-1.5.1-py3-none-any.whl\nRequirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/lib/python3.7/site-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (3.0.1)\nRequirement already satisfied: MarkupSafe>=0.23 in /usr/lib64/python3.7/site-packages (from jinja2>=2.4->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.1.1)\nCollecting packaging (from bleach->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/77/e2362b676dc5008d81be423070dd9577fa03be5da2ba1105811900fda546/packaging-21.0-py3-none-any.whl (40kB)\nCollecting ptyprocess; os_name != \"nt\" (from terminado>=0.8.3->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl\nRequirement already satisfied: cffi>=1.0.0 in /usr/lib64/python3.7/site-packages (from argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.11.5)\nCollecting wcwidth (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/7c/e39aca596badaf1b78e8f547c807b04dae603a433d3e7a7e04d67f2ef3e5/wcwidth-0.2.5-py2.py3-none-any.whl\nRequirement already satisfied: cryptography>=2.0 in /usr/lib64/python3.7/site-packages (from SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.6.1)\nCollecting parso<0.9.0,>=0.8.0 (from jedi>=0.16->ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a9/c4/d5476373088c120ffed82f34c74b266ccae31a68d665b837354d4d8dc8be/parso-0.8.2-py2.py3-none-any.whl (94kB)\nRequirement already satisfied: attrs>=17.4.0 in /usr/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (18.2.0)\nRequirement already satisfied: pyrsistent>=0.14.0 in /usr/lib64/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (0.14.11)\nRequirement already satisfied: pycparser in /usr/lib/python3.7/site-packages (from cffi>=1.0.0->argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.14)\nRequirement already satisfied: asn1crypto>=0.21.0 in /usr/lib/python3.7/site-packages (from cryptography>=2.0->SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (0.24.0)\nBuilding wheels for collected packages: hdbscan\n Building wheel for hdbscan (PEP 517): started\n Building wheel for hdbscan (PEP 517): finished with status 'done'\n Stored in directory: /root/.cache/pip/wheels/42/63/fb/314ad6c3b270887a3ecb588b8e5aac50b0fad38ff89bb6dff2\nSuccessfully built hdbscan\nInstalling collected packages: numpy, scipy, pillow, pyparsing, kiwisolver, cycler, matplotlib, grpcio, ipython-genutils, pickleshare, wcwidth, prompt-toolkit, ptyprocess, pexpect, traitlets, matplotlib-inline, backcall, decorator, pygments, parso, jedi, ipython, debugpy, zipp, typing-extensions, importlib-metadata, argcomplete, tornado, entrypoints, nest-asyncio, pyzmq, 
jupyter-core, jupyter-client, ipykernel, nbformat, jupyterlab-widgets, prometheus-client, Send2Trash, jupyterlab-pygments, webencodings, packaging, bleach, mistune, pandocfilters, testpath, defusedxml, nbclient, nbconvert, terminado, argon2-cffi, notebook, widgetsnbextension, ipywidgets, qtpy, qtconsole, jupyter-console, jupyter, Cython, protobuf, pandas, pyerfa, astropy, healpy, jeepney, SecretStorage, keyring, soupsieve, beautifulsoup4, html5lib, astroquery, joblib, threadpoolctl, scikit-learn, hdbscan, mimeparse, pyvo\n Running setup.py install for pillow: started\n Running setup.py install for pillow: finished with status 'error'\n Complete output from command /usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-0bqc1gut/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-2hjj_zwp/install-record.txt --single-version-externally-managed --compile:\n running install\n running build\n running build_py\n creating build\n creating build/lib.linux-x86_64-3.7\n creating build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegPresets.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FliImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ContainerIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GifImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageWin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XbmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_version.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePalette.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_tkinter_finder.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Image.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SgiImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BdfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw2.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MicImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMode.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcnsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageOps.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/features.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GribStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XVThumbImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_binary.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTransform.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageSequence.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageStat.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageGrab.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__init__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfParser.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/EpsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying 
src/PIL/PdfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WebPImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageChops.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TarIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ExifTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpPaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SunImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/McIdasImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SpiderImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IptcImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTk.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/CurImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BmpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFilter.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PyAccess.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageShow.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageEnhance.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FpxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FtexImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WalImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImtImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PalmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PixarImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Hdf5StubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageQt.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GbrImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpGradientFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PngImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MspImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FitsStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PsdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GdImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFont.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Jpeg2KImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__main__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BlpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageColor.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DdsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageCms.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TgaImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WmfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_util.py -> 
build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMorph.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PSDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BufrStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n running egg_info\n writing src/Pillow.egg-info/PKG-INFO\n writing dependency_links to src/Pillow.egg-info/dependency_links.txt\n writing top-level names to src/Pillow.egg-info/top_level.txt\n reading manifest file 'src/Pillow.egg-info/SOURCES.txt'\n reading manifest template 'MANIFEST.in'\n warning: no files found matching '*.c'\n warning: no files found matching '*.h'\n warning: no files found matching '*.sh'\n warning: no previously-included files found matching '.appveyor.yml'\n warning: no previously-included files found matching '.clang-format'\n warning: no previously-included files found matching '.coveragerc'\n warning: no previously-included files found matching '.editorconfig'\n warning: no previously-included files found matching '.readthedocs.yml'\n warning: no previously-included files found matching 'codecov.yml'\n warning: no previously-included files matching '.git*' found anywhere in distribution\n warning: no previously-included files matching '*.pyc' found anywhere in distribution\n warning: no previously-included files matching '*.so' found anywhere in distribution\n no previously-included directories found matching '.ci'\n writing manifest file 'src/Pillow.egg-info/SOURCES.txt'\n running build_ext\n \n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n Traceback (most recent call last):\n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 1024, in \n zip_safe=not (debug_build() or PLATFORM_MINGW),\n File \"/usr/lib/python3.7/site-packages/setuptools/__init__.py\", line 145, in setup\n return distutils.core.setup(**attrs)\n File \"/usr/lib64/python3.7/distutils/core.py\", line 148, in setup\n dist.run_commands()\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 966, in run_commands\n self.run_command(cmd)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/install.py\", line 61, in run\n return orig.install.run(self)\n File \"/usr/lib64/python3.7/distutils/command/install.py\", line 556, in run\n self.run_command('build')\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib64/python3.7/distutils/command/build.py\", line 135, in run\n self.run_command(cmd_name)\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/build_ext.py\", line 78, in run\n _build_ext.run(self)\n File \"/usr/lib64/python3.7/distutils/command/build_ext.py\", line 340, in run\n self.build_extensions()\n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 790, in build_extensions\n raise RequiredDependencyException(f)\n __main__.RequiredDependencyException: zlib\n 
\n During handling of the above exception, another exception occurred:\n \n Traceback (most recent call last):\n File \"\", line 1, in \n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 1037, in \n raise RequiredDependencyException(msg)\n __main__.RequiredDependencyException:\n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n \n \n ----------------------------------------\n\n:stderr: WARNING: Running pip install with root privileges is generally not a good idea. Try `pip3 install --user` instead.\nCommand \"/usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-0bqc1gut/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-2hjj_zwp/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-0bqc1gut/pillow/\n"}
+
+
+..
+
+
+# After some investigation, the installation works after changing /deployments/hadoop-yarn/ansible/29-install-pip-libs.yml to:
+
+- name: "Install Python libraries"
+  hosts: masters:workers:zeppelin
+  gather_facts: false
+  vars_files:
+    - config/ansible.yml
+    - /tmp/ansible-vars.yml
+
+
+
+  tasks:
+
+
+    - name: "Install required system libraries"
+      become: yes
+      yum:
+        name: libtiff-devel,libjpeg-devel,libzip-devel,freetype-devel,lcms2-devel,libwebp-devel,tcl-devel,tk-devel
+        update_cache: yes
+        state: present
+
+    - name: Copy pip requirements file into tmp
+      become: yes
+      copy:
+        src: "{{ playbook_dir | dirname | dirname }}/common/pip/requirements.txt"
+        dest: "/tmp/requirements.txt"
+
+
+    - name: Install the required Python packages
+      become: yes
+      pip:
+        requirements: "/tmp/requirements.txt"
+
+
+
+
+
diff --git a/notes/stv/20211018-Zeppelin-0.10-Spark-3-test-01.txt b/notes/stv/20211018-Zeppelin-0.10-Spark-3-test-01.txt
new file mode 100644
index 00000000..2ca3dfa1
--- /dev/null
+++ b/notes/stv/20211018-Zeppelin-0.10-Spark-3-test-01.txt
@@ -0,0 +1,154 @@
+#
+#
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+
+    Target:
+
+        Deploy and test with latest version of Spark / Hadoop & Zeppelin
+        Zeppelin 0.10.0
+        Hadoop 3.2.1
+        Spark 3.1.2
+
+    Result:
+
+        SUCCESS
+
+
+# -----------------------------------------------------
+# Fetch target branch
+#[user@desktop]
+
+    source "${HOME:?}/aglais.env"
+    pushd "${AGLAIS_CODE}"
+        git checkout 'issue-upgrade-spark-3'
+
+    popd
+
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+
+    source "${HOME:?}/aglais.env"
+
+    docker run \
+        --rm \
+        --tty \
+        --interactive \
+        --name ansibler \
+        --hostname ansibler \
+        --publish 3000:3000 \
+        --publish 8088:8088 \
+        --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+        --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+        --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+        --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+        atolmis/ansible-client:2021.08.25 \
+        bash
+
+
+# -----------------------------------------------------
+# Set the target cloud.
+#[root@ansibler]
+
+    cloudname=gaia-test
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+    time \
+        /deployments/openstack/bin/delete-all.sh \
+            "${cloudname:?}"
+
+    > Done
+
+    > real 3m48.394s
+    > user 0m46.416s
+    > sys 0m4.422s
+
+
+# -----------------------------------------------------
+# Create everything, using a standard config.
+#[root@ansibler]
+
+    time \
+        /deployments/hadoop-yarn/bin/create-all.sh \
+            "${cloudname:?}" \
+            'cclake-large-06' \
+            'test'
+
+..
+
+TASK [Run benchmarker] **************************************************************************************************************************************************************************************
+changed: [monitor] => {"changed": true, "cmd": ["python", "/tmp/run-test.py"], "delta": "2:21:55.710402", "end": "2021-10-18 13:16:11.156047", "rc": 0, "start": "2021-10-18 10:54:15.445645", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 8515.37 seconds\n{u'Mean_proper_motions_over_the_sky': {'totaltime': '50.47', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.67', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '20.06', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4484.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3445.55', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '471.82', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 8515.37 seconds", "{u'Mean_proper_motions_over_the_sky': {'totaltime': '50.47', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '42.67', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '20.06', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4484.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3445.55', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '471.82', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}"]}
+
+PLAY RECAP **************************************************************************************************************************************************************************************************
+localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
+monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
+
+/
+
+real 194m20.050s
+user 36m59.427s
+sys 6m24.762s
+
+
+# Results:
+
+    'Mean_proper_motions_over_the_sky': {'totaltime': '50.47', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+    'SetUp': {'totaltime': '42.67', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+    'Source_counts_over_the_sky.json': {'totaltime': '20.06', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+    'QC_cuts_dev.json': {'totaltime': '4484.79', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+    'WD_detection_dev.json': {'totaltime': '3445.55', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+    'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '471.82', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}
+
+
+# All Tests passed.
+
+
+# -------------------------------------------------------------------------------
+# Log into Zeppelin (0.10.0) and run some queries manually
+# We are logging in as a separate user purposefully, in order
+# to see if multiple users are able to run notebooks without requiring a restart
+#[admin@firefox]
+
+# Run Setup
+# https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/SetUp.json
+
+[SUCCESS]
+
+
+# Run Mean_proper_motions_over_the_sky
+# https://raw.githubusercontent.com/wfau/aglais-testing/main/notebooks/public_examples/Mean_proper_motions_over_the_sky.json
+
+
+Mean RA proper motion plot
+> Took 50 sec. Last updated by admin at October 18 2021, 4:58:18 PM. (outdated)
+
+
+[SUCCESS]
+
+
+
+
diff --git a/notes/stv/20211019-Zeppelin-0.10-Spark-3-test-01.txt b/notes/stv/20211019-Zeppelin-0.10-Spark-3-test-01.txt
new file mode 100644
index 00000000..e3839a36
--- /dev/null
+++ b/notes/stv/20211019-Zeppelin-0.10-Spark-3-test-01.txt
@@ -0,0 +1,121 @@
+#
+#
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#
+#
+
+    Target:
+
+        Deploy and test with latest version of Spark / Hadoop & Zeppelin
+        Zeppelin 0.10.0
+        Hadoop 3.2.1
+        Spark 3.1.2
+
+    Result:
+
+        SUCCESS
+
+
+# -----------------------------------------------------
+# Fetch target branch
+#[user@desktop]
+
+    source "${HOME:?}/aglais.env"
+    pushd "${AGLAIS_CODE}"
+        git checkout 'issue-upgrade-spark-3'
+
+    popd
+
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop]
+
+
+    source "${HOME:?}/aglais.env"
+
+    docker run \
+        --rm \
+        --tty \
+        --interactive \
+        --name ansibler \
+        --hostname ansibler \
+        --publish 3000:3000 \
+        --publish 8088:8088 \
+        --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+        --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+        --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+        --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+        atolmis/ansible-client:2021.08.25 \
+        bash
+
+
+# -----------------------------------------------------
+# Set the target cloud.
+#[root@ansibler]
+
+    cloudname=gaia-test
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+    time \
+        /deployments/openstack/bin/delete-all.sh \
+            "${cloudname:?}"
+
+    > Done
+
+
+
+# -----------------------------------------------------
+# Create everything, using a standard config.
+#[root@ansibler] + + nohup /deployments/hadoop-yarn/bin/create-all.sh "${cloudname:?}" 'cclake-large-06' 'test' > output.log & + + + +# Leave running for a few hours + +tail -f -n 1000 output.log + +TASK [Run benchmarker] ********************************************************* +changed: [monitor] => {"changed": true, "cmd": ["python", "/tmp/run-test.py"], "delta": "2:19:44.710218", "end": "2021-10-19 13:54:52.203922", "rc": 0, "start": "2021-10-19 11:35:07.493704", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 8384.43 seconds\n{u'Mean_proper_motions_over_the_sky': {'totaltime': '48.44', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '47.72', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '18.08', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4317.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3465.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '487.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}", "stdout_lines": ["Test completed after: 8384.43 seconds", "{u'Mean_proper_motions_over_the_sky': {'totaltime': '48.44', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'SetUp': {'totaltime': '47.72', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''}, u'Source_counts_over_the_sky.json': {'totaltime': '18.08', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'QC_cuts_dev.json': {'totaltime': '4317.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'WD_detection_dev.json': {'totaltime': '3465.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}, u'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '487.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}}"]} + +PLAY RECAP ********************************************************************* +localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +# Results: + + 'Mean_proper_motions_over_the_sky': {'totaltime': '48.44', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + 'SetUp': {'totaltime': '47.72', 'status': 'SLOW', 'valid': 'TRUE', 'msg': ''} + 'Source_counts_over_the_sky.json': {'totaltime': '18.08', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + 'QC_cuts_dev.json': {'totaltime': '4317.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + 'WD_detection_dev.json': {'totaltime': '3465.40', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} + 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '487.39', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''}} + + +# All Tests passed. +# Setup ran a bit slower than template, but not by that much + + + diff --git a/notes/stv/20211020-debug-job-manager.txt b/notes/stv/20211020-debug-job-manager.txt new file mode 100644 index 00000000..0f5c8c8f --- /dev/null +++ b/notes/stv/20211020-debug-job-manager.txt @@ -0,0 +1,59 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+#
+#
+
+    Target:
+
+        Debug the JobManager issue in the upgraded Zeppelin (0.10.0).
+        With the newer Zeppelin deployment, clicking the JobManager UI only shows a
+        'Loading' notification, nothing else appears, and the list of interpreters is empty.
+
+    Result:
+
+        Success
+
+
+
+# Try a new deploy of Zeppelin 0.10.0 from scratch.
+# In a new Zeppelin installation, the Zeppelin config file (conf/zeppelin-site.xml) contains the following entry:
+
+..
+
+    <property>
+        <name>zeppelin.jobmanager.enable</name>
+        <value>false</value>
+        <description>The Job tab in zeppelin page seems not so useful instead it cost lots of memory and affect the performance.
+            Disable it can save lots of memory</description>
+    </property>
+
+..
+
+
+    " The Job tab in zeppelin page seems not so useful instead it cost lots of memory and affect the performance.
+      Disable it can save lots of memory "
+
+
+
+# After enabling it, by setting the value to true, the JobManager UI works again.
+
+# This seems odd. I would expect that disabling it would hide the button that links to it, however that is not
+# what happens; instead we get what looks like a bug, where the button is still there but the page it takes us
+# to does not work.
+
+# For now let's leave it enabled, and think about whether we want to disable it later. One of the reasons for
+# leaving it enabled is user expectations, as the alternative looks like a bug to the user. However, on the
+# other side, the comment above warns that the Job tab costs memory and affects performance.
+
diff --git a/notes/stv/20211021-pip-dependency-issue.txt b/notes/stv/20211021-pip-dependency-issue.txt
new file mode 100644
index 00000000..cc419a62
--- /dev/null
+++ b/notes/stv/20211021-pip-dependency-issue.txt
@@ -0,0 +1,249 @@
+#
+#
+#
+# Copyright (c) 2021, ROE (http://www.roe.ac.uk/)
+#
+# This information is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This information is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+#
+#
+
+    Target:
+
+        Deploy a version based on the latest from wfau/aglais:master
+        # https://github.com/wfau/aglais/commit/d0809eb4c39f46c8db7dabfc5af79a628ca1ac70
+
+    Result:
+
+        FAILED
+
+
+# -----------------------------------------------------
+# Fetch target branch
+#[user@desktop]
+
+    source "${HOME:?}/aglais.env"
+    pushd "${AGLAIS_CODE}"
+        git checkout 'issue-lib-dependencies'
+    popd
+
+
+
+# -----------------------------------------------------
+# Create a container to work with.
+#[user@desktop] + + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-large-06' + +.. + +TASK [Install the required Python packages] ***************************************************************************************************************************************************************** +task path: /deployments/hadoop-yarn/ansible/29-install-pip-libs.yml:41 + +... + + +/pip-record-2dwvespp/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-x6volqvn/pillow/\n"} +fatal: [master01]: FAILED! => {"changed": false, "cmd": ["/usr/bin/pip3", "install", "-r", "/tmp/requirements.txt"], "msg": "stdout: Collecting numpy==1.20.3 (from -r /tmp/requirements.txt (line 1))\n Downloading https://files.pythonhosted.org/packages/a5/42/560d269f604d3e186a57c21a363e77e199358d054884e61b73e405dd217c/numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.3MB)\nCollecting scipy==1.6.3 (from -r /tmp/requirements.txt (line 2))\n Downloading https://files.pythonhosted.org/packages/7d/e8/43ffca541d2f208d516296950b25fe1084b35c2881f4d444c1346ca75815/scipy-1.6.3-cp37-cp37m-manylinux1_x86_64.whl (27.4MB)\nCollecting matplotlib==3.4.2 (from -r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/24/33/5568d443ba438d95d4db635dd69958056f087e57e1026bee56f959d53f9d/matplotlib-3.4.2-cp37-cp37m-manylinux1_x86_64.whl (10.3MB)\nCollecting grpcio==1.37.1 (from -r /tmp/requirements.txt (line 4))\n Downloading https://files.pythonhosted.org/packages/13/73/4d5d3dd3c3e31161283e4e94a098983e84de61af6bed25a2b71ab4d280b7/grpcio-1.37.1-cp37-cp37m-manylinux2010_x86_64.whl (4.1MB)\nCollecting jupyter==1.0.0 (from -r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl\nCollecting Cython==0.29.23 (from -r /tmp/requirements.txt (line 6))\n Downloading https://files.pythonhosted.org/packages/0c/15/cca3ac44776df9ee27286941315dd8b14a598e8d80970200d05f720b9274/Cython-0.29.23-cp37-cp37m-manylinux1_x86_64.whl (2.0MB)\nCollecting protobuf==3.16.0 (from -r /tmp/requirements.txt (line 7))\n Downloading https://files.pythonhosted.org/packages/cd/4d/b5088b78457f4b4b729313fa4a6c67481fe3d2c4cd0a2e5bb7c873b6bb25/protobuf-3.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0MB)\nCollecting pandas==1.2.4 (from -r /tmp/requirements.txt (line 8))\n Downloading 
https://files.pythonhosted.org/packages/51/51/48f3fc47c4e2144da2806dfb6629c4dd1fa3d5a143f9652b141e979a8ca9/pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl (9.9MB)\nCollecting healpy==1.14.0 (from -r /tmp/requirements.txt (line 9))\n Downloading https://files.pythonhosted.org/packages/39/66/db489e95df3091afb79289680badac1def7f7b13090f0255c1b0c750b889/healpy-1.14.0-cp37-cp37m-manylinux1_x86_64.whl (15.8MB)\nCollecting astropy==4.2.1 (from -r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/27/0c/c946f63b0a6cf4c385a96de9bffc92abc0ec4e131405d2daa7f11668086b/astropy-4.2.1-cp37-cp37m-manylinux1_x86_64.whl (9.7MB)\nCollecting astroquery==0.4.1 (from -r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/1b/f8/4690523783691ed816b3469c3ec611af3798594d37ade510dd918d59f57e/astroquery-0.4.1.tar.gz (6.5MB)\nCollecting scikit-learn==0.24.2 (from -r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/a8/eb/a48f25c967526b66d5f1fa7a984594f0bf0a5afafa94a8c4dbc317744620/scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl (22.3MB)\nCollecting hdbscan==0.8.27 (from -r /tmp/requirements.txt (line 13))\n Downloading https://files.pythonhosted.org/packages/32/bb/59a75bc5ac66a9b4f9b8f979e4545af0e98bb1ca4e6ae96b3b956b554223/hdbscan-0.8.27.tar.gz (6.4MB)\n Installing build dependencies: started\n Installing build dependencies: finished with status 'done'\n Getting requirements to build wheel: started\n Getting requirements to build wheel: finished with status 'done'\n Preparing wheel metadata: started\n Preparing wheel metadata: finished with status 'done'\nCollecting pyvo==1.1 (from -r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/cf/8d/cdef5613bb450495d6fbef2e0408062f2d11f078b045987718936498204b/pyvo-1.1-py3-none-any.whl (802kB)\nRequirement already satisfied: python-dateutil>=2.7 in /usr/lib/python3.7/site-packages (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3)) (2.8.0)\nCollecting kiwisolver>=1.0.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/09/6b/6e567cb2e86d4e5939a9233f8734e26021b6a9c1bc4b1edccba236a84cc2/kiwisolver-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting cycler>=0.10 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/f7/d2/e07d3ebb2bd7af696440ce7e754c59dd546ffe1bbe732c8ab68b9c834e61/cycler-0.10.0-py2.py3-none-any.whl\nCollecting pillow>=6.2.0 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/7d/2a/2fc11b54e2742db06297f7fa7f420a0e3069fdcf0e4b57dfec33f0b08622/Pillow-8.4.0.tar.gz (49.4MB)\nCollecting pyparsing>=2.2.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/8a/bb/488841f56197b13700afd5658fc279a2025a39e22449b7cf29864669b15d/pyparsing-2.4.7-py2.py3-none-any.whl (67kB)\nRequirement already satisfied: six>=1.5.2 in /usr/lib/python3.7/site-packages (from grpcio==1.37.1->-r /tmp/requirements.txt (line 4)) (1.12.0)\nCollecting ipykernel (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4a/c8/2a8a5cb1afdecfa92c000e3a5d63a9fdd1b7fe77570f65536b3f05a05f14/ipykernel-6.4.1-py3-none-any.whl (124kB)\nCollecting notebook (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/25/83/c711332a3531afcc1a76e523bc1ceec309497d5faa99260fd50e920e7686/notebook-6.4.4-py3-none-any.whl (9.9MB)\nCollecting qtconsole (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3a/57/c8fc1fc6fb6bc03caca20ace9cd0ac0e16cc052b51cbe3acbeeb53abcb18/qtconsole-5.1.1-py3-none-any.whl (119kB)\nCollecting jupyter-console (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/cd/aa2670ffc99eb3e5bbe2294c71e4bf46a9804af4f378d09d7a8950996c9b/jupyter_console-6.4.0-py3-none-any.whl\nCollecting ipywidgets (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6b/bb/285066ddd710779cb69f03d42fa72fbfe4352b4895eb6abab551eae1535a/ipywidgets-7.6.5-py2.py3-none-any.whl (121kB)\nCollecting nbconvert (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/19/c7/f7d49d1347b87a6c9324688ead2f02e1c119b20e0cc0474e69edfe63ff11/nbconvert-6.2.0-py3-none-any.whl (553kB)\nRequirement already satisfied: pytz>=2017.3 in /usr/lib/python3.7/site-packages (from pandas==1.2.4->-r /tmp/requirements.txt (line 8)) (2018.5)\nCollecting pyerfa (from astropy==4.2.1->-r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/7e/0d/9afb1d671a41f89411987042cd7fc3fb090478380955cf6359bcd16a1b73/pyerfa-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl (746kB)\nRequirement already satisfied: requests>=2.4.3 in /usr/lib/python3.7/site-packages (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.21.0)\nCollecting keyring>=4.0 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/58/b7/cc5a5321a6119e23ee85745ba204a67d646835e8882ba36eece32ee2b4e1/keyring-23.2.1-py3-none-any.whl\nCollecting beautifulsoup4>=4.3.2 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/69/bf/f0f194d3379d3f3347478bd267f754fc68c11cbf2fe302a6ab69447b1417/beautifulsoup4-4.10.0-py3-none-any.whl (97kB)\nCollecting html5lib>=0.999 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl (112kB)\nCollecting threadpoolctl>=2.0.0 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/ff/fe/8aaca2a0db7fd80f0b2cf8a16a034d3eea8102d58ff9331d2aaf1f06766a/threadpoolctl-3.0.0-py3-none-any.whl\nCollecting joblib>=0.11 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/3e/d5/0163eb0cfa0b673aa4fe1cd3ea9d8a81ea0f32e50807b0c295871e4aab2e/joblib-1.1.0-py2.py3-none-any.whl (306kB)\nCollecting mimeparse (from pyvo==1.1->-r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/38/0c/7b02c30765658744acc51876781c580234cb1110296b231a3a524722f9c7/mimeparse-0.1.3.tar.gz\nCollecting traitlets<6.0,>=4.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/38/4c/466298b114eea62f300dbef98cc2c33c6cbc439f1f71bc199c674ae23c2c/traitlets-5.1.0-py3-none-any.whl (101kB)\nCollecting importlib-metadata<5; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n 
Downloading https://files.pythonhosted.org/packages/71/c2/cb1855f0b2a0ae9ccc9b69f150a7aebd4a8d815bd951e74621c4154c52a8/importlib_metadata-4.8.1-py3-none-any.whl\nCollecting debugpy<2.0,>=1.0.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5d/db/2e47db1dc6e25741fe910bce1b7aad15b5ab53a8c683bf51108c9f9e07a3/debugpy-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.9MB)\nCollecting matplotlib-inline<0.2.0,>=0.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a6/2d/2230afd570c70074e80fd06857ba2bdc5f10c055bd9125665fe276fadb67/matplotlib_inline-0.1.3-py3-none-any.whl\nCollecting argcomplete>=1.12.3; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/b7/9e/9dc74d330c07866d72f62d553fe8bdbe32786ff247a14e68b5659963e6bd/argcomplete-1.12.3-py2.py3-none-any.whl\nCollecting ipython<8.0,>=7.23.1 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/76/d1/e6166fc278a0aab9c2997ae241346837368fc9aa0c6eea9b0dbe2d727004/ipython-7.28.0-py3-none-any.whl (788kB)\nCollecting jupyter-client<8.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/51/06efe08a819c36215e02750b50ac1e5e322303a8369ec1bc4e915d485ad4/jupyter_client-7.0.6-py3-none-any.whl (125kB)\nCollecting tornado<7.0,>=4.2 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/91/a8/9c5902233fa3c2e6a889cbd164333ddda5009669f494e3fadbeee2c03af5/tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl (428kB)\nCollecting ipython-genutils (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/fa/bc/9bd3b5c2b4774d5f33b2d544f1460be9df7df2fe42f352135381c347c69a/ipython_genutils-0.2.0-py2.py3-none-any.whl\nCollecting prometheus-client (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/da/4e8471ff825769581593b5b84769d32f58e5373b59fccaf355d3529ad530/prometheus_client-0.11.0-py2.py3-none-any.whl (56kB)\nCollecting Send2Trash>=1.5.0 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/47/26/3435896d757335ea53dce5abf8d658ca80757a7a06258451b358f10232be/Send2Trash-1.8.0-py3-none-any.whl\nCollecting pyzmq>=17 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/1e/cc/fb6b935a6c046be4b7728fea1f41998644dfaa25dab7837cf933bc4f7db9/pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting argon2-cffi (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/50/85/fa444619ba3709b8969a75bf051375261801d267bb69d6bd1764dabe528f/argon2_cffi-21.1.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl (96kB)\nRequirement already satisfied: jinja2 in /usr/lib/python3.7/site-packages (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.10)\nCollecting jupyter-core>=4.6.1 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/ad/b3/160e578a3bcee2c3b2c60990f249bc84c56862757a7d2be1d6b55d66b2d3/jupyter_core-4.8.1-py3-none-any.whl (86kB)\nCollecting nbformat (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/e7/c7/dd50978c637a7af8234909277c4e7ec1b71310c13fb3135f3c8f5b6e045f/nbformat-5.1.3-py3-none-any.whl (178kB)\nCollecting terminado>=0.8.3 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/cb/17/b1162b39786c44e14d30ee557fbf41276c4a966dab01106c15fb70f5c27a/terminado-0.12.1-py3-none-any.whl\nCollecting pygments (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/78/c8/8d9be2f72d8f465461f22b5f199c04f7ada933add4dae6e2468133c17471/Pygments-2.10.0-py3-none-any.whl (1.0MB)\nCollecting qtpy (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/73/47/cc42c2b4fe4ddb7e289ef8f098c7249903ad09cd3f6ee8ec17c63de2b728/QtPy-1.11.2-py2.py3-none-any.whl (58kB)\nCollecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 (from jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/c6/37/ec72228971dbaf191243b8ee383c6a3834b5cde23daab066dfbfbbd5438b/prompt_toolkit-3.0.20-py3-none-any.whl (370kB)\nCollecting jupyterlab-widgets>=1.0.0; python_version >= \"3.6\" (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/18/4d/22a93473bca99c80f2d23f867ebbfee2f6c8e186bf678864eec641500910/jupyterlab_widgets-1.0.2-py3-none-any.whl (243kB)\nCollecting widgetsnbextension~=3.5.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6c/7b/7ac231c20d2d33c445eaacf8a433f4e22c60677eb9776c7c5262d7ddee2d/widgetsnbextension-3.5.1-py2.py3-none-any.whl (2.2MB)\nCollecting testpath (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/87/5422f6d056bfbded920ccf380a65de3713a3b95a95ba2255be2a3fb4f464/testpath-0.5.0-py3-none-any.whl (84kB)\nCollecting defusedxml (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl\nCollecting nbclient<0.6.0,>=0.5.0 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a7/ed/b764fa931614cb7ed9bebbc42532daecef405d6bef660eeda882f6c23b98/nbclient-0.5.4-py3-none-any.whl (66kB)\nCollecting jupyterlab-pygments (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a8/6f/c34288766797193b512c6508f5994b830fb06134fdc4ca8214daba0aa443/jupyterlab_pygments-0.1.2-py2.py3-none-any.whl\nCollecting pandocfilters>=1.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5e/a8/878258cffd53202a6cc1903c226cf09e58ae3df6b09f8ddfa98033286637/pandocfilters-1.5.0-py2.py3-none-any.whl\nCollecting bleach (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/64/cc/74d634e1e5659742973a23bb441404c53a7bedb6cd3962109ca5efb703e8/bleach-4.1.0-py2.py3-none-any.whl (157kB)\nCollecting entrypoints>=0.2.2 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/c6/44694103f8c221443ee6b0041f69e2740d89a25641e62fb4f2ee568f2f9c/entrypoints-0.3-py2.py3-none-any.whl\nCollecting mistune<2,>=0.8.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/ec/4b43dae793655b7d8a25f76119624350b4d65eb663459eb9603d7f1f0345/mistune-0.8.4-py2.py3-none-any.whl\nRequirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (3.0.4)\nRequirement already satisfied: idna<2.9,>=2.5 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.7)\nRequirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (1.24.1)\nCollecting SecretStorage>=3.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/d9/1e/29cd69fdac7391aa51510dfd42aa70b4e6a826c8cd019ee2a8ab9ec0777f/SecretStorage-3.3.1-py3-none-any.whl\nCollecting jeepney>=0.4.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/14/b8/bb3e34d71472140f9bfdf5d77cd063e2cc964b72b1bb0b70fe3c1e7db932/jeepney-0.7.1-py3-none-any.whl (54kB)\nCollecting soupsieve>1.2 (from beautifulsoup4>=4.3.2->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/36/69/d82d04022f02733bf9a72bc3b96332d360c0c5307096d76f6bb7489f7e57/soupsieve-2.2.1-py3-none-any.whl\nCollecting webencodings (from html5lib>=0.999->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl\nCollecting zipp>=0.5 (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/bd/df/d4a4974a3e3957fd1c1fa3082366d7fff6e428ddb55f074bf64876f8e8ad/zipp-3.6.0-py3-none-any.whl\nCollecting typing-extensions>=3.6.4; python_version < \"3.8\" (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/74/60/18783336cc7fcdd95dae91d73477830aa53f5d3181ae4fe20491d7fc3199/typing_extensions-3.10.0.2-py3-none-any.whl\nCollecting pexpect>4.3; sys_platform != \"win32\" (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/39/7b/88dbb785881c28a102619d46423cb853b46dbccc70d3ac362d99773a78ce/pexpect-4.8.0-py2.py3-none-any.whl (59kB)\nCollecting backcall (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4c/1c/ff6546b6c12603d8dd1070aa3c3d273ad4c07f5771689a7b69a550e8c951/backcall-0.2.0-py2.py3-none-any.whl\nCollecting decorator (from 
ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3d/cc/d7b758e54779f7e465179427de7e78c601d3330d6c411ea7ba9ae2f38102/decorator-5.1.0-py3-none-any.whl\nCollecting pickleshare (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/9a/41/220f49aaea88bc6fa6cba8d05ecf24676326156c23b991e80b3f2fc24c77/pickleshare-0.7.5-py2.py3-none-any.whl\nCollecting jedi>=0.16 (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/f9/36/7aa67ae2663025b49e8426ead0bad983fee1b73f472536e9790655da0277/jedi-0.18.0-py2.py3-none-any.whl (1.4MB)\nRequirement already satisfied: setuptools>=18.5 in /usr/lib/python3.7/site-packages (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (40.8.0)\nCollecting nest-asyncio>=1.5 (from jupyter-client<8.0->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/52/e2/9b37da54e6e9094d2f558ae643d1954a0fa8215dfee4fa261f31c5439796/nest_asyncio-1.5.1-py3-none-any.whl\nRequirement already satisfied: cffi>=1.0.0 in /usr/lib64/python3.7/site-packages (from argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.11.5)\nRequirement already satisfied: MarkupSafe>=0.23 in /usr/lib64/python3.7/site-packages (from jinja2->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.1.1)\nRequirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/lib/python3.7/site-packages (from nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (3.0.1)\nCollecting ptyprocess; os_name != \"nt\" (from terminado>=0.8.3->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl\nCollecting wcwidth (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/7c/e39aca596badaf1b78e8f547c807b04dae603a433d3e7a7e04d67f2ef3e5/wcwidth-0.2.5-py2.py3-none-any.whl\nCollecting packaging (from bleach->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/77/e2362b676dc5008d81be423070dd9577fa03be5da2ba1105811900fda546/packaging-21.0-py3-none-any.whl (40kB)\nRequirement already satisfied: cryptography>=2.0 in /usr/lib64/python3.7/site-packages (from SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.6.1)\nCollecting parso<0.9.0,>=0.8.0 (from jedi>=0.16->ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a9/c4/d5476373088c120ffed82f34c74b266ccae31a68d665b837354d4d8dc8be/parso-0.8.2-py2.py3-none-any.whl (94kB)\nRequirement already satisfied: pycparser in /usr/lib/python3.7/site-packages (from cffi>=1.0.0->argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.14)\nRequirement already satisfied: attrs>=17.4.0 in /usr/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (18.2.0)\nRequirement already satisfied: pyrsistent>=0.14.0 in 
/usr/lib64/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (0.14.11)\nRequirement already satisfied: asn1crypto>=0.21.0 in /usr/lib/python3.7/site-packages (from cryptography>=2.0->SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (0.24.0)\nBuilding wheels for collected packages: hdbscan\n Building wheel for hdbscan (PEP 517): started\n Building wheel for hdbscan (PEP 517): finished with status 'done'\n Stored in directory: /root/.cache/pip/wheels/42/63/fb/314ad6c3b270887a3ecb588b8e5aac50b0fad38ff89bb6dff2\nSuccessfully built hdbscan\nInstalling collected packages: numpy, scipy, kiwisolver, cycler, pillow, pyparsing, matplotlib, grpcio, traitlets, zipp, typing-extensions, importlib-metadata, debugpy, matplotlib-inline, argcomplete, ptyprocess, pexpect, pygments, backcall, decorator, pickleshare, parso, jedi, wcwidth, prompt-toolkit, ipython, jupyter-core, tornado, entrypoints, nest-asyncio, pyzmq, jupyter-client, ipython-genutils, ipykernel, prometheus-client, Send2Trash, argon2-cffi, nbformat, testpath, defusedxml, nbclient, jupyterlab-pygments, pandocfilters, webencodings, packaging, bleach, mistune, nbconvert, terminado, notebook, qtpy, qtconsole, jupyter-console, jupyterlab-widgets, widgetsnbextension, ipywidgets, jupyter, Cython, protobuf, pandas, pyerfa, astropy, healpy, jeepney, SecretStorage, keyring, soupsieve, beautifulsoup4, html5lib, astroquery, threadpoolctl, joblib, scikit-learn, hdbscan, mimeparse, pyvo\n Running setup.py install for pillow: started\n Running setup.py install for pillow: finished with status 'error'\n Complete output from command /usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-jruklut2/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-4kpto90b/install-record.txt --single-version-externally-managed --compile:\n running install\n running build\n running build_py\n creating build\n creating build/lib.linux-x86_64-3.7\n creating build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegPresets.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FliImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ContainerIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GifImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageWin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XbmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_version.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePalette.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_tkinter_finder.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Image.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SgiImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BdfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw2.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MicImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMode.py -> 
build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcnsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageOps.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/features.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GribStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XVThumbImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_binary.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTransform.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageSequence.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageStat.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageGrab.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__init__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfParser.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/EpsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WebPImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageChops.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TarIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ExifTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpPaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SunImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/McIdasImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SpiderImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IptcImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTk.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/CurImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BmpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFilter.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PyAccess.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageShow.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageEnhance.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FpxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FtexImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WalImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImtImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PalmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PixarImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Hdf5StubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageQt.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GbrImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpGradientFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PngImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MspImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FitsStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying 
src/PIL/PsdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GdImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFont.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Jpeg2KImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__main__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BlpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageColor.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DdsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageCms.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TgaImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WmfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_util.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMorph.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PSDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BufrStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n running egg_info\n writing src/Pillow.egg-info/PKG-INFO\n writing dependency_links to src/Pillow.egg-info/dependency_links.txt\n writing top-level names to src/Pillow.egg-info/top_level.txt\n reading manifest file 'src/Pillow.egg-info/SOURCES.txt'\n reading manifest template 'MANIFEST.in'\n warning: no files found matching '*.c'\n warning: no files found matching '*.h'\n warning: no files found matching '*.sh'\n warning: no previously-included files found matching '.appveyor.yml'\n warning: no previously-included files found matching '.clang-format'\n warning: no previously-included files found matching '.coveragerc'\n warning: no previously-included files found matching '.editorconfig'\n warning: no previously-included files found matching '.readthedocs.yml'\n warning: no previously-included files found matching 'codecov.yml'\n warning: no previously-included files matching '.git*' found anywhere in distribution\n warning: no previously-included files matching '*.pyc' found anywhere in distribution\n warning: no previously-included files matching '*.so' found anywhere in distribution\n no previously-included directories found matching '.ci'\n writing manifest file 'src/Pillow.egg-info/SOURCES.txt'\n running build_ext\n \n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n Traceback (most recent call last):\n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 1024, in \n zip_safe=not (debug_build() or PLATFORM_MINGW),\n File \"/usr/lib/python3.7/site-packages/setuptools/__init__.py\", line 145, in setup\n return distutils.core.setup(**attrs)\n File \"/usr/lib64/python3.7/distutils/core.py\", line 148, in setup\n dist.run_commands()\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 966, in run_commands\n self.run_command(cmd)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/install.py\", line 61, in run\n return 
orig.install.run(self)\n File \"/usr/lib64/python3.7/distutils/command/install.py\", line 556, in run\n self.run_command('build')\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib64/python3.7/distutils/command/build.py\", line 135, in run\n self.run_command(cmd_name)\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/build_ext.py\", line 78, in run\n _build_ext.run(self)\n File \"/usr/lib64/python3.7/distutils/command/build_ext.py\", line 340, in run\n self.build_extensions()\n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 790, in build_extensions\n raise RequiredDependencyException(f)\n __main__.RequiredDependencyException: zlib\n \n During handling of the above exception, another exception occurred:\n \n Traceback (most recent call last):\n File \"\", line 1, in \n File \"/tmp/pip-install-jruklut2/pillow/setup.py\", line 1037, in \n raise RequiredDependencyException(msg)\n __main__.RequiredDependencyException:\n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n \n \n ----------------------------------------\n\n:stderr: WARNING: Running pip install with root privileges is generally not a good idea. Try `pip3 install --user` instead.\nCommand \"/usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-jruklut2/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-4kpto90b/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-jruklut2/pillow/\n"} +fatal: [zeppelin]: FAILED! 
=> {"changed": false, "cmd": ["/usr/bin/pip3", "install", "-r", "/tmp/requirements.txt"], "msg": "stdout: Collecting numpy==1.20.3 (from -r /tmp/requirements.txt (line 1))\n Downloading https://files.pythonhosted.org/packages/a5/42/560d269f604d3e186a57c21a363e77e199358d054884e61b73e405dd217c/numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.3MB)\nCollecting scipy==1.6.3 (from -r /tmp/requirements.txt (line 2))\n Downloading https://files.pythonhosted.org/packages/7d/e8/43ffca541d2f208d516296950b25fe1084b35c2881f4d444c1346ca75815/scipy-1.6.3-cp37-cp37m-manylinux1_x86_64.whl (27.4MB)\nCollecting matplotlib==3.4.2 (from -r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/24/33/5568d443ba438d95d4db635dd69958056f087e57e1026bee56f959d53f9d/matplotlib-3.4.2-cp37-cp37m-manylinux1_x86_64.whl (10.3MB)\nCollecting grpcio==1.37.1 (from -r /tmp/requirements.txt (line 4))\n Downloading https://files.pythonhosted.org/packages/13/73/4d5d3dd3c3e31161283e4e94a098983e84de61af6bed25a2b71ab4d280b7/grpcio-1.37.1-cp37-cp37m-manylinux2010_x86_64.whl (4.1MB)\nCollecting jupyter==1.0.0 (from -r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl\nCollecting Cython==0.29.23 (from -r /tmp/requirements.txt (line 6))\n Downloading https://files.pythonhosted.org/packages/0c/15/cca3ac44776df9ee27286941315dd8b14a598e8d80970200d05f720b9274/Cython-0.29.23-cp37-cp37m-manylinux1_x86_64.whl (2.0MB)\nCollecting protobuf==3.16.0 (from -r /tmp/requirements.txt (line 7))\n Downloading https://files.pythonhosted.org/packages/cd/4d/b5088b78457f4b4b729313fa4a6c67481fe3d2c4cd0a2e5bb7c873b6bb25/protobuf-3.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0MB)\nCollecting pandas==1.2.4 (from -r /tmp/requirements.txt (line 8))\n Downloading https://files.pythonhosted.org/packages/51/51/48f3fc47c4e2144da2806dfb6629c4dd1fa3d5a143f9652b141e979a8ca9/pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl (9.9MB)\nCollecting healpy==1.14.0 (from -r /tmp/requirements.txt (line 9))\n Downloading https://files.pythonhosted.org/packages/39/66/db489e95df3091afb79289680badac1def7f7b13090f0255c1b0c750b889/healpy-1.14.0-cp37-cp37m-manylinux1_x86_64.whl (15.8MB)\nCollecting astropy==4.2.1 (from -r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/27/0c/c946f63b0a6cf4c385a96de9bffc92abc0ec4e131405d2daa7f11668086b/astropy-4.2.1-cp37-cp37m-manylinux1_x86_64.whl (9.7MB)\nCollecting astroquery==0.4.1 (from -r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/1b/f8/4690523783691ed816b3469c3ec611af3798594d37ade510dd918d59f57e/astroquery-0.4.1.tar.gz (6.5MB)\nCollecting scikit-learn==0.24.2 (from -r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/a8/eb/a48f25c967526b66d5f1fa7a984594f0bf0a5afafa94a8c4dbc317744620/scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl (22.3MB)\nCollecting hdbscan==0.8.27 (from -r /tmp/requirements.txt (line 13))\n Downloading https://files.pythonhosted.org/packages/32/bb/59a75bc5ac66a9b4f9b8f979e4545af0e98bb1ca4e6ae96b3b956b554223/hdbscan-0.8.27.tar.gz (6.4MB)\n Installing build dependencies: started\n Installing build dependencies: finished with status 'done'\n Getting requirements to build wheel: started\n Getting requirements to build wheel: finished with status 'done'\n Preparing wheel metadata: started\n Preparing 
wheel metadata: finished with status 'done'\nCollecting pyvo==1.1 (from -r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/cf/8d/cdef5613bb450495d6fbef2e0408062f2d11f078b045987718936498204b/pyvo-1.1-py3-none-any.whl (802kB)\nRequirement already satisfied: python-dateutil>=2.7 in /usr/lib/python3.7/site-packages (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3)) (2.8.0)\nCollecting pillow>=6.2.0 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/7d/2a/2fc11b54e2742db06297f7fa7f420a0e3069fdcf0e4b57dfec33f0b08622/Pillow-8.4.0.tar.gz (49.4MB)\nCollecting pyparsing>=2.2.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/8a/bb/488841f56197b13700afd5658fc279a2025a39e22449b7cf29864669b15d/pyparsing-2.4.7-py2.py3-none-any.whl (67kB)\nCollecting kiwisolver>=1.0.1 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/09/6b/6e567cb2e86d4e5939a9233f8734e26021b6a9c1bc4b1edccba236a84cc2/kiwisolver-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting cycler>=0.10 (from matplotlib==3.4.2->-r /tmp/requirements.txt (line 3))\n Downloading https://files.pythonhosted.org/packages/f7/d2/e07d3ebb2bd7af696440ce7e754c59dd546ffe1bbe732c8ab68b9c834e61/cycler-0.10.0-py2.py3-none-any.whl\nRequirement already satisfied: six>=1.5.2 in /usr/lib/python3.7/site-packages (from grpcio==1.37.1->-r /tmp/requirements.txt (line 4)) (1.12.0)\nCollecting ipykernel (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4a/c8/2a8a5cb1afdecfa92c000e3a5d63a9fdd1b7fe77570f65536b3f05a05f14/ipykernel-6.4.1-py3-none-any.whl (124kB)\nCollecting ipywidgets (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6b/bb/285066ddd710779cb69f03d42fa72fbfe4352b4895eb6abab551eae1535a/ipywidgets-7.6.5-py2.py3-none-any.whl (121kB)\nCollecting nbconvert (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/19/c7/f7d49d1347b87a6c9324688ead2f02e1c119b20e0cc0474e69edfe63ff11/nbconvert-6.2.0-py3-none-any.whl (553kB)\nCollecting qtconsole (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3a/57/c8fc1fc6fb6bc03caca20ace9cd0ac0e16cc052b51cbe3acbeeb53abcb18/qtconsole-5.1.1-py3-none-any.whl (119kB)\nCollecting notebook (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/25/83/c711332a3531afcc1a76e523bc1ceec309497d5faa99260fd50e920e7686/notebook-6.4.4-py3-none-any.whl (9.9MB)\nCollecting jupyter-console (from jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/cd/aa2670ffc99eb3e5bbe2294c71e4bf46a9804af4f378d09d7a8950996c9b/jupyter_console-6.4.0-py3-none-any.whl\nRequirement already satisfied: pytz>=2017.3 in /usr/lib/python3.7/site-packages (from pandas==1.2.4->-r /tmp/requirements.txt (line 8)) (2018.5)\nCollecting pyerfa (from astropy==4.2.1->-r /tmp/requirements.txt (line 10))\n Downloading https://files.pythonhosted.org/packages/7e/0d/9afb1d671a41f89411987042cd7fc3fb090478380955cf6359bcd16a1b73/pyerfa-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl (746kB)\nRequirement already satisfied: requests>=2.4.3 in /usr/lib/python3.7/site-packages (from astroquery==0.4.1->-r 
/tmp/requirements.txt (line 11)) (2.21.0)\nCollecting keyring>=4.0 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/58/b7/cc5a5321a6119e23ee85745ba204a67d646835e8882ba36eece32ee2b4e1/keyring-23.2.1-py3-none-any.whl\nCollecting beautifulsoup4>=4.3.2 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/69/bf/f0f194d3379d3f3347478bd267f754fc68c11cbf2fe302a6ab69447b1417/beautifulsoup4-4.10.0-py3-none-any.whl (97kB)\nCollecting html5lib>=0.999 (from astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl (112kB)\nCollecting joblib>=0.11 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/3e/d5/0163eb0cfa0b673aa4fe1cd3ea9d8a81ea0f32e50807b0c295871e4aab2e/joblib-1.1.0-py2.py3-none-any.whl (306kB)\nCollecting threadpoolctl>=2.0.0 (from scikit-learn==0.24.2->-r /tmp/requirements.txt (line 12))\n Downloading https://files.pythonhosted.org/packages/ff/fe/8aaca2a0db7fd80f0b2cf8a16a034d3eea8102d58ff9331d2aaf1f06766a/threadpoolctl-3.0.0-py3-none-any.whl\nCollecting mimeparse (from pyvo==1.1->-r /tmp/requirements.txt (line 14))\n Downloading https://files.pythonhosted.org/packages/38/0c/7b02c30765658744acc51876781c580234cb1110296b231a3a524722f9c7/mimeparse-0.1.3.tar.gz\nCollecting ipython-genutils (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/fa/bc/9bd3b5c2b4774d5f33b2d544f1460be9df7df2fe42f352135381c347c69a/ipython_genutils-0.2.0-py2.py3-none-any.whl\nCollecting ipython<8.0,>=7.23.1 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/76/d1/e6166fc278a0aab9c2997ae241346837368fc9aa0c6eea9b0dbe2d727004/ipython-7.28.0-py3-none-any.whl (788kB)\nCollecting debugpy<2.0,>=1.0.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5d/db/2e47db1dc6e25741fe910bce1b7aad15b5ab53a8c683bf51108c9f9e07a3/debugpy-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.9MB)\nCollecting importlib-metadata<5; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/71/c2/cb1855f0b2a0ae9ccc9b69f150a7aebd4a8d815bd951e74621c4154c52a8/importlib_metadata-4.8.1-py3-none-any.whl\nCollecting argcomplete>=1.12.3; python_version < \"3.8.0\" (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/b7/9e/9dc74d330c07866d72f62d553fe8bdbe32786ff247a14e68b5659963e6bd/argcomplete-1.12.3-py2.py3-none-any.whl\nCollecting tornado<7.0,>=4.2 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/91/a8/9c5902233fa3c2e6a889cbd164333ddda5009669f494e3fadbeee2c03af5/tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl (428kB)\nCollecting matplotlib-inline<0.2.0,>=0.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a6/2d/2230afd570c70074e80fd06857ba2bdc5f10c055bd9125665fe276fadb67/matplotlib_inline-0.1.3-py3-none-any.whl\nCollecting jupyter-client<8.0 (from 
ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3c/51/06efe08a819c36215e02750b50ac1e5e322303a8369ec1bc4e915d485ad4/jupyter_client-7.0.6-py3-none-any.whl (125kB)\nCollecting traitlets<6.0,>=4.1.0 (from ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/38/4c/466298b114eea62f300dbef98cc2c33c6cbc439f1f71bc199c674ae23c2c/traitlets-5.1.0-py3-none-any.whl (101kB)\nCollecting nbformat>=4.2.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/e7/c7/dd50978c637a7af8234909277c4e7ec1b71310c13fb3135f3c8f5b6e045f/nbformat-5.1.3-py3-none-any.whl (178kB)\nCollecting jupyterlab-widgets>=1.0.0; python_version >= \"3.6\" (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/18/4d/22a93473bca99c80f2d23f867ebbfee2f6c8e186bf678864eec641500910/jupyterlab_widgets-1.0.2-py3-none-any.whl (243kB)\nCollecting widgetsnbextension~=3.5.0 (from ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/6c/7b/7ac231c20d2d33c445eaacf8a433f4e22c60677eb9776c7c5262d7ddee2d/widgetsnbextension-3.5.1-py2.py3-none-any.whl (2.2MB)\nCollecting jupyterlab-pygments (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a8/6f/c34288766797193b512c6508f5994b830fb06134fdc4ca8214daba0aa443/jupyterlab_pygments-0.1.2-py2.py3-none-any.whl\nRequirement already satisfied: jinja2>=2.4 in /usr/lib/python3.7/site-packages (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.10)\nCollecting bleach (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/64/cc/74d634e1e5659742973a23bb441404c53a7bedb6cd3962109ca5efb703e8/bleach-4.1.0-py2.py3-none-any.whl (157kB)\nCollecting jupyter-core (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ad/b3/160e578a3bcee2c3b2c60990f249bc84c56862757a7d2be1d6b55d66b2d3/jupyter_core-4.8.1-py3-none-any.whl (86kB)\nCollecting mistune<2,>=0.8.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/ec/4b43dae793655b7d8a25f76119624350b4d65eb663459eb9603d7f1f0345/mistune-0.8.4-py2.py3-none-any.whl\nCollecting pandocfilters>=1.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/5e/a8/878258cffd53202a6cc1903c226cf09e58ae3df6b09f8ddfa98033286637/pandocfilters-1.5.0-py2.py3-none-any.whl\nCollecting testpath (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/87/5422f6d056bfbded920ccf380a65de3713a3b95a95ba2255be2a3fb4f464/testpath-0.5.0-py3-none-any.whl (84kB)\nCollecting defusedxml (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl\nCollecting entrypoints>=0.2.2 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/ac/c6/44694103f8c221443ee6b0041f69e2740d89a25641e62fb4f2ee568f2f9c/entrypoints-0.3-py2.py3-none-any.whl\nCollecting 
nbclient<0.6.0,>=0.5.0 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a7/ed/b764fa931614cb7ed9bebbc42532daecef405d6bef660eeda882f6c23b98/nbclient-0.5.4-py3-none-any.whl (66kB)\nCollecting pygments>=2.4.1 (from nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/78/c8/8d9be2f72d8f465461f22b5f199c04f7ada933add4dae6e2468133c17471/Pygments-2.10.0-py3-none-any.whl (1.0MB)\nCollecting qtpy (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/73/47/cc42c2b4fe4ddb7e289ef8f098c7249903ad09cd3f6ee8ec17c63de2b728/QtPy-1.11.2-py2.py3-none-any.whl (58kB)\nCollecting pyzmq>=17.1 (from qtconsole->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/1e/cc/fb6b935a6c046be4b7728fea1f41998644dfaa25dab7837cf933bc4f7db9/pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.1MB)\nCollecting prometheus-client (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/09/da/4e8471ff825769581593b5b84769d32f58e5373b59fccaf355d3529ad530/prometheus_client-0.11.0-py2.py3-none-any.whl (56kB)\nCollecting Send2Trash>=1.5.0 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/47/26/3435896d757335ea53dce5abf8d658ca80757a7a06258451b358f10232be/Send2Trash-1.8.0-py3-none-any.whl\nCollecting terminado>=0.8.3 (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/cb/17/b1162b39786c44e14d30ee557fbf41276c4a966dab01106c15fb70f5c27a/terminado-0.12.1-py3-none-any.whl\nCollecting argon2-cffi (from notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/50/85/fa444619ba3709b8969a75bf051375261801d267bb69d6bd1764dabe528f/argon2_cffi-21.1.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl (96kB)\nCollecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 (from jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/c6/37/ec72228971dbaf191243b8ee383c6a3834b5cde23daab066dfbfbbd5438b/prompt_toolkit-3.0.20-py3-none-any.whl (370kB)\nRequirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (3.0.4)\nRequirement already satisfied: idna<2.9,>=2.5 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.7)\nRequirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/lib/python3.7/site-packages (from requests>=2.4.3->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (1.24.1)\nCollecting jeepney>=0.4.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/14/b8/bb3e34d71472140f9bfdf5d77cd063e2cc964b72b1bb0b70fe3c1e7db932/jeepney-0.7.1-py3-none-any.whl (54kB)\nCollecting SecretStorage>=3.2; sys_platform == \"linux\" (from keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/d9/1e/29cd69fdac7391aa51510dfd42aa70b4e6a826c8cd019ee2a8ab9ec0777f/SecretStorage-3.3.1-py3-none-any.whl\nCollecting soupsieve>1.2 (from 
beautifulsoup4>=4.3.2->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/36/69/d82d04022f02733bf9a72bc3b96332d360c0c5307096d76f6bb7489f7e57/soupsieve-2.2.1-py3-none-any.whl\nCollecting webencodings (from html5lib>=0.999->astroquery==0.4.1->-r /tmp/requirements.txt (line 11))\n Downloading https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl\nRequirement already satisfied: setuptools>=18.5 in /usr/lib/python3.7/site-packages (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (40.8.0)\nCollecting pickleshare (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/9a/41/220f49aaea88bc6fa6cba8d05ecf24676326156c23b991e80b3f2fc24c77/pickleshare-0.7.5-py2.py3-none-any.whl\nCollecting pexpect>4.3; sys_platform != \"win32\" (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/39/7b/88dbb785881c28a102619d46423cb853b46dbccc70d3ac362d99773a78ce/pexpect-4.8.0-py2.py3-none-any.whl (59kB)\nCollecting backcall (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/4c/1c/ff6546b6c12603d8dd1070aa3c3d273ad4c07f5771689a7b69a550e8c951/backcall-0.2.0-py2.py3-none-any.whl\nCollecting decorator (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/3d/cc/d7b758e54779f7e465179427de7e78c601d3330d6c411ea7ba9ae2f38102/decorator-5.1.0-py3-none-any.whl\nCollecting jedi>=0.16 (from ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/f9/36/7aa67ae2663025b49e8426ead0bad983fee1b73f472536e9790655da0277/jedi-0.18.0-py2.py3-none-any.whl (1.4MB)\nCollecting zipp>=0.5 (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/bd/df/d4a4974a3e3957fd1c1fa3082366d7fff6e428ddb55f074bf64876f8e8ad/zipp-3.6.0-py3-none-any.whl\nCollecting typing-extensions>=3.6.4; python_version < \"3.8\" (from importlib-metadata<5; python_version < \"3.8.0\"->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/74/60/18783336cc7fcdd95dae91d73477830aa53f5d3181ae4fe20491d7fc3199/typing_extensions-3.10.0.2-py3-none-any.whl\nCollecting nest-asyncio>=1.5 (from jupyter-client<8.0->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/52/e2/9b37da54e6e9094d2f558ae643d1954a0fa8215dfee4fa261f31c5439796/nest_asyncio-1.5.1-py3-none-any.whl\nRequirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/lib/python3.7/site-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (3.0.1)\nRequirement already satisfied: MarkupSafe>=0.23 in /usr/lib64/python3.7/site-packages (from jinja2>=2.4->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.1.1)\nCollecting packaging (from bleach->nbconvert->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading 
https://files.pythonhosted.org/packages/3c/77/e2362b676dc5008d81be423070dd9577fa03be5da2ba1105811900fda546/packaging-21.0-py3-none-any.whl (40kB)\nCollecting ptyprocess; os_name != \"nt\" (from terminado>=0.8.3->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl\nRequirement already satisfied: cffi>=1.0.0 in /usr/lib64/python3.7/site-packages (from argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (1.11.5)\nCollecting wcwidth (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->jupyter-console->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/59/7c/e39aca596badaf1b78e8f547c807b04dae603a433d3e7a7e04d67f2ef3e5/wcwidth-0.2.5-py2.py3-none-any.whl\nRequirement already satisfied: cryptography>=2.0 in /usr/lib64/python3.7/site-packages (from SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (2.6.1)\nCollecting parso<0.9.0,>=0.8.0 (from jedi>=0.16->ipython<8.0,>=7.23.1->ipykernel->jupyter==1.0.0->-r /tmp/requirements.txt (line 5))\n Downloading https://files.pythonhosted.org/packages/a9/c4/d5476373088c120ffed82f34c74b266ccae31a68d665b837354d4d8dc8be/parso-0.8.2-py2.py3-none-any.whl (94kB)\nRequirement already satisfied: attrs>=17.4.0 in /usr/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (18.2.0)\nRequirement already satisfied: pyrsistent>=0.14.0 in /usr/lib64/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (0.14.11)\nRequirement already satisfied: pycparser in /usr/lib/python3.7/site-packages (from cffi>=1.0.0->argon2-cffi->notebook->jupyter==1.0.0->-r /tmp/requirements.txt (line 5)) (2.14)\nRequirement already satisfied: asn1crypto>=0.21.0 in /usr/lib/python3.7/site-packages (from cryptography>=2.0->SecretStorage>=3.2; sys_platform == \"linux\"->keyring>=4.0->astroquery==0.4.1->-r /tmp/requirements.txt (line 11)) (0.24.0)\nBuilding wheels for collected packages: hdbscan\n Building wheel for hdbscan (PEP 517): started\n Building wheel for hdbscan (PEP 517): finished with status 'done'\n Stored in directory: /root/.cache/pip/wheels/42/63/fb/314ad6c3b270887a3ecb588b8e5aac50b0fad38ff89bb6dff2\nSuccessfully built hdbscan\nInstalling collected packages: numpy, scipy, pillow, pyparsing, kiwisolver, cycler, matplotlib, grpcio, ipython-genutils, pickleshare, wcwidth, prompt-toolkit, ptyprocess, pexpect, traitlets, matplotlib-inline, backcall, decorator, pygments, parso, jedi, ipython, debugpy, zipp, typing-extensions, importlib-metadata, argcomplete, tornado, entrypoints, nest-asyncio, pyzmq, jupyter-core, jupyter-client, ipykernel, nbformat, jupyterlab-widgets, prometheus-client, Send2Trash, jupyterlab-pygments, webencodings, packaging, bleach, mistune, pandocfilters, testpath, defusedxml, nbclient, nbconvert, terminado, argon2-cffi, notebook, widgetsnbextension, ipywidgets, qtpy, qtconsole, jupyter-console, jupyter, Cython, protobuf, pandas, pyerfa, astropy, healpy, jeepney, SecretStorage, keyring, soupsieve, beautifulsoup4, html5lib, astroquery, joblib, threadpoolctl, scikit-learn, hdbscan, mimeparse, pyvo\n Running setup.py install for pillow: started\n Running setup.py install for pillow: finished with status 
'error'\n Complete output from command /usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-0bqc1gut/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-2hjj_zwp/install-record.txt --single-version-externally-managed --compile:\n running install\n running build\n running build_py\n creating build\n creating build/lib.linux-x86_64-3.7\n creating build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegPresets.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FliImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ContainerIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GifImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageWin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XbmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_version.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImagePalette.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_tkinter_finder.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Image.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SgiImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BdfFontFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw2.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MicImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMode.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcnsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageOps.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/features.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GribStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XVThumbImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_binary.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTransform.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageSequence.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageStat.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageGrab.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__init__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TiffTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfParser.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/JpegImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/EpsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PdfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WebPImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageChops.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TarIO.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ExifTags.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpPaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SunImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/McIdasImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FontFile.py -> 
build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/SpiderImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IptcImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageTk.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/CurImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/XpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BmpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFilter.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PyAccess.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageShow.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageEnhance.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FpxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FtexImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WalImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImtImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PalmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PixarImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Hdf5StubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageQt.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GbrImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GimpGradientFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PngImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/MspImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/FitsStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PsdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/GdImageFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PaletteFile.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageFont.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/Jpeg2KImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/__main__.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BlpImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcxImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageColor.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/DdsImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageCms.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/TgaImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PpmImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/WmfImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PcdImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/_util.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMorph.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/PSDraw.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/ImageMath.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/BufrStubImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n copying src/PIL/IcoImagePlugin.py -> build/lib.linux-x86_64-3.7/PIL\n running egg_info\n writing src/Pillow.egg-info/PKG-INFO\n writing dependency_links to src/Pillow.egg-info/dependency_links.txt\n writing top-level names to src/Pillow.egg-info/top_level.txt\n reading manifest file 'src/Pillow.egg-info/SOURCES.txt'\n reading manifest template 'MANIFEST.in'\n 
warning: no files found matching '*.c'\n warning: no files found matching '*.h'\n warning: no files found matching '*.sh'\n warning: no previously-included files found matching '.appveyor.yml'\n warning: no previously-included files found matching '.clang-format'\n warning: no previously-included files found matching '.coveragerc'\n warning: no previously-included files found matching '.editorconfig'\n warning: no previously-included files found matching '.readthedocs.yml'\n warning: no previously-included files found matching 'codecov.yml'\n warning: no previously-included files matching '.git*' found anywhere in distribution\n warning: no previously-included files matching '*.pyc' found anywhere in distribution\n warning: no previously-included files matching '*.so' found anywhere in distribution\n no previously-included directories found matching '.ci'\n writing manifest file 'src/Pillow.egg-info/SOURCES.txt'\n running build_ext\n \n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n Traceback (most recent call last):\n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 1024, in \n zip_safe=not (debug_build() or PLATFORM_MINGW),\n File \"/usr/lib/python3.7/site-packages/setuptools/__init__.py\", line 145, in setup\n return distutils.core.setup(**attrs)\n File \"/usr/lib64/python3.7/distutils/core.py\", line 148, in setup\n dist.run_commands()\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 966, in run_commands\n self.run_command(cmd)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/install.py\", line 61, in run\n return orig.install.run(self)\n File \"/usr/lib64/python3.7/distutils/command/install.py\", line 556, in run\n self.run_command('build')\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib64/python3.7/distutils/command/build.py\", line 135, in run\n self.run_command(cmd_name)\n File \"/usr/lib64/python3.7/distutils/cmd.py\", line 313, in run_command\n self.distribution.run_command(command)\n File \"/usr/lib64/python3.7/distutils/dist.py\", line 985, in run_command\n cmd_obj.run()\n File \"/usr/lib/python3.7/site-packages/setuptools/command/build_ext.py\", line 78, in run\n _build_ext.run(self)\n File \"/usr/lib64/python3.7/distutils/command/build_ext.py\", line 340, in run\n self.build_extensions()\n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 790, in build_extensions\n raise RequiredDependencyException(f)\n __main__.RequiredDependencyException: zlib\n \n During handling of the above exception, another exception occurred:\n \n Traceback (most recent call last):\n File \"\", line 1, in \n File \"/tmp/pip-install-0bqc1gut/pillow/setup.py\", line 1037, in \n raise RequiredDependencyException(msg)\n __main__.RequiredDependencyException:\n \n The headers or library files could not be found for zlib,\n a required dependency when compiling Pillow from source.\n \n Please see the install instructions at:\n https://pillow.readthedocs.io/en/latest/installation.html\n \n \n \n ----------------------------------------\n\n:stderr: WARNING: Running pip install with root privileges is generally not a good idea. 
Try `pip3 install --user` instead.\nCommand \"/usr/bin/python3 -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-0bqc1gut/pillow/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-2hjj_zwp/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-0bqc1gut/pillow/\n"}
+
+
+..
+
+
+# Error message:
+# The headers or library files could not be found for zlib,
+# a required dependency when compiling Pillow from source.
+# Please see the install instructions
+
+# zlib missing??
+
+
+# https://stackoverflow.com/questions/34631806/fail-during-installation-of-pillow-python-module-in-linux/34631976
+# "There is a bug reported for Pillow here, which indicates that libjpeg and zlib are now required as of Pillow 3.0.0."
+
+
+# Change /deployments/hadoop-yarn/ansible/29-install-pip-libs.yml to:
+
+..
+
+- name: "Install Python libraries"
+  hosts: masters:workers:zeppelin
+  gather_facts: false
+  vars_files:
+    - config/ansible.yml
+    - /tmp/ansible-vars.yml
+
+  tasks:
+
+    - name: "Install required system libraries"
+      become: yes
+      yum:
+        name: zlib-devel,libtiff-devel,libjpeg-devel,libzip-devel,freetype-devel,lcms2-devel,libwebp-devel,tcl-devel,tk-devel
+        update_cache: yes
+        state: present
+
+    - name: Copy pip requirements file into tmp
+      become: yes
+      copy:
+        src: "{{ playbook_dir | dirname | dirname }}/common/pip/requirements.txt"
+        dest: "/tmp/requirements.txt"
+
+    - name: Install the required Python packages
+      become: yes
+      pip:
+        requirements: "/tmp/requirements.txt"
+
+..
+
+
+# ------------------------------------------
+# Exit deployer..
+# Run test deploy again
+
+    source "${HOME:?}/aglais.env"
+
+    docker run \
+        --rm \
+        --tty \
+        --interactive \
+        --name ansibler \
+        --hostname ansibler \
+        --publish 3000:3000 \
+        --publish 8088:8088 \
+        --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \
+        --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \
+        --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \
+        --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \
+        atolmis/ansible-client:2021.08.25 \
+        bash
+
+
+# -----------------------------------------------------
+# Set the target cloud.
+#[root@ansibler]
+
+    cloudname=gaia-test
+
+
+# -----------------------------------------------------
+# Delete everything.
+#[root@ansibler]
+
+    time \
+        /deployments/openstack/bin/delete-all.sh \
+            "${cloudname:?}"
+
+    > Done
+
+
+# -----------------------------------------------------
+# Create everything, using a standard config.
+#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'cclake-large-06' + + + +PLAY RECAP ************************************************************************************************************************************************************************************************** +master01 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker01 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker02 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker03 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker04 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker05 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +worker06 : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +zeppelin : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +/ + +real 116m46.636s +user 24m51.897s +sys 6m17.502s + + +# --------------------------------------------------------------------------------------------------- +# Run Test Notebooks +# Run all tests in: https://github.com/wfau/aglais-testing/blob/main/config/notebooks/notebooks.json + + +SetUp [SUCCESS] + +Mean_proper_motions_over_the_sky [SUCCESS] + +Source_counts_over_the_sky.json [SUCCESS] + +Good_astrometric_solutions_via_ML_Random_Forrest_classifier [SUCCESS] + +QC_cuts_dev.json [SUCCESS] + +WD_detection_dev.json [SUCCESS] + + +[SUCCESS] diff --git a/notes/stv/20211022-ansible-deploy-with-benchmarks-01.txt b/notes/stv/20211022-ansible-deploy-with-benchmarks-01.txt new file mode 100644 index 00000000..426699d8 --- /dev/null +++ b/notes/stv/20211022-ansible-deploy-with-benchmarks-01.txt @@ -0,0 +1,132 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# + + Target: + + Run benchmark tests via Ansible + + + Result: + + Success + + +# ----------------------------------------------------- +# Fetch target branch +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + git checkout 'issue-benchmarking' + popd + + + + + +# ----------------------------------------------------- +# Create a container to work with. +#[user@desktop] + + source "${HOME:?}/aglais.env" + + docker run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --publish 3000:3000 \ + --publish 8088:8088 \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + cloudname=gaia-test + + +# ----------------------------------------------------- +# Delete everything. 
+#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > Done + + + +# ----------------------------------------------------- +# Create everything, using a standard config. +#[root@ansibler] + + + nohup /deployments/hadoop-yarn/bin/create-all.sh "${cloudname:?}" 'cclake-large-06' 'test' > output.log & + + > real 68m23.293s + > user 15m49.227s + > sys 3m38.770s + + + +tail -f -n 1000 output.log + +.. + + +TASK [Run benchmarker] ********************************************************* +changed: [localhost] => {"changed": true, "cmd": ["python3", "/tmp/run-test.py"], "delta": "2:23:01.745218", "end": "2021-10-22 14:38:16.417264", "rc": 0, "start": "2021-10-22 12:15:14.672046", "stderr": "", "stderr_lines": [], "stdout": "Test completed after: 8581.61 seconds\n{'SetUp': {'totaltime': '42.28', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Mean_proper_motions_over_the_sky': {'totaltime': '50.16', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Source_counts_over_the_sky.json': {'totaltime': '17.21', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '479.61', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'QC_cuts_dev.json': {'totaltime': '4717.80', 'status': 'SLOW', 'msg': '', 'valid': 'TRUE'}, 'WD_detection_dev.json': {'totaltime': '3274.56', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}}", "stdout_lines": ["Test completed after: 8581.61 seconds", "{'SetUp': {'totaltime': '42.28', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Mean_proper_motions_over_the_sky': {'totaltime': '50.16', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Source_counts_over_the_sky.json': {'totaltime': '17.21', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '479.61', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}, 'QC_cuts_dev.json': {'totaltime': '4717.80', 'status': 'SLOW', 'msg': '', 'valid': 'TRUE'}, 'WD_detection_dev.json': {'totaltime': '3274.56', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'}}"]} + +PLAY RECAP ********************************************************************* +localhost : ok=9 changed=6 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +/ + + +PLAY RECAP ************************************************************************************************************************************************************************************************** +localhost : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +monitor : ok=6 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + +/ + + +# Test completed after: 903.48 seconds + +SetUp: {'totaltime': '44.02', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +Source_counts_over_the_sky.json': {'totaltime': '33.37', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +Good_astrometric_solutions_via_ML_Random_Forrest_classifier': {'totaltime': '721.65', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +Mean_proper_motions_over_the_sky': {'totaltime': '104.43', 'status': u'SUCCESS', 'valid': 'TRUE', 'msg': ''} +QC_cuts_dev.json: {'totaltime': '4717.80', 'status': 'SLOW', 'msg': '', 'valid': 'TRUE'} +WD_detection_dev.json': {'totaltime': '3274.56', 'status': 'SUCCESS', 'msg': '', 'valid': 'TRUE'} + +# SUCCESS (1 SLOW) + + + diff --git a/notes/zrq/20211007-02-slack-export.txt b/notes/zrq/20211007-02-slack-export.txt index 652cffb6..b8df6fc2 100644 --- a/notes/zrq/20211007-02-slack-export.txt +++ 
b/notes/zrq/20211007-02-slack-export.txt @@ -26,8 +26,11 @@ #zrq-notes-zeppelin # -https://github.com/ErikKalkoken/slackchannel2pdf + How to export our data out of Slack ... + + https://github.com/ErikKalkoken/slackchannel2pdf + + https://webapps.stackexchange.com/questions/130485/how-to-export-slack-conversation-thread-without-admin-account -https://webapps.stackexchange.com/questions/130485/how-to-export-slack-conversation-thread-without-admin-account diff --git a/notes/zrq/20211011-01-hdbscan-config.txt b/notes/zrq/20211011-01-hdbscan-config.txt new file mode 100644 index 00000000..f0a4e71f --- /dev/null +++ b/notes/zrq/20211011-01-hdbscan-config.txt @@ -0,0 +1,738 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# +#zrq-notes-time +#zrq-notes-indent +#zrq-notes-crypto +#zrq-notes-ansible +#zrq-notes-osformat +#zrq-notes-zeppelin +# + + Target: + + Create a new deploy capable of handling 10^8 rows in HDBSCAN. + + Result: + + Work in progress + + +# ----------------------------------------------------- +# Synchronise our master with upstream changes. +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + + git checkout master + + git fetch upstream + + git merge upstream/master + + git push + + popd + + +# ----------------------------------------------------- +# Create a new branch, following on from previous branch. +#[user@desktop] + + prev_branch=20210907-zrq-config-merge + next_branch=$(date '+%Y%m%d')-zrq-hdbscan-config + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + + git checkout "${prev_branch:?}" + + git checkout -b "${next_branch:?}" + + git push --set-upstream 'origin' "$(git branch --show-current)" + + popd + + > Switched to branch '20210907-zrq-config-merge' + + > Switched to a new branch '20211011-zrq-hdbscan-config' + + > * [new branch] 20211011-zrq-hdbscan-config -> 20211011-zrq-hdbscan-config + > Branch '20211011-zrq-hdbscan-config' set up to track remote branch '20211011-zrq-hdbscan-config' from 'origin'. + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Edit the deployment configuration. +#[user@desktop] + + source "${HOME:?}/aglais.env" + pushd "${AGLAIS_CODE}" + + pushd deployments/hadoop-yarn/ansible/config + + cp cclake-large-06.yml \ + hdbscan-large-06.yml + + gedit hdbscan-large-06.yml & + + popd + popd + + > .... + > .... + + +# ----------------------------------------------------- +# Create a container to work with. 
+#[user@desktop] + + source "${HOME:?}/aglais.env" + + podman run \ + --rm \ + --tty \ + --interactive \ + --name ansibler \ + --hostname ansibler \ + --env "SSH_AUTH_SOCK=/mnt/ssh_auth_sock" \ + --volume "${SSH_AUTH_SOCK}:/mnt/ssh_auth_sock:rw,z" \ + --volume "${HOME:?}/clouds.yaml:/etc/openstack/clouds.yaml:ro,z" \ + --volume "${AGLAIS_CODE:?}/deployments:/deployments:ro,z" \ + atolmis/ansible-client:2021.08.25 \ + bash + + +# ----------------------------------------------------- +# Set the target cloud. +#[root@ansibler] + + # Using prod as dev because Dennis is using dev as prod. + + cloudname=gaia-prod + + +# ----------------------------------------------------- +# Delete everything. +#[root@ansibler] + + time \ + /deployments/openstack/bin/delete-all.sh \ + "${cloudname:?}" + + > real 3m36.914s + > user 1m20.854s + > sys 0m10.362s + + +# ----------------------------------------------------- +# Create everything, using the new config. +#[root@ansibler] + + time \ + /deployments/hadoop-yarn/bin/create-all.sh \ + "${cloudname:?}" \ + 'hdbscan-large-06' + + > real 36m29.272s + > user 10m48.747s + > sys 4m15.340s + + +# ----------------------------------------------------- +# Check the deployment status. +#[root@ansibler] + + cat '/tmp/aglais-status.yml' + + > aglais: + > status: + > deployment: + > type: hadoop-yarn + > conf: hdbscan-large-06 + > name: gaia-prod-20211011 + > date: 20211011T141830 + > spec: + > openstack: + > cloud: gaia-prod + + +# ----------------------------------------------------- +# Add the Zeppelin user accounts. +#[root@ansibler] + + ssh zeppelin + + pushd "${HOME}" + ln -s "zeppelin-0.8.2-bin-all" "zeppelin" + + pushd "zeppelin" + + # Manual edit to add names and passwords + vi conf/shiro.ini + + # Restart Zeppelin for the changes to take. + bin/zeppelin-daemon.sh restart + + popd + popd + exit + + > Zeppelin stop [ OK ] + > Zeppelin start [ OK ] + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Commit current changes from dev server. +#[user@desktop] + + ssh zeppelin-dev + + pushd /home/fedora/zeppelin/notebook + + git status + + > Your branch is ahead of 'origin/main' by 4 commits. + > (use "git push" to publish your local commits) + > + > Changes not staged for commit: + > (use "git add ..." to update what will be committed) + > (use "git checkout -- ..." to discard changes in working directory) + > + > modified: 2G7GZKWUH/note.json + > modified: 2G9BXYCKP/note.json + > modified: 2GGTT4U7N/note.json + > modified: 2GH936JT3/note.json + + + git add . + + git commit -m "Adding latest changes" + + > [main 0619cc8] Adding latest changes + > 4 files changed, 113 insertions(+), 82 deletions(-) + + + git push + + > Enumerating objects: 29, done. + > Counting objects: 100% (29/29), done. + > Delta compression using up to 27 threads + > Compressing objects: 100% (14/14), done. + > Writing objects: 100% (20/20), 7.60 KiB | 216.00 KiB/s, done. + > Total 20 (delta 11), reused 0 (delta 0) + > remote: Resolving deltas: 100% (11/11), completed with 5 local objects. + > To github.com:wfau/aglais-notebooks.git + > d89f96f..0619cc8 main -> main + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Pull the latest changes from GitHub. 
+#[root@ansibler] + + ssh zeppelin + + pushd /home/fedora/zeppelin + + mv -b notebook \ + notebook-origin + + git clone git@github.com:wfau/aglais-notebooks.git notebook + + bin/zeppelin-daemon.sh restart + + popd + exit + + > Zeppelin stop [ OK ] + > Zeppelin start [ OK ] + + +# ----------------------------------------------------- +# Add our secret function to the ansibler container. +# https://github.com/wfau/aglais/issues/525 +#[root@ansibler] + + # TODO Move this into the Ansible setup. + # TODO Move our secrets onto our infra-ops server. + + if [ ! -e "${HOME}/bin" ] + then + mkdir "${HOME}/bin" + fi + + cat > "${HOME}/bin/secret" << 'EOF' +ssh -n \ + 'secretserver' \ + "bin/secret '${1}'" +EOF + + chmod u+x "${HOME}/bin/secret" + + if [ ! -e "${HOME}/.ssh" ] + then + mkdir "${HOME}/.ssh" + fi + + cat >> "${HOME}/.ssh/config" << 'EOF' +Host secretserver + User Zarquan + Hostname data.metagrid.co.uk + PubkeyAcceptedKeyTypes +ssh-rsa +EOF + + ssh-keyscan 'data.metagrid.co.uk' >> "${HOME}/.ssh/known_hosts" + + secret frog + + > Green Frog + + +# ----------------------------------------------------- +# Get the public IP address of our Zeppelin node. +#[root@ansibler] + + deployname=$( + yq eval \ + '.aglais.status.deployment.name' \ + '/tmp/aglais-status.yml' + ) + + zeppelinid=$( + openstack \ + --os-cloud "${cloudname:?}" \ + server list \ + --format json \ + | jq -r '.[] | select(.Name == "'${deployname:?}'-zeppelin") | .ID' + ) + + zeppelinip=$( + openstack \ + --os-cloud "${cloudname:?}" \ + server show \ + --format json \ + "${zeppelinid:?}" \ + | jq -r ".addresses | .\"${deployname}-internal-network\" | .[1]" + ) + +cat << EOF +Zeppelin ID [${zeppelinid:?}] +Zeppelin IP [${zeppelinip:?}] +EOF + + > Zeppelin ID [47189cb3-561d-4aac-aff8-2235b3910380] + > Zeppelin IP [128.232.227.234] + + +# ----------------------------------------------------- +# Update our DuckDNS record. +#[root@ansibler] + + # Using prod as dev because Dennis is using dev as prod. + + ducktoken=$(secret 'aglais.duckdns.token') + duckipv4=${zeppelinip:?} + duckhost=aglais-prod + + curl "https://www.duckdns.org/update/${duckhost:?}/${ducktoken:?}/${duckipv4:?}" + + > OK + + +# ----------------------------------------------------- +# Add bind-utils to the client. +# https://github.com/wfau/atolmis/issues/17 +#[root@ansibler] + + dnf -y install bind-utils + + > .... + > Installed: + > bind-libs-32:9.16.21-1.fc34.x86_64 + > bind-license-32:9.16.21-1.fc34.noarch + > bind-utils-32:9.16.21-1.fc34.x86_64 + + +# ----------------------------------------------------- +# Check the DNS record. +#[root@ansibler] + + dig "${duckhost:?}.duckdns.org" + + > ;; ANSWER SECTION: + > aglais-prod.duckdns.org. 60 IN A 128.232.227.234 + + + dig "zeppelin.${cloudname}.aglais.uk" + + > ;; ANSWER SECTION: + > zeppelin.gaia-prod.aglais.uk. 600 IN CNAME aglais-prod.duckdns.org. + > aglais-prod.duckdns.org. 44 IN A 128.232.227.234 + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Login to our Zeppelin node and generate a new interpreter.json file. +# TODO Convert this to an Ansible playbook. +#[root@ansibler] + + ssh zeppelin + + # Create a new list of interpreter bindings. + find /home/fedora/zeppelin/notebook \ + -mindepth 1 \ + -maxdepth 1 \ + -type d \ + ! -name '.git' \ + -printf '%f\n' \ + | sed ' + 1 i \ +"interpreterBindings": { + s/^\(.*\)$/"\1": ["spark", "md", "sh"]/ + $ ! 
s/^\(.*\)$/\1,/ + $ a \ +}, + ' \ + | tee /tmp/bindings.json + + > "interpreterBindings": { + > "2C35YU814": ["spark", "md", "sh"], + > "2EZ3MQG4S": ["spark", "md", "sh"], + > .... + > .... + > "2G9BXYCKP": ["spark", "md", "sh"], + > "2FF2VTAAM": ["spark", "md", "sh"] + > }, + + + # Replace the existing interpreter bindings. + jq ' + del(.interpreterBindings[]) + ' \ + /home/fedora/zeppelin/conf/interpreter.json \ + | sed ' + /interpreterBindings/ { + r /tmp/bindings.json + d + } + ' \ + | jq '.' \ + | tee /tmp/interpreter-new.json + + > .... + > .... + + + # Replace the original interpreter.json + mv /home/fedora/zeppelin/conf/interpreter.json \ + /home/fedora/zeppelin/conf/interpreter.origin + + cp /tmp/interpreter-new.json \ + /home/fedora/zeppelin/conf/interpreter.json + + # Restart Zeppelin to take effect + /home/fedora/zeppelin/bin/zeppelin-daemon.sh restart + + exit + + > Zeppelin stop [ OK ] + > Zeppelin start [ OK ] + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Setup a SSH tunnel SOCKS proxy. +# https://www.digitalocean.com/community/tutorials/how-to-route-web-traffic-securely-without-a-vpn-using-a-socks-tunnel +# Running 'htop' on the Zeppelin node to keep the connection alive. +#[user@desktop] + + ssh \ + -t \ + -D "3000" \ + zeppelin-prod \ + " + htop + " + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Login to the Spark UI using Firefox. +# (*) using FoxyProxy Firefox plugin to select the SOCKS proxy for internal hostnames. +#[user@desktop] + + firefox --new-window 'http://master01:8088/cluster' & + + +# ----------------------------------------------------- +# Login to Grafana using Firefox. +# (*) using FoxyProxy Firefox plugin to select the SOCKS proxy for internal hostnames. +#[user@desktop] + + firefox --new-window 'http://monitor:3000/login' & + + user: admin + pass: admin + + + # Set new password in the next page + ######## + + +# ----------------------------------------------------- +# Add Prometheus Data Source + + http://monitor:3000/datasources/new + + URL : http://monitor:9090 + Scrape interval : 5s + + +# ----------------------------------------------------- +# Add our dashboards from local JSON files. +#[user@desktop] + + http://monitor:3000/dashboard/import + + deployments/common/grafana/20210705-02-grafana-dash.json + deployments/common/grafana/node-exporter-v20201010-1633446087511.json + + +# ----------------------------------------------------- +# Select our dashboards. +#[user@desktop] + + http://monitor:3000/d/xfpJB9FGz/1-node-exporter-for-prometheus-dashboard-en-v20201010?orgId=1&refresh=15s + http://monitor:3000/d/34S3C8k7z/my-first-dash?orgId=1&refresh=5s + + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Load our tests scripts. +#[root@ansibler] + + cloudname=gaia-prod + + zeppelinhost=zeppelin.${cloudname:?}.aglais.uk + zeppelinport=8080 + zeppelinurl=http://${zeppelinhost:?}:${zeppelinport:?} + + source /deployments/zeppelin/test/bin/rest-tests.sh + + +# ----------------------------------------------------- +# Login to Zeppelin as a normal user. 
+#[root@ansibler] + + gaiauser=$(secret aglais.zeppelin.gaiauser) + gaiapass=$(secret aglais.zeppelin.gaiapass) + + zeplogin "${gaiauser:?}" "${gaiapass}" + + > { + > "status": "OK", + > "message": "", + > "body": { + > "principal": "gaiauser", + > "ticket": "a5586754-407b-482d-81ac-b83acd669f0b", + > "roles": "[\"user\"]" + > } + > } + + +# ----------------------------------------------------- +# Run the SetUp notebook. +#[root@ansibler] + + noteid=2G7GZKWUH + + zepnbclear ${noteid} + zepnbexecstep ${noteid} + + zepnbstatus ${noteid} + zepnbtotaltime ${noteid} + + > Result [SUCCESS] + + > { + > "status": "OK", + > "message": "", + > "body": { + > "paragraphs": [], + > "name": "/AglaisPublicExamples/SetUp", + > "id": "2G7GZKWUH", + > "noteParams": {}, + > "noteForms": {}, + > "angularObjects": { + > "md:shared_process": [], + > "sh:shared_process": [], + > "spark:dcr:": [] + > }, + > "config": { + > "isZeppelinNotebookCronEnable": false + > }, + > "info": {} + > } + > } + + > 0:0:36 + + +# ----------------------------------------------------- +# Run the HealpixSourceCounts notebook +#[root@ansibler] + + noteid=2FKJ25GVF + + zepnbclear ${noteid} + zepnbexecstep ${noteid} + + zepnbstatus ${noteid} + zepnbtotaltime ${noteid} + + > Result [SUCCESS] + + > { + > "status": "OK", + > "message": "", + > "body": { + > "paragraphs": [], + > "name": "/AglaisPublicExamples/Source counts over the sky", + > "id": "2FKJ25GVF", + > "noteParams": {}, + > "noteForms": {}, + > "angularObjects": { + > "md:shared_process": [], + > "spark:zrq:": [] + > }, + > "config": { + > "isZeppelinNotebookCronEnable": false + > }, + > "info": {} + > } + > } + + > 0:0:27 + + +# ----------------------------------------------------- +# Run the MeanProperMotions notebook +#[root@ansibler] + + noteid=2G748GZSW + + zepnbclear ${noteid} + zepnbexecstep ${noteid} + + zepnbstatus ${noteid} + zepnbtotaltime ${noteid} + + > Result [SUCCESS] + + > { + > "status": "OK", + > "message": "", + > "body": { + > "paragraphs": [], + > "name": "AglaisPublicExamples/Mean proper motions over the sky", + > "id": "2G748GZSW", + > "noteParams": {}, + > "noteForms": {}, + > "angularObjects": { + > "md:shared_process": [], + > "spark:zrq:": [] + > }, + > "config": { + > "isZeppelinNotebookCronEnable": false + > }, + > "info": {} + > } + > } + + > 0:0:48 + + +# ----------------------------------------------------- +# Run the RandomForest notebook. +#[root@ansibler] + + noteid=2G5NU6HTK + + zepnbclear ${noteid} + zepnbexecstep ${noteid} + + zepnbstatus ${noteid} + zepnbtotaltime ${noteid} + + > Result [SUCCESS] + + > { + > "status": "OK", + > "message": "", + > "body": { + > "paragraphs": [], + > "name": "/AglaisPublicExamples/Good astrometric solutions via ML Random Forrest classifier", + > "id": "2G5NU6HTK", + > "noteParams": {}, + > "noteForms": {}, + > "angularObjects": { + > "md:shared_process": [], + > "spark:gaiauser:": [] + > }, + > "config": { + > "isZeppelinNotebookCronEnable": false + > }, + > "info": {} + > } + > } + + > 0:7:53 + + +# ----------------------------------------------------- +# Run the RandomForest notebook (second pass, no restart). +#[root@ansibler] + + noteid=2G5NU6HTK + + zepnbclear ${noteid} + zepnbexecstep ${noteid} + + zepnbstatus ${noteid} + zepnbtotaltime ${noteid} + + > .... + > .... + + > .... + > .... + + > .... + > .... 
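+
+
+# -----------------------------------------------------
+# Run the full notebook set in one pass.
+# A minimal sketch: loop the zepnb* helper functions from rest-tests.sh
+# (sourced above) over the note IDs used in this deployment, so the status
+# and timings can be collected together. The note IDs are specific to this
+# deployment.
+#[root@ansibler]
+
+    for noteid in 2G7GZKWUH 2FKJ25GVF 2G748GZSW 2G5NU6HTK
+    do
+        echo "Notebook [${noteid}]"
+
+        # Clear any previous output, then execute every paragraph in order.
+        zepnbclear ${noteid}
+        zepnbexecstep ${noteid}
+
+        # Report the final status and the total elapsed time.
+        zepnbstatus ${noteid}
+        zepnbtotaltime ${noteid}
+    done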
+ + + + + + diff --git a/notes/zrq/20211018-01-debugging.txt b/notes/zrq/20211018-01-debugging.txt new file mode 100644 index 00000000..11304fc2 --- /dev/null +++ b/notes/zrq/20211018-01-debugging.txt @@ -0,0 +1,146 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# +#zrq-notes-time +#zrq-notes-indent +#zrq-notes-crypto +#zrq-notes-ansible +#zrq-notes-osformat +#zrq-notes-zeppelin +# + + + # Spent 1/4 a day debugging but found nothing + # Time short and other projects pending + +# ----------------------------------------------------- +# Restart everything .... +#[user@zeppelin] + + /home/fedora/zeppelin/bin/zeppelin-daemon.sh stop + + > Zeppelin stop [ OK ] + + rm /home/fedora/zeppelin/logs/* + + + + ssh master01 \ + ' + /opt/hadoop/sbin/stop-all.sh + ' + + > WARNING: Stopping all Apache Hadoop daemons as fedora in 10 seconds. + > WARNING: Use CTRL-C to abort. + > Stopping namenodes on [master01] + > Stopping datanodes + > Stopping secondary namenodes [gaia-prod-20211011-master01.novalocal] + > gaia-prod-20211011-master01.novalocal: fedora@gaia-prod-20211011-master01.novalocal: Permission denied (publickey,gssapi-keyex,gssapi-with-mic). + > Stopping nodemanagers + > worker02: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > worker03: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > worker05: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > worker01: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > worker06: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > worker04: WARNING: nodemanager did not stop gracefully after 5 seconds: Trying to kill with kill -9 + > Stopping resourcemanager + + + workers=( + worker01 + worker02 + worker03 + worker04 + worker05 + worker06 + ) + + for worker in ${workers[*]} + do + echo "Worker [${worker}]" + ssh "${worker}" \ + ' + hostname + date + rm -rf /var/hadoop/data/* + rm -rf /var/hadoop/logs/* + + du -h /var/hadoop/data + du -h /var/hadoop/logs + + ' + done + + ssh master01 \ + ' + /opt/hadoop/sbin/start-all.sh + ' + + > WARNING: Attempting to start all Apache Hadoop daemons as fedora in 10 seconds. + > WARNING: This is not a recommended production deployment configuration. + > WARNING: Use CTRL-C to abort. + > Starting namenodes on [master01] + > Starting datanodes + > Starting secondary namenodes [gaia-prod-20211011-master01.novalocal] + > gaia-prod-20211011-master01.novalocal: fedora@gaia-prod-20211011-master01.novalocal: Permission denied (publickey,gssapi-keyex,gssapi-with-mic). 
+ > Starting resourcemanager + > Starting nodemanagers + + + /home/fedora/zeppelin/bin/zeppelin-daemon.sh start + + > Zeppelin start [ OK ] + + + +# ----------------------------------------------------- +# ----------------------------------------------------- +# Run the SOCKS proxy +#[user@desktop] + + firefox --new-window 'http://master01:8088/cluster' & + + firefox --new-window 'http://monitor:3000/login' & + + ssh zeppelin-prod \ + -t \ + -D "3000" \ + ' + htop + ' + + +# ----------------------------------------------------- +# Tail the application logs ... +#[user@zeppelin] + + + lastapp=$( + ls -1 /var/hadoop/logs | grep '^application' | tail -n 1 + ) + lastcont=$( + ls -1 "/var/hadoop/logs/${lastapp}" | tail -n 1 + ) + tail -f /var/hadoop/logs/${lastapp}/${lastcont}/stderr + + + + + diff --git a/notes/zrq/20211019-01-iris-resources.txt b/notes/zrq/20211019-01-iris-resources.txt new file mode 100644 index 00000000..bc4f8768 --- /dev/null +++ b/notes/zrq/20211019-01-iris-resources.txt @@ -0,0 +1,154 @@ +# +# +# +# Copyright (c) 2021, ROE (http://www.roe.ac.uk/) +# +# This information is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This information is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# +#zrq-notes-time +#zrq-notes-indent +#zrq-notes-crypto +#zrq-notes-ansible +#zrq-notes-osformat +#zrq-notes-zeppelin +# + + + # + # Quote from the Cambridge cloud website + # https://rse-cambridge.github.io/iris-openstack/cambridge#cascade-lake + + Each hypervisor (Dell PowerEdge C6420) has two Intel Xeon Platinum 8276 + (i.e. a total of 112 hyperthreaded cores runing at 2.20-4.00 GHz per hypervisor) + with 192GB RAM (i.e. 1.7GB per hyperthreaded core) and around 800GB of local SSD. + + There is a single 50GbE Mellanox ConnectEx-6 ethernet link (with the option for RoCEv2 via SR-IOV). + + The hardware also includes a (currently unused by IRIS) HDR100 Mellanox Infiniband connection. + + For VM sizing, two 90GB VMs, using under 400GB of local disk, should fit into a single hypervisor. + Typiucally there are 108 vCPUs available for VMs. + If you are in a dedicated aggregate, this can be 1:1 hyperthreads to vCPUs. + +---------------------------------------------------------------- + + On Tue, Oct 19, 2021 at 7:44 AM Dave Morris wrote: + + Hi John, + + I'm putting together our IRIS resource request for 2022+ and I'm + researching the current state of the art in terms of cloud compute for + machine learning on BigData. + + In a recent Slack discussion you said + + "This sounds like something you want on a hyperconverged file system + that uses the local SSD storage .." + "Certainly local NVMe is more typical for this sort of ML pipeline." + + and + + "Yep, that is my current ML recommendation, hyperconverged on local + disk." + + What would your recommendation be for a Spark ML application like ours? + and may I quote you in our resource request? + + Cheers, + -- Dave + + On 2021-10-19 09:46, John Garbutt wrote: + + Good questions. (I am including JohnT for visibility.) 
+ + Essentially, many Machine Learning algorithms can be data parallel. + This allows for shards of data to be staged (or cached) locally. + Generally, training appears to be very IO bound. If you are not + careful, expensive CPU and GPU resources are starved due to poor + storage. + + However, there are some workloads that need shared storage, as those + algorithms are not data parallel, or at least not predictably so. + Sometimes for only part of a pipeline. + + This mix suggested we need a way to have substantial fast local + storage for those that need it, but a way to convert some (or most) of + that to shared storage, as required. Currently we are looking at + Rook.io, OpenEBS and other similar hyperconverged solutions. Similar + to the Ceph hyperconverged solution we tried with Euclid within slurm. + The hope is that kubernetes operators reduce the operational overhead + of such a solution, particularly when created via the cloud portal we + are developing. + + Certainly we are seeing systems optimised for ML having many local + NVMe drives, as opposed to a single spinning disk, so 20GB/s locally + on each node shouldn't be a problem (with Gen4 PCIe) + + Also having good Ceph storage, that is based on SSD or NVMe and not + spinning disk, will make a huge difference to additional storage on + nodes that don't have enough local storage. + + I think it is too early to suggest that this has been "solved" or that + there is much consensus yet. + + I hope that helps? + + Thanks, + John + +---------------------------------------------------------------- + +On Tue, 19 Oct 2021 at 07:18, Dave Morris > wrote: + + Hi Paul, + + I'm writing up our IRIS resource request for 2022+. + + As a baseline for next years request, could you confirm exactly what + resources we currently have. + + At the start of this year I know our tasks were pinned to four Cascade + Lake hosts, but since then we have been through a couple of rounds of + new deployments. + + Cheers, + -- Dave + +On 2021-10-19 14:23, Paul Browne wrote: + + Hi Dave, + + Currently you have access to an aggregate of 8 of our Cascade Lake hosts; + + (oscli) [pfb29@cumulus-seed ansible]$ openstack aggregate show gaia-cclake-agg + +-------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | Field | Value | + +-------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | availability_zone | None | + | created_at | 2020-09-29T22:11:14.000000 | + | deleted | False | + | deleted_at | None | + | hosts | cpu-p-629, cpu-p-630, cpu-p-631, cpu-p-632, cpu-p-633, cpu-p-634, cpu-p-635, cpu-p-636 | + | id | 17 | + | name | gaia-cclake-agg | + | properties | filter_tenant_id1='08e24c6d87f94740aa59c172462ed927', filter_tenant_id2='21b4ae3a2ea44bc5a9c14005ed2963af', filter_tenant_id3='bea28e83e6aa47a8962b59c3b24495fe' | + | updated_at | None | + +-------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + + Thanks, + Paul B. + +