diff --git a/tools/snitch/.dockerignore b/tools/snitch/.dockerignore new file mode 100644 index 00000000000..6876168744a --- /dev/null +++ b/tools/snitch/.dockerignore @@ -0,0 +1,3 @@ +target/ +.vscode/ +scripts/ \ No newline at end of file diff --git a/tools/snitch/Cargo.lock b/tools/snitch/Cargo.lock index f808413e37b..2b9b83dbf29 100644 --- a/tools/snitch/Cargo.lock +++ b/tools/snitch/Cargo.lock @@ -747,10 +747,15 @@ name = "prep-mail" version = "0.1.0" dependencies = [ "env_logger 0.5.12 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "handlebars 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper-tls 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.70 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", "snitcher 0.1.0", + "tokio 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] diff --git a/tools/snitch/prep-mail/Cargo.toml b/tools/snitch/prep-mail/Cargo.toml index 841a610bc61..5dea5b44e22 100644 --- a/tools/snitch/prep-mail/Cargo.toml +++ b/tools/snitch/prep-mail/Cargo.toml @@ -6,8 +6,13 @@ publish = false [dependencies] env_logger = "0.5" +futures = "0.1" handlebars = "1.0" +hyper = "0.12" +hyper-tls = "0.3.0" serde = "1.0" serde_json = "1.0" +tokio = "0.1" +url = "1.7" snitcher = { path = "../snitcher" } \ No newline at end of file diff --git a/tools/snitch/prep-mail/docker/linux/amd64/Dockerfile b/tools/snitch/prep-mail/docker/linux/amd64/Dockerfile index 027d2ad882f..b9f62311f46 100644 --- a/tools/snitch/prep-mail/docker/linux/amd64/Dockerfile +++ b/tools/snitch/prep-mail/docker/linux/amd64/Dockerfile @@ -3,7 +3,7 @@ FROM ekidd/rust-musl-builder AS builder ARG SRC_DIR=. 
# Add source code -COPY $EXE_DIR/ ./ +COPY $SRC_DIR/ ./ # Fix permissions on source code. RUN sudo chown -R rust:rust /home/rust diff --git a/tools/snitch/prep-mail/src/error.rs b/tools/snitch/prep-mail/src/error.rs index b8a9f9ecd4f..42ec75cadbf 100644 --- a/tools/snitch/prep-mail/src/error.rs +++ b/tools/snitch/prep-mail/src/error.rs @@ -3,7 +3,10 @@ use std::fmt; use handlebars::TemplateRenderError; +use hyper_tls::Error as HyperTlsError; use serde_json::Error as SerdeJsonError; +use snitcher::error::Error as SnitcherError; +use url::ParseError as ParseUrlError; pub type Result<T> = ::std::result::Result<T, Error>; @@ -12,6 +15,10 @@ pub enum Error { Env(String), SerdeJson(SerdeJsonError), Handlebars(TemplateRenderError), + HyperTls(HyperTlsError), + ParseUrl(ParseUrlError), + Snitcher(SnitcherError), + NoReportJsonFound, } impl fmt::Display for Error { @@ -31,3 +38,21 @@ impl From<TemplateRenderError> for Error { Error::Handlebars(err) } } + +impl From<ParseUrlError> for Error { + fn from(err: ParseUrlError) -> Error { + Error::ParseUrl(err) + } +} + +impl From<HyperTlsError> for Error { + fn from(err: HyperTlsError) -> Error { + Error::HyperTls(err) + } +} + +impl From<SnitcherError> for Error { + fn from(err: SnitcherError) -> Error { + Error::Snitcher(err) + } +} diff --git a/tools/snitch/prep-mail/src/main.rs b/tools/snitch/prep-mail/src/main.rs index e9613b2fd76..541776ab7a7 100644 --- a/tools/snitch/prep-mail/src/main.rs +++ b/tools/snitch/prep-mail/src/main.rs @@ -1,21 +1,36 @@ // Copyright (c) Microsoft. All rights reserved. 
+#![deny(warnings)] + extern crate env_logger; +extern crate futures; extern crate handlebars; +extern crate hyper; +extern crate hyper_tls; extern crate serde; extern crate serde_json; extern crate snitcher; +extern crate tokio; +extern crate url; use std::env; +use std::sync::{Arc, Mutex}; +use futures::future::{self, Either}; +use futures::Future; use handlebars::Handlebars; +use hyper::{Client as HyperClient, Method}; +use hyper_tls::HttpsConnector; +use snitcher::client; +use snitcher::connect::HyperClientService; +use snitcher::report::Report; +use url::Url; mod error; use error::{Error, Result}; -use snitcher::report::Report; -const REPORT_JSON_KEY: &str = "REPORT_JSON"; +const REPORT_JSON_URL_KEY: &str = "REPORT_JSON_URL"; const REPORT_TEMPLATE_KEY: &str = "REPORT_TEMPLATE"; const DEFAULT_REPORT_TEMPLATE: &str = include_str!("mail-template.hbs"); @@ -23,18 +38,55 @@ fn main() -> Result<()> { env_logger::init(); // read the report JSON from the environment - let report_json = get_env(REPORT_JSON_KEY)?; - let report: Report = serde_json::from_str(&report_json)?; - let template = - get_env(REPORT_TEMPLATE_KEY).unwrap_or_else(|_| String::from(DEFAULT_REPORT_TEMPLATE)); + let report_url = Url::parse(&get_env(REPORT_JSON_URL_KEY)?)?; + + let task = get_report_json(report_url) + .and_then(|report_json| report_json.ok_or(Error::NoReportJsonFound)) + .and_then(|report_json| serde_json::from_str(&report_json).map_err(Error::from)) + .and_then(|report: Report| { + let template = get_env(REPORT_TEMPLATE_KEY) + .unwrap_or_else(|_| String::from(DEFAULT_REPORT_TEMPLATE)); + + // render the template and generate report + let reg = Handlebars::new(); + reg.render_template(&template, &report).map_err(Error::from) + }) + .map(|html| println!("{}", html)); - // render the template and generate report - let reg = Handlebars::new(); - println!("{}", reg.render_template(&template, &report)?); + let error = Arc::new(Mutex::new(None)); + let error_copy = error.clone(); + 
tokio::run(task.map_err(move |err| { + *error_copy.lock().unwrap() = Some(err); + })); - Ok(()) + let lock = Arc::try_unwrap(error).expect("Error lock still has multiple owners."); + let error = lock.into_inner().expect("Error mutex cannot be locked."); + + // we want to propagate any errors we might have encountered from 'main' + // because we want to exit with a non-zero error code when something goes + // wrong + Ok(error.map(|err| Err(err)).unwrap_or_else(|| Ok(()))?) } fn get_env(key: &str) -> Result<String> { env::var(key).map_err(|_| Error::Env(key.to_string())) } + +fn get_report_json(report_url: Url) -> impl Future<Item = Option<String>, Error = Error> + Send { + HttpsConnector::new(4) + .map(|connector| { + let path = report_url.path().to_owned(); + let client = client::Client::new( + HyperClientService::new(HyperClient::builder().build(connector)), + report_url, + ); + + Either::A( + client + .request_str::<()>(Method::GET, &path, None, None, false) + .map_err(Error::from), + ) + }) + .map_err(Error::from) + .unwrap_or_else(|err| Either::B(future::err(err))) +} diff --git a/tools/snitch/scripts/buildImage.sh b/tools/snitch/scripts/buildImage.sh new file mode 100755 index 00000000000..6769c501b0e --- /dev/null +++ b/tools/snitch/scripts/buildImage.sh @@ -0,0 +1,180 @@ +#!/bin/bash + +set -e + +SCRIPT_NAME=$(basename "$0") +DIR=$(cd "$(dirname "$0")"/.. && pwd) +DEFAULT_DOCKER_NAMESPACE="microsoft" +DOCKER_NAMESPACE=$DEFAULT_DOCKER_NAMESPACE +DOCKERFILE= +SKIP_PUSH=0 + +usage() +{ + echo "$SCRIPT_NAME [options]" + echo "Note: Depending on the options you might have to run this as root or sudo." + echo "" + echo "options" + echo " -i, --image-name Image name (e.g. snitcher)" + echo " -P, --project Project to build image for. 
Must be 'snitcher' or 'prep-mail'" + echo " -r, --registry Docker registry required to build, tag and run the module" + echo " -u, --username Docker Registry Username" + echo " -p, --password Docker Username's password" + echo " -n, --namespace Docker namespace (default: $DEFAULT_DOCKER_NAMESPACE)" + echo " -v, --image-version Docker Image Version." + echo "--skip-push Build images, but don't push them" + exit 1; +} + +print_help_and_exit() +{ + echo "Run $SCRIPT_NAME --help for more information." + exit 1 +} + +process_args() +{ + save_next_arg=0 + for arg in "$@" + do + if [ $save_next_arg -eq 1 ]; then + DOCKER_REGISTRY="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 2 ]; then + DOCKER_USERNAME="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 3 ]; then + DOCKER_PASSWORD="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 4 ]; then + DOCKER_IMAGEVERSION="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 5 ]; then + PROJECT="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 6 ]; then + DOCKER_IMAGENAME="$arg" + save_next_arg=0 + elif [ $save_next_arg -eq 7 ]; then + DOCKER_NAMESPACE="$arg" + save_next_arg=0 + else + case "$arg" in + "-h" | "--help" ) usage;; + "-r" | "--registry" ) save_next_arg=1;; + "-u" | "--username" ) save_next_arg=2;; + "-p" | "--password" ) save_next_arg=3;; + "-v" | "--image-version" ) save_next_arg=4;; + "-P" | "--project" ) save_next_arg=5;; + "-i" | "--image-name" ) save_next_arg=6;; + "-n" | "--namespace" ) save_next_arg=7;; + "--skip-push" ) SKIP_PUSH=1 ;; + * ) usage;; + esac + fi + done + + if [[ -z ${DOCKER_REGISTRY} ]]; then + echo "Registry parameter invalid" + print_help_and_exit + fi + + if [[ $SKIP_PUSH -eq 0 ]]; then + if [[ -z ${DOCKER_USERNAME} ]]; then + echo "Docker username parameter invalid" + print_help_and_exit + fi + + if [[ -z ${DOCKER_PASSWORD} ]]; then + echo "Docker password parameter invalid" + print_help_and_exit + fi + fi + + if [[ -z ${DOCKER_IMAGENAME} ]]; then + echo "Docker image name 
parameter invalid" + print_help_and_exit + fi + + if [[ -z ${DOCKER_IMAGEVERSION} ]]; then + echo "Docker image version not found." + print_help_and_exit + fi + + DOCKERFILE="$DIR/$PROJECT/docker/linux/amd64/Dockerfile" + if [[ ! -f $DOCKERFILE ]]; then + echo "No Dockerfile at $DOCKERFILE" + print_help_and_exit + fi +} + +############################################################################### +# Build docker image and push it to private repo +# +# @param[1] - imagename; Name of the docker edge image to publish; Required; +# @param[2] - arch; Arch of base image; Required; +# @param[3] - dockerfile; Path to the dockerfile; Optional; +# Leave as "" and defaults will be chosen. +# @param[4] - context_path; docker context path; Required; +# @param[5] - build_args; docker context path; Optional; +# Leave as "" and no build args will be supplied. +############################################################################### +docker_build_and_tag_and_push() +{ + imagename="$1" + arch="$2" + dockerfile="$3" + context_path="$4" + build_args="${*:5}" + + if [ -z "${imagename}" ] || [ -z "${arch}" ] || [ -z "${context_path}" ]; then + echo "Error: Arguments are invalid [$imagename] [$arch] [$context_path]" + exit 1 + fi + + echo "Building and pushing Docker image $imagename for $arch" + docker_build_cmd="docker build --no-cache" + docker_build_cmd+=" -t $DOCKER_REGISTRY/$DOCKER_NAMESPACE/$imagename:$DOCKER_IMAGEVERSION-linux-$arch" + if [ ! -z "${dockerfile}" ]; then + docker_build_cmd+=" --file $dockerfile" + fi + docker_build_cmd+=" $build_args $context_path" + + echo "Running... $docker_build_cmd" + + if ! $docker_build_cmd; then + echo "Docker build failed with exit code $?" + exit 1 + fi + + if [ $SKIP_PUSH -eq 0 ]; then + docker_push_cmd="docker push $DOCKER_REGISTRY/$DOCKER_NAMESPACE/$imagename:$DOCKER_IMAGEVERSION-linux-$arch" + echo "Running... $docker_push_cmd" + if ! $docker_push_cmd; then + echo "Docker push failed with exit code $?" 
+ exit 1 + fi + fi +} + +process_args "$@" + +# log in to container registry +if [ $SKIP_PUSH -eq 0 ]; then + if ! docker login "$DOCKER_REGISTRY" -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD"; then + echo "Docker login failed!" + exit 1 + fi +fi + +# push image +docker_build_and_tag_and_push \ + "$DOCKER_IMAGENAME" \ + "amd64" \ + "$DOCKERFILE" \ + "$DIR" \ + "" + +echo "Done building and pushing Docker image $DOCKER_IMAGENAME for $PROJECT" + +exit $? diff --git a/tools/snitch/snitcher/docker/linux/amd64/Dockerfile b/tools/snitch/snitcher/docker/linux/amd64/Dockerfile index 88ccddb82f3..dfaaf2a6183 100644 --- a/tools/snitch/snitcher/docker/linux/amd64/Dockerfile +++ b/tools/snitch/snitcher/docker/linux/amd64/Dockerfile @@ -3,7 +3,7 @@ FROM ekidd/rust-musl-builder AS builder ARG SRC_DIR=. # Add source code -COPY $EXE_DIR/ ./ +COPY $SRC_DIR/ ./ # Fix permissions on source code. RUN sudo chown -R rust:rust /home/rust diff --git a/tools/snitch/snitcher/src/client.rs b/tools/snitch/snitcher/src/client.rs index f1b99f68141..5c9a4fcf9dc 100644 --- a/tools/snitch/snitcher/src/client.rs +++ b/tools/snitch/snitcher/src/client.rs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
use std::collections::HashMap; +use std::str; use std::sync::{Arc, Mutex}; use bytes::Bytes; @@ -151,6 +152,30 @@ where .unwrap_or_else(|| future::ok(None)) }) } + + pub fn request_str<BodyT>( + &self, + method: Method, + path: &str, + query: Option<HashMap<&str, String>>, + body: Option<BodyT>, + add_if_match: bool, + ) -> impl Future<Item = Option<String>, Error = Error> + Send + where + BodyT: Serialize, + { + self.request_bytes(method, path, query, body, add_if_match) + .and_then(|bytes| { + bytes + .map(|bytes| { + str::from_utf8(&bytes) + .map_err(Error::from) + .map(|s| future::ok(Some(s.to_owned()))) + .unwrap_or_else(future::err) + }) + .unwrap_or_else(|| future::ok(None)) + }) + } } impl<S> Clone for Client<S> diff --git a/tools/snitch/snitcher/src/lib.rs b/tools/snitch/snitcher/src/lib.rs index b2be01c8147..07396635d6e 100644 --- a/tools/snitch/snitcher/src/lib.rs +++ b/tools/snitch/snitcher/src/lib.rs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -// #![deny(warnings)] +#![deny(warnings)] extern crate azure_sdk_for_rust; extern crate backtrace; extern crate byteorder; @@ -433,7 +433,6 @@ where let iter = i.into_iter(); loop_fn((vec![], iter), |(mut output, mut iter)| { let fut = if let Some(next) = iter.next() { - debug!("Running next future. Output len: {}", output.len()); Either::A(next.into_future().map(|v| Some(v))) } else { Either::B(future::ok(None)) @@ -444,7 +443,6 @@ where output.push(val); Ok(Loop::Continue((output, iter))) } else { - debug!("All futures in iter complete. 
Output len: {}", output.len()); Ok(Loop::Break(output)) } }) diff --git a/tools/snitch/snitcher/src/report.rs b/tools/snitch/snitcher/src/report.rs index 02f10ce74e5..d515c8704b6 100644 --- a/tools/snitch/snitcher/src/report.rs +++ b/tools/snitch/snitcher/src/report.rs @@ -4,6 +4,7 @@ use std::collections::HashMap; use std::io::Write; use bytes::Bytes; +use chrono::{DateTime, Utc}; use libflate::gzip::Encoder as GzipEncoder; use tar::{Builder as TarBuilder, Header as TarHeader}; @@ -14,8 +15,8 @@ use influx::QueryResults; #[serde(rename_all = "camelCase")] pub struct Interval { missed_messages_count: u64, - start_date_time: String, - end_date_time: String, + start_date_time: DateTime<Utc>, + end_date_time: DateTime<Utc>, } #[derive(Debug, Serialize, Deserialize)] @@ -25,7 +26,7 @@ pub struct MessageAnalysis { status_code: u16, status_message: String, received_messages_count: u64, - last_message_received_at: String, + last_message_received_at: DateTime<Utc>, missed_messages: Vec<Interval>, } diff --git a/tools/snitch/snitcher/src/settings.yaml b/tools/snitch/snitcher/src/settings.yaml index 0e4e7778200..4b9c76c888e 100644 --- a/tools/snitch/snitcher/src/settings.yaml +++ b/tools/snitch/snitcher/src/settings.yaml @@ -13,11 +13,46 @@ alert: sig: password influx_url: http://influxdb:8086 influx_db_name: metricsdatabase -# influx_queries: -# all: select * from /.*/ +influx_queries: + db: SELECT non_negative_derivative(sum("value"), 1s) AS "rate_stored" + FROM "metricsdatabase"."autogen"."application__endpointmessagestoredcount" + WHERE time > now() - 8h AND "EndpointId"='iothub' + GROUP BY time(1m) FILL(null) + cloud: SELECT non_negative_derivative(sum("value"), 1s) AS "sum_value" + FROM "metricsdatabase"."autogen"."application__edgehubtocloudmessagesentcount" + WHERE time > now() - 8h + GROUP BY time(1m) FILL(null) + dbstd: SELECT mean("stddev") AS "mean_stddev" + FROM "metricsdatabase"."autogen"."application__endpointmessagestoredlatencyms" + WHERE time > now() - 8h AND "EndpointId"='iothub' 
+ GROUP BY time(1m) FILL(null) + cloudstd: SELECT mean("stddev") AS "mean_stddev" + FROM "metricsdatabase"."autogen"."application__edgehubtocloudmessagelatencyms" + WHERE time > now() - 8h + GROUP BY time(1m) FILL(null) + dbmean: SELECT mean("mean") AS "mean_mean" + FROM "metricsdatabase"."autogen"."application__endpointmessagestoredlatencyms" + WHERE time > now() - 8h AND "EndpointId"='iothub' + GROUP BY time(1m) FILL(null) + dbmeanentity: SELECT mean("mean") AS "mean_mean" + FROM "metricsdatabase"."autogen"."application__messageentitystoreputorupdatelatencyms" + WHERE time > now() - 8h AND "EndpointId"='iothub' + GROUP BY time(1m) FILL(null) + dbmeanseq: SELECT mean("mean") AS "mean_mean" + FROM "metricsdatabase"."autogen"."application__sequentialstoreappendlatencyms" + WHERE time > now() - 8h AND "EndpointId"='iothub' + GROUP BY time(1m) FILL(null) + dbgetstd: SELECT mean("stddev") AS "mean_stddev" + FROM "metricsdatabase"."autogen"."application__dbgetlatencyms" + WHERE time > now() - 8h AND "EndpointId"='all' + GROUP BY time(1m) FILL(null) + dbputstd: SELECT mean("stddev") AS "mean_stddev" + FROM "metricsdatabase"."autogen"."application__dbputlatencyms" + WHERE time > now() - 8h AND "EndpointId"='all' + GROUP BY time(1m) FILL(null) analyzer_url: http://analyzer:15000/api/report -blob_storage_account: "" -blob_storage_master_key: "" -blob_container_name: "" +blob_storage_account: '' +blob_storage_master_key: '' +blob_container_name: '' reporting_interval: null docker_url: unix:///var/run/docker.sock