From 3d1d3f7440a1bebe9f82e03747fe558f5f0b5435 Mon Sep 17 00:00:00 2001
From: Igor Pashev
Date: Sun, 7 Oct 2018 12:33:33 +0300
Subject: Add check_aws_s3_file

---
 modules/pkgs/check_aws_s3_file/check_aws_s3_file  | 163 +++++++++++++++++++++
 .../pkgs/check_aws_s3_file/check_aws_s3_file.conf |  18 +++
 modules/pkgs/check_aws_s3_file/default.nix        |  22 +++
 3 files changed, 203 insertions(+)
 create mode 100755 modules/pkgs/check_aws_s3_file/check_aws_s3_file
 create mode 100644 modules/pkgs/check_aws_s3_file/check_aws_s3_file.conf
 create mode 100644 modules/pkgs/check_aws_s3_file/default.nix

diff --git a/modules/pkgs/check_aws_s3_file/check_aws_s3_file b/modules/pkgs/check_aws_s3_file/check_aws_s3_file
new file mode 100755
index 0000000..82c8745
--- /dev/null
+++ b/modules/pkgs/check_aws_s3_file/check_aws_s3_file
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+sizeWarn=''
+sizeCrit=''
+timeWarn=''
+timeCrit=''
+bucket=''
+key=''
+
+usage() {
+  cat <<USAGE
+
+  -f <file>  AWS CLI credentials file
+
+  -b  S3 bucket name, required
+  -k  object key, e. g. "hosts/foo/bar.gz"
+
+  -s  object size warning threshold
+  -S  object size critical threshold
+
+  -t  object age warning threshold
+  -T  object age critical threshold
+
+Examples:
+
+  $0 -b foobucket                                         # check the bucket is available
+  $0 -b foobucket -k hosts/foo/file.gz
+  $0 -b foobucket -k hosts/foo/file.gz -t 86400
+  $0 -b foobucket -k hosts/foo/file.gz -t 3600: -s 1000:
+
+USAGE
+}
+
+die () {
+  echo "$0: " "$@" >&2
+  exit 1
+}
+
+while [ $# -gt 0 ]; do
+  case "$1" in
+    -f) export AWS_SHARED_CREDENTIALS_FILE="$2"; shift 2;;
+    -b) bucket="$2"; shift 2;;
+    -k) key="$2"; shift 2;;
+    -s) sizeWarn="$2"; shift 2;;
+    -S) sizeCrit="$2"; shift 2;;
+    -t) timeWarn="$2"; shift 2;;
+    -T) timeCrit="$2"; shift 2;;
+    -h|--help) usage; exit 1;;
+    *) die "unsupported argument: $1";;
+  esac
+done
+
+inrange () {
+  local r v
+  local v1 v2
+  local outter
+  local sIFS
+
+  r="$1"
+  v="$2"
+
+  case "$r" in
+    @*) outter=true; r="${r/@/}";;
+    *) outter=false;;
+  esac
+
+  sIFS=$IFS
+
+  IFS=:
+  set -- $r
+  v1=${1-}
+  v2=${2-}
+  IFS=$sIFS
+
+  case "$v1" in
+    $r) v2=$v1; v1=0;;
+    ~*) v1=;;
+  esac
+
+  if $outter; then
+    { [ -n "$v1" ] && [ "$v" -lt "$v1" ]; } || { [ -n "$v2" ] && [ "$v" -gt "$v2" ]; }
+  else
+    { [ -z "$v1" ] || [ "$v" -gt "$v1" ]; } && { [ -z "$v2" ] || [ "$v" -lt "$v2" ]; }
+  fi
+}
+
+humanSize() {
+  local n u
+
+  n=$1
+  for u in B KiB MiB GiB TiB; do
+    if (( n > 1024 )); then
+      (( n /= 1024 ))
+    else
+      break
+    fi
+  done
+
+  echo "${n} ${u}"
+}
+
+
+[ -n "$bucket" ] || die "missing S3 bucket"
+
+if [ -z "$key" ]; then
+  if out=$(aws s3api head-bucket --bucket "$bucket" 2>&1); then
+    echo "OK: bucket $bucket exists and is accessible"
+    exit 0
+  else
+    printf 'CRITICAL: %s\n' "$(echo "$out" | grep .)"
+    exit 2
+  fi
+fi
+
+if ! out=$(aws s3api head-object --bucket "$bucket" --key "$key" 2>&1); then
+  printf 'UNKNOWN: %s\n' "$(echo "$out" | grep .)"
+  exit 3
+fi
+
+json=$out
+
+size=$(echo "$json" | jq -r .ContentLength)
+date=$(echo "$json" | jq -r .LastModified)
+
+date_s=$(date -d "$date" +%s)
+now_s=$(date -d now +%s)
+age_s=$((now_s - date_s))
+size_h=$(humanSize "$size")
+
+stat="size=${size}B;${sizeWarn};${sizeCrit};0 age=${age_s}s;${timeWarn};${timeCrit}"
+
+if [ -n "$timeCrit" ] && ! inrange "$timeCrit" "$age_s"; then
+  echo "CRITICAL: last modified $date|$stat"
+  exit 2
+fi
+
+if [ -n "$sizeCrit" ] && ! inrange "$sizeCrit" "$size"; then
+  echo "CRITICAL: size $size_h|$stat"
+  exit 2
+fi
+
+if [ -n "$timeWarn" ] && ! inrange "$timeWarn" "$age_s"; then
+  echo "WARNING: last modified $date|$stat"
+  exit 1
+fi
+
+if [ -n "$sizeWarn" ] && ! inrange "$sizeWarn" "$size"; then
+  echo "WARNING: size $size_h|$stat"
+  exit 1
+fi
+
+echo "OK: size $size_h, last modified $date|$stat"
+exit 0
+
diff --git a/modules/pkgs/check_aws_s3_file/check_aws_s3_file.conf b/modules/pkgs/check_aws_s3_file/check_aws_s3_file.conf
new file mode 100644
index 0000000..e2ec2b5
--- /dev/null
+++ b/modules/pkgs/check_aws_s3_file/check_aws_s3_file.conf
@@ -0,0 +1,18 @@
+object CheckCommand "aws-s3-file" {
+  import "plugin-check-command"
+
+  command = [ "check_aws_s3_file" ]
+
+  arguments = {
+    "-f" = "$aws_s3_file_credentials$"
+    "-b" = "$aws_s3_file_bucket$"
+    "-k" = "$aws_s3_file_key$"
+    "-s" = "$aws_s3_file_size_warn$"
+    "-S" = "$aws_s3_file_size_crit$"
+    "-t" = "$aws_s3_file_age_warn$"
+    "-T" = "$aws_s3_file_age_crit$"
+  }
+  vars.aws_s3_file_bucket = "$host.name$"
+  vars.aws_s3_file_key = "$service.name$"
+}
+
diff --git a/modules/pkgs/check_aws_s3_file/default.nix b/modules/pkgs/check_aws_s3_file/default.nix
new file mode 100644
index 0000000..df84c79
--- /dev/null
+++ b/modules/pkgs/check_aws_s3_file/default.nix
@@ -0,0 +1,22 @@
+{ stdenv, pkgs, makeWrapper }:
+
+stdenv.mkDerivation {
+  name = "check_aws_s3_file";
+  outputs = [ "out" "conf" ];
+  unpackPhase = ":";
+  nativeBuildInputs = [ makeWrapper ];
+  installPhase = ''
+    mkdir -p $out/bin
+
+    cp ${./check_aws_s3_file} $out/bin/check_aws_s3_file
+    cp ${./check_aws_s3_file.conf} $conf
+
+    chmod +x "$out/bin/"*
+
+    substituteInPlace "$conf" \
+      --replace check_aws_s3_file "$out/bin/check_aws_s3_file"
+
+    wrapProgram "$out/bin/check_aws_s3_file" \
+      --prefix PATH : "${pkgs.awscli}/bin:${pkgs.gnugrep}/bin:${pkgs.jq}/bin"
+  '';
+}
--
cgit v1.2.3