Diff of the two buildlogs:

--
--- b1/build.log	2023-05-20 01:54:42.264980081 +0000
+++ b2/build.log	2023-05-20 02:01:44.070568168 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Thu Jun 20 20:15:08 -12 2024
-I: pbuilder-time-stamp: 1718957708
+I: Current time: Sat May 20 15:54:44 +14 2023
+I: pbuilder-time-stamp: 1684547684
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/bookworm-reproducible-base.tgz]
 I: copying local configuration
@@ -16,7 +16,7 @@
 I: copying [./elasticsearch-curator_5.8.1.orig.tar.gz]
 I: copying [./elasticsearch-curator_5.8.1-4.1.debian.tar.xz]
 I: Extracting source
-gpgv: Signature made Sat Mar 4 04:21:34 2023 -12
+gpgv: Signature made Sun Mar 5 06:21:34 2023 +14
 gpgv: using RSA key 3AFA757FAC6EA11D2FF45DF088D24287A2D898B1
 gpgv: Can't check signature: No public key
 dpkg-source: warning: cannot verify inline signature for ./elasticsearch-curator_5.8.1-4.1.dsc: no acceptable signature found
@@ -32,135 +32,167 @@
 dpkg-source: info: applying 0001-Version-bump-to-pyyaml-5.4.1-1596.patch
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/3421144/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/D01_modify_environment starting
+debug: Running on ionos11-amd64.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 May 20 15:55 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='amd64'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=16 '
- DISTRIBUTION='bookworm'
- HOME='/root'
- HOST_ARCH='amd64'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="2" [2]="15" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu")
+ BASH_VERSION='5.2.15(1)-release'
+ BUILDDIR=/build
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=amd64
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=15 '
+ DIRSTACK=()
+ DISTRIBUTION=bookworm
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=x86_64
+ HOST_ARCH=amd64
 IFS=' 
 '
- INVOCATION_ID='52e0ab2f7b3c415f916a9044bc431207'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='3421144'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=924f14e59e704afc988359e346969c29
+ LANG=C
+ LANGUAGE=et_EE:et
+ LC_ALL=C
+ MACHTYPE=x86_64-pc-linux-gnu
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnu
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=2453881
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.ygiPiqhU/pbuilderrc_Lm8l --distribution bookworm --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.ygiPiqhU/b1 --logfile b1/build.log elasticsearch-curator_5.8.1-4.1.dsc'
- SUDO_GID='111'
- SUDO_UID='106'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://85.184.249.68:3128'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.ygiPiqhU/pbuilderrc_Kks3 --distribution bookworm --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.ygiPiqhU/b2 --logfile b2/build.log --extrapackages usrmerge elasticsearch-curator_5.8.1-4.1.dsc'
+ SUDO_GID=111
+ SUDO_UID=106
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://78.137.99.97:3128
 I: uname -a
- Linux ionos15-amd64 6.1.0-0.deb11.6-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.15-1~bpo11+1 (2023-03-16) x86_64 GNU/Linux
+ Linux i-capture-the-hostname 5.10.0-23-amd64 #1 SMP Debian 5.10.179-1 (2023-05-12) x86_64 GNU/Linux
 I: ls -l /bin
 total 5632
- -rwxr-xr-x 1 root root 1265648 Apr 23 2023 bash
- -rwxr-xr-x 3 root root 39224 Sep 18 2022 bunzip2
- -rwxr-xr-x 3 root root 39224 Sep 18 2022 bzcat
- lrwxrwxrwx 1 root root 6 Sep 18 2022 bzcmp -> bzdiff
- -rwxr-xr-x 1 root root 2225 Sep 18 2022 bzdiff
- lrwxrwxrwx 1 root root 6 Sep 18 2022 bzegrep -> bzgrep
- -rwxr-xr-x 1 root root 4893 Nov 27 2021 bzexe
- lrwxrwxrwx 1 root root 6 Sep 18 2022 bzfgrep -> bzgrep
- -rwxr-xr-x 1 root root 3775 Sep 18 2022 bzgrep
- -rwxr-xr-x 3 root root 39224 Sep 18 2022 bzip2
- -rwxr-xr-x 1 root root 14568 Sep 18 2022 bzip2recover
- lrwxrwxrwx 1 root root 6 Sep 18 2022 bzless -> bzmore
- -rwxr-xr-x 1 root root 1297 Sep 18 2022 bzmore
- -rwxr-xr-x 1 root root 44016 Sep 20 2022 cat
- -rwxr-xr-x 1 root root 68656 Sep 20 2022 chgrp
- -rwxr-xr-x 1 root root 64496 Sep 20 2022 chmod
- -rwxr-xr-x 1 root root 72752 Sep 20 2022 chown
- -rwxr-xr-x 1 root root 151152 Sep 20 2022 cp
- -rwxr-xr-x 1 root root 125640 Jan 5 2023 dash
- -rwxr-xr-x 1 root root 121904 Sep 20 2022 date
- -rwxr-xr-x 1 root root 89240 Sep 20 2022 dd
- -rwxr-xr-x 1 root root 102200 Sep 20 2022 df
- -rwxr-xr-x 1 root root 151344 Sep 20 2022 dir
- -rwxr-xr-x 1 root root 88656 Mar 22 2023 dmesg
- lrwxrwxrwx 1 root root 8 Dec 19 2022 dnsdomainname -> hostname
- lrwxrwxrwx 1 root root 8 Dec 19 2022 domainname -> hostname
- -rwxr-xr-x 1 root root 43856 Sep 20 2022 echo
- -rwxr-xr-x 1 root root 41 Jan 24 2023 egrep
- -rwxr-xr-x 1 root root 35664 Sep 20 2022 false
- -rwxr-xr-x 1 root root 41 Jan 24 2023 fgrep
- -rwxr-xr-x 1 root root 85600 Mar 22 2023 findmnt
- -rwsr-xr-x 1 root root 35128 Mar 22 2023 fusermount
- -rwxr-xr-x 1 root root 203152 Jan 24 2023 grep
- -rwxr-xr-x 2 root root 2346 Apr 9 2022 gunzip
- -rwxr-xr-x 1 root root 6447 Apr 9 2022 gzexe
- -rwxr-xr-x 1 root root 98136 Apr 9 2022 gzip
- -rwxr-xr-x 1 root root 22680 Dec 19 2022 hostname
- -rwxr-xr-x 1 root root 72824 Sep 20 2022 ln
- -rwxr-xr-x 1 root root 53024 Mar 23 2023 login
- -rwxr-xr-x 1 root root 151344 Sep 20 2022 ls
- -rwxr-xr-x 1 root root 207168 Mar 22 2023 lsblk
- -rwxr-xr-x 1 root root 97552 Sep 20 2022 mkdir
- -rwxr-xr-x 1 root root 72912 Sep 20 2022 mknod
- -rwxr-xr-x 1 root root 43952 Sep 20 2022 mktemp
- -rwxr-xr-x 1 root root 59712 Mar 22 2023 more
- -rwsr-xr-x 1 root root 59704 Mar 22 2023 mount
- -rwxr-xr-x 1 root root 18744 Mar 22 2023 mountpoint
- -rwxr-xr-x 1 root root 142968 Sep 20 2022 mv
- lrwxrwxrwx 1 root root 8 Dec 19 2022 nisdomainname -> hostname
- lrwxrwxrwx 1 root root 14 Apr 2 2023 pidof -> /sbin/killall5
- -rwxr-xr-x 1 root root 43952 Sep 20 2022 pwd
- lrwxrwxrwx 1 root root 4 Apr 23 2023 rbash -> bash
- -rwxr-xr-x 1 root root 52112 Sep 20 2022 readlink
- -rwxr-xr-x 1 root root 72752 Sep 20 2022 rm
- -rwxr-xr-x 1 root root 56240 Sep 20 2022 rmdir
- -rwxr-xr-x 1 root root 27560 Nov 2 2022 run-parts
- -rwxr-xr-x 1 root root 126424 Jan 5 2023 sed
- lrwxrwxrwx 1 root root 4 Jan 5 2023 sh -> dash
- -rwxr-xr-x 1 root root 43888 Sep 20 2022 sleep
- -rwxr-xr-x 1 root root 85008 Sep 20 2022 stty
- -rwsr-xr-x 1 root root 72000 Mar 22 2023 su
- -rwxr-xr-x 1 root root 39824 Sep 20 2022 sync
- -rwxr-xr-x 1 root root 531984 Apr 6 2023 tar
- -rwxr-xr-x 1 root root 14520 Nov 2 2022 tempfile
- -rwxr-xr-x 1 root root 109616 Sep 20 2022 touch
- -rwxr-xr-x 1 root root 35664 Sep 20 2022 true
- -rwxr-xr-x 1 root root 14568 Mar 22 2023 ulockmgr_server
- -rwsr-xr-x 1 root root 35128 Mar 22 2023 umount
- -rwxr-xr-x 1 root root 43888 Sep 20 2022 uname
- -rwxr-xr-x 2 root root 2346 Apr 9 2022 uncompress
- -rwxr-xr-x 1 root root 151344 Sep 20 2022 vdir
- -rwxr-xr-x 1 root root 72024 Mar 22 2023 wdctl
- lrwxrwxrwx 1 root root 8 Dec 19 2022 ypdomainname -> hostname
- -rwxr-xr-x 1 root root 1984 Apr 9 2022 zcat
- -rwxr-xr-x 1 root root 1678 Apr 9 2022 zcmp
- -rwxr-xr-x 1 root root 6460 Apr 9 2022 zdiff
- -rwxr-xr-x 1 root root 29 Apr 9 2022 zegrep
- -rwxr-xr-x 1 root root 29 Apr 9 2022 zfgrep
- -rwxr-xr-x 1 root root 2081 Apr 9 2022 zforce
- -rwxr-xr-x 1 root root 8103 Apr 9 2022 zgrep
- -rwxr-xr-x 1 root root 2206 Apr 9 2022 zless
- -rwxr-xr-x 1 root root 1842 Apr 9 2022 zmore
- -rwxr-xr-x 1 root root 4577 Apr 9 2022 znew
-I: user script /srv/workspace/pbuilder/3421144/tmp/hooks/D02_print_environment finished
+ -rwxr-xr-x 1 root root 1265648 Apr 24 11:23 bash
+ -rwxr-xr-x 3 root root 39224 Sep 19 2022 bunzip2
+ -rwxr-xr-x 3 root root 39224 Sep 19 2022 bzcat
+ lrwxrwxrwx 1 root root 6 Sep 19 2022 bzcmp -> bzdiff
+ -rwxr-xr-x 1 root root 2225 Sep 19 2022 bzdiff
+ lrwxrwxrwx 1 root root 6 Sep 19 2022 bzegrep -> bzgrep
+ -rwxr-xr-x 1 root root 4893 Nov 28 2021 bzexe
+ lrwxrwxrwx 1 root root 6 Sep 19 2022 bzfgrep -> bzgrep
+ -rwxr-xr-x 1 root root 3775 Sep 19 2022 bzgrep
+ -rwxr-xr-x 3 root root 39224 Sep 19 2022 bzip2
+ -rwxr-xr-x 1 root root 14568 Sep 19 2022 bzip2recover
+ lrwxrwxrwx 1 root root 6 Sep 19 2022 bzless -> bzmore
+ -rwxr-xr-x 1 root root 1297 Sep 19 2022 bzmore
+ -rwxr-xr-x 1 root root 44016 Sep 21 2022 cat
+ -rwxr-xr-x 1 root root 68656 Sep 21 2022 chgrp
+ -rwxr-xr-x 1 root root 64496 Sep 21 2022 chmod
+ -rwxr-xr-x 1 root root 72752 Sep 21 2022 chown
+ -rwxr-xr-x 1 root root 151152 Sep 21 2022 cp
+ -rwxr-xr-x 1 root root 125640 Jan 6 03:20 dash
+ -rwxr-xr-x 1 root root 121904 Sep 21 2022 date
+ -rwxr-xr-x 1 root root 89240 Sep 21 2022 dd
+ -rwxr-xr-x 1 root root 102200 Sep 21 2022 df
+ -rwxr-xr-x 1 root root 151344 Sep 21 2022 dir
+ -rwxr-xr-x 1 root root 88656 Mar 24 00:02 dmesg
+ lrwxrwxrwx 1 root root 8 Dec 20 03:33 dnsdomainname -> hostname
+ lrwxrwxrwx 1 root root 8 Dec 20 03:33 domainname -> hostname
+ -rwxr-xr-x 1 root root 43856 Sep 21 2022 echo
+ -rwxr-xr-x 1 root root 41 Jan 25 04:43 egrep
+ -rwxr-xr-x 1 root root 35664 Sep 21 2022 false
+ -rwxr-xr-x 1 root root 41 Jan 25 04:43 fgrep
+ -rwxr-xr-x 1 root root 85600 Mar 24 00:02 findmnt
+ -rwsr-xr-x 1 root root 35128 Mar 23 22:35 fusermount
+ -rwxr-xr-x 1 root root 203152 Jan 25 04:43 grep
+ -rwxr-xr-x 2 root root 2346 Apr 10 2022 gunzip
+ -rwxr-xr-x 1 root root 6447 Apr 10 2022 gzexe
+ -rwxr-xr-x 1 root root 98136 Apr 10 2022 gzip
+ -rwxr-xr-x 1 root root 22680 Dec 20 03:33 hostname
+ -rwxr-xr-x 1 root root 72824 Sep 21 2022 ln
+ -rwxr-xr-x 1 root root 53024 Mar 24 02:40 login
+ -rwxr-xr-x 1 root root 151344 Sep 21 2022 ls
+ -rwxr-xr-x 1 root root 207168 Mar 24 00:02 lsblk
+ -rwxr-xr-x 1 root root 97552 Sep 21 2022 mkdir
+ -rwxr-xr-x 1 root root 72912 Sep 21 2022 mknod
+ -rwxr-xr-x 1 root root 43952 Sep 21 2022 mktemp
+ -rwxr-xr-x 1 root root 59712 Mar 24 00:02 more
+ -rwsr-xr-x 1 root root 59704 Mar 24 00:02 mount
+ -rwxr-xr-x 1 root root 18744 Mar 24 00:02 mountpoint
+ -rwxr-xr-x 1 root root 142968 Sep 21 2022 mv
+ lrwxrwxrwx 1 root root 8 Dec 20 03:33 nisdomainname -> hostname
+ lrwxrwxrwx 1 root root 14 Apr 3 20:25 pidof -> /sbin/killall5
+ -rwxr-xr-x 1 root root 43952 Sep 21 2022 pwd
+ lrwxrwxrwx 1 root root 4 Apr 24 11:23 rbash -> bash
+ -rwxr-xr-x 1 root root 52112 Sep 21 2022 readlink
+ -rwxr-xr-x 1 root root 72752 Sep 21 2022 rm
+ -rwxr-xr-x 1 root root 56240 Sep 21 2022 rmdir
+ -rwxr-xr-x 1 root root 27560 Nov 3 2022 run-parts
+ -rwxr-xr-x 1 root root 126424 Jan 6 09:55 sed
+ lrwxrwxrwx 1 root root 9 May 20 15:55 sh -> /bin/bash
+ -rwxr-xr-x 1 root root 43888 Sep 21 2022 sleep
+ -rwxr-xr-x 1 root root 85008 Sep 21 2022 stty
+ -rwsr-xr-x 1 root root 72000 Mar 24 00:02 su
+ -rwxr-xr-x 1 root root 39824 Sep 21 2022 sync
+ -rwxr-xr-x 1 root root 531984 Apr 7 04:25 tar
+ -rwxr-xr-x 1 root root 14520 Nov 3 2022 tempfile
+ -rwxr-xr-x 1 root root 109616 Sep 21 2022 touch
+ -rwxr-xr-x 1 root root 35664 Sep 21 2022 true
+ -rwxr-xr-x 1 root root 14568 Mar 23 22:35 ulockmgr_server
+ -rwsr-xr-x 1 root root 35128 Mar 24 00:02 umount
+ -rwxr-xr-x 1 root root 43888 Sep 21 2022 uname
+ -rwxr-xr-x 2 root root 2346 Apr 10 2022 uncompress
+ -rwxr-xr-x 1 root root 151344 Sep 21 2022 vdir
+ -rwxr-xr-x 1 root root 72024 Mar 24 00:02 wdctl
+ lrwxrwxrwx 1 root root 8 Dec 20 03:33 ypdomainname -> hostname
+ -rwxr-xr-x 1 root root 1984 Apr 10 2022 zcat
+ -rwxr-xr-x 1 root root 1678 Apr 10 2022 zcmp
+ -rwxr-xr-x 1 root root 6460 Apr 10 2022 zdiff
+ -rwxr-xr-x 1 root root 29 Apr 10 2022 zegrep
+ -rwxr-xr-x 1 root root 29 Apr 10 2022 zfgrep
+ -rwxr-xr-x 1 root root 2081 Apr 10 2022 zforce
+ -rwxr-xr-x 1 root root 8103 Apr 10 2022 zgrep
+ -rwxr-xr-x 1 root root 2206 Apr 10 2022 zless
+ -rwxr-xr-x 1 root root 1842 Apr 10 2022 zmore
+ -rwxr-xr-x 1 root root 4577 Apr 10 2022 znew
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/D02_print_environment finished
 -> Attempting to satisfy build-dependencies
 -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -353,7 +385,7 @@
 Get: 109 http://deb.debian.org/debian bookworm/main amd64 python3-voluptuous all 0.12.2-1 [43.4 kB]
 Get: 110 http://deb.debian.org/debian bookworm/main amd64 python3-yaml amd64 6.0-3+b2 [119 kB]
 Get: 111 http://deb.debian.org/debian bookworm/main amd64 w3m amd64 0.5.3+git20230121-2 [1102 kB]
-Fetched 64.1 MB in 1s (64.0 MB/s)
+Fetched 64.1 MB in 4s (18.1 MB/s)
 debconf: delaying package configuration, since apt-utils is not installed
 Selecting previously unselected package fonts-lato.
 (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19596 files and directories currently installed.)
@@ -827,10 +859,17 @@
 Reading package lists...
 Building dependency tree...
 Reading state information...
+usrmerge is already the newest version (35).
 fakeroot is already the newest version (1.31-1.2).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/A99_set_merged_usr starting
+Re-configuring usrmerge...
+removed '/etc/unsupported-skip-usrmerge-conversion'
+The system has been successfully converted.
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/A99_set_merged_usr finished +hostname: Name or service not known +I: Running cd /build/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes dpkg-buildpackage: info: source package elasticsearch-curator dpkg-buildpackage: info: source version 5.8.1-4.1 dpkg-buildpackage: info: source distribution unstable @@ -1083,7 +1122,7 @@ writing manifest file 'elasticsearch_curator.egg-info/SOURCES.txt' running build_ext Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.010s] +GET http://localhost:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1137,10 +1176,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1194,10 +1233,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1251,10 +1290,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1308,10 +1347,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET 
http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1365,7 +1404,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1422,10 +1461,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1479,7 +1518,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1536,7 +1575,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1593,7 +1632,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1650,7 +1689,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1707,7 +1746,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new 
connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1764,10 +1803,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1821,7 +1860,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1878,10 +1917,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1935,7 +1974,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -1992,7 +2031,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2049,10 +2088,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A 
request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2106,7 +2145,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2163,7 +2202,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2220,7 +2259,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2277,7 +2316,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2334,7 +2373,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2391,7 +2430,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2448,7 +2487,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 GET 
http://localhost:9200/ [status:N/A request:0.001s] @@ -2505,7 +2544,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2562,10 +2601,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2619,7 +2658,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2676,7 +2715,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2733,10 +2772,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2790,7 +2829,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2847,7 +2886,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -2904,10 +2943,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2961,7 +3000,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3018,7 +3057,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3075,7 +3114,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3132,7 +3171,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3189,7 +3228,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3246,7 +3285,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3303,7 +3342,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3360,7 +3399,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3417,10 +3456,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3474,7 +3513,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3531,7 +3570,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3588,7 +3627,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3645,7 +3684,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn 
raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3702,7 +3741,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3759,7 +3798,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3816,10 +3855,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3873,7 +3912,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -3930,10 +3969,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3987,7 +4026,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None 
Starting new HTTP connection (52): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4044,7 +4083,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4101,7 +4140,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4158,7 +4197,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4215,7 +4254,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4272,10 +4311,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4329,7 +4368,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4386,7 +4425,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] 
Connection refused > None Starting new HTTP connection (59): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4443,10 +4482,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4500,10 +4539,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.001s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4557,7 +4596,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4614,7 +4653,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4671,7 +4710,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4728,7 +4767,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4785,7 +4824,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise 
NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4842,7 +4881,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -4899,10 +4938,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4956,7 +4995,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5013,7 +5052,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5070,7 +5109,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5127,7 +5166,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5184,7 +5223,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 
186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5241,7 +5280,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5298,10 +5337,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5355,7 +5394,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5412,7 +5451,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5469,7 +5508,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5526,7 +5565,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5583,7 +5622,7 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5640,7 +5679,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5697,7 +5736,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5754,7 +5793,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5811,7 +5850,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5868,7 +5907,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -5925,10 +5964,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5982,10 
+6021,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6039,7 +6078,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6096,10 +6135,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6153,7 +6192,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6210,7 +6249,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6267,7 +6306,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6324,7 +6363,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6381,7 +6420,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6438,7 +6477,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6495,10 +6534,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6552,10 +6591,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6609,10 +6648,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6666,7 +6705,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish 
a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6723,7 +6762,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -6780,7 +6819,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -8134,7 +8173,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -8170,7 +8209,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -9486,9 +9525,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. 
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684461620) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684461620) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9541,9 +9580,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684461620) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684461620) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9670,9 +9709,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684461621) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684461621) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9693,9 +9732,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684461621) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684461621) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9739,9 +9778,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. 
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684461621) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1718871454) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684461621) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9762,9 +9801,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684461621) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1718871454) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684461621) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -11083,7 +11122,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, 
msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -11116,7 +11155,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 
'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -11148,7 +11187,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 
'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -11177,7 +11216,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -11212,7 +11251,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 
'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11240,7 +11279,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, 
msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11271,7 +11310,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , 
msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11291,7 +11330,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 
'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -11339,7 +11378,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), 
None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -11359,7 +11398,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 
'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -11383,7 +11422,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, 
msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -11417,7 +11456,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 
'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -11454,7 +11493,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 
'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -11501,7 +11540,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 
'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11550,30 +11589,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, 
msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461621 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1718871455) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684461621) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1718871455) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684461621) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. 
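[Note] The age-filter run above is also why the two builds disagree: with no explicit epoch in the filter, the point of reference is derived from the build's current time (1718871455 in the first build, 1684461621 in the second, each roughly one day before the respective build timestamp), so identical snapshots are compared against different reference epochs. A minimal sketch of that comparison, assuming the reference is simply "now minus unit_count units" (the unit table and function names below are illustrative, not curator's internals):

    import time

    # Simplified unit lengths in seconds (assumes 30-day months, 365-day years).
    SECONDS = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400,
               'weeks': 604800, 'months': 2592000, 'years': 31536000}

    def point_of_reference(unit, unit_count, epoch=None):
        """Reference epoch = (epoch or now) - unit_count * unit length."""
        if epoch is None:
            epoch = int(time.time())   # build-time dependency: this is what differs between b1 and b2
        return epoch - unit_count * SECONDS[unit]

    def remains_actionable(age, direction, reference):
        """Direction 'older' keeps items dated before the reference epoch."""
        return age < reference if direction == 'older' else age > reference

    reference = point_of_reference('days', 1)
    for name, age in [('snap_name', 1422748800), ('snapshot-2015.03.01', 1425168002)]:
        print(name, 'remains' if remains_actionable(age, 'older', reference) else 'removed')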
All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -11582,14 +11621,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. 
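[Note] The two "Schema error" outcomes above come straight from voluptuous: 'filtertype' has to pass an In([...]) check, and because the schema is strict, a document with an unknown key such as 'no_filtertype' is rejected with "extra keys not allowed". A trimmed, standalone sketch of that validation pattern (only two keys are kept; the behaviour, not the full field list, is the point):

    from voluptuous import Any, Coerce, In, MultipleInvalid, Schema

    FILTERTYPES = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
                   'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
                   'period', 'shards', 'space', 'state']

    schema = Schema({
        'filtertype': Any(In(FILTERTYPES),
                          msg="filtertype must be one of {0}".format(FILTERTYPES)),
        'unit_count': Coerce(int),
    })

    for cfg in ({'filtertype': 'none'},                            # valid
                {'filtertype': 'sir_not_appearing_in_this_film'},  # bad value
                {'filtertype': 12345.6789},                        # bad type
                {'no_filtertype': 'fail'}):                        # unknown key
        try:
            schema(cfg)
            print(cfg, '-> ok')
        except MultipleInvalid as err:
            print(cfg, '->', err)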
All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -11600,7 +11639,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 
'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -11618,11 +11657,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461621 Snapshot snap_name is actionable and remains in the list. 
-Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1718871455) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684461621) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1718871455) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684461621) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -11632,11 +11671,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1718871455) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1684461622) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1718871455) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1684461622) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -11646,14 +11685,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1718871455) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1684461622) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -11663,11 +11702,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1718871455) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1684461622) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -11677,13 +11716,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. 
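[Note] In the runs above that use source 'name', the age is not read from the cluster at all: a regex is compiled from the timestring (the logged "regex = \d{4}\.\d{2}\.\d{2}" corresponds to '%Y.%m.%d'), the match is parsed into an epoch, and snapshots with no match are dropped "for having no age". A small sketch of that extraction (the helper name is mine, and UTC midnight is assumed for the parsed date):

    import re
    from datetime import datetime, timezone

    DATE_REGEX = r'\d{4}\.\d{2}\.\d{2}'   # derived from timestring '%Y.%m.%d'

    def age_from_name(name, timestring='%Y.%m.%d'):
        """Return an epoch derived from a date embedded in the name, or None."""
        match = re.search(DATE_REGEX, name)
        if match is None:
            return None                   # "Removing snapshot ... for having no age"
        stamp = datetime.strptime(match.group(0), timestring).replace(tzinfo=timezone.utc)
        return int(stamp.timestamp())

    for name in ('snap_name', 'snapshot-2015.03.01'):
        print(name, age_from_name(name))  # None, then 1425168000 as seen in the log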
Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1718871455 +Point of Reference: 1684461622 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -11932,7 +11971,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.005s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -11963,7 +12002,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.004s] +GET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -11993,7 +12032,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12006,7 +12045,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12019,9 +12058,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: 
[Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -12029,7 +12068,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12059,7 +12098,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.004s] +GET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12089,7 +12128,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12102,7 +12141,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12115,9 +12154,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
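[Note] Every one of these connectivity probes fails the same way: the pbuilder environment exports http_proxy, so even the https://127.0.0.1:9200/ request is routed through a proxy that is unreachable from the chroot, and the client surfaces ProxyError/MaxRetryError rather than a plain refused connection; curator then logs "HTTP N/A error" and exits. The failure mode is easy to reproduce outside the test suite (the proxy below points at a local port where, as an assumption, nothing is listening):

    import requests

    proxies = {'https': 'http://127.0.0.1:3128'}   # stand-in for the build's http_proxy

    try:
        requests.get('https://127.0.0.1:9200/', proxies=proxies, timeout=3)
    except requests.exceptions.RequestException as err:
        # With an unreachable proxy this is a ProxyError wrapping MaxRetryError,
        # which curator reports as "HTTP N/A error: ..." before giving up.
        print(type(err).__name__, err)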
..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12129,7 +12168,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.004s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12159,7 +12198,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12172,7 +12211,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12185,9 +12224,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
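[Note] This kwargs dump combines master_only with a two-entry hosts list, which is exactly the kind of option combination a client builder has to sanity-check before attempting a connection. A purely hypothetical illustration of such a check (not curator's code; the function and messages are invented for the example):

    def check_client_options(hosts, master_only=False, use_ssl=False, ssl_no_validate=False):
        """Tiny sanity check over client options (illustrative only)."""
        if master_only and len(hosts) > 1:
            raise ValueError('master_only requires a single host, got %d' % len(hosts))
        if ssl_no_validate and not use_ssl:
            raise ValueError('ssl_no_validate has no effect without use_ssl')
        return {'hosts': hosts, 'master_only': master_only,
                'use_ssl': use_ssl, 'ssl_no_validate': ssl_no_validate}

    try:
        check_client_options(['127.0.0.1', '127.0.0.1'], master_only=True)
    except ValueError as err:
        print(err)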
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12195,7 +12234,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.004s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12225,7 +12264,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12238,7 +12277,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12251,11 +12290,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
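[Note] Here ssl_no_validate is True, i.e. certificate verification is switched off for the HTTPS probe; the request still fails because of the proxy, not because of TLS. In plain requests/urllib3 terms, disabling validation looks roughly like this (a generic sketch, not curator's client setup):

    import requests
    import urllib3

    # Suppress the InsecureRequestWarning that verify=False would otherwise trigger.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    try:
        # verify=False is the requests-level equivalent of "ssl_no_validate": True
        requests.get('https://127.0.0.1:9200/', verify=False, timeout=3)
    except requests.exceptions.ConnectionError as err:
        print('connection failed before TLS was ever checked:', err)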
-.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -12330,7 +12369,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -12348,93 +12387,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
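[Note] The reindex TASK_DATA entries above are the other source of non-determinism in this diff: the completion time is computed from start_time_in_millis plus running_time_in_nanos and, judging by the two builds (TZ Etc/GMT+12 versus Etc/GMT-14), appears to be rendered in the build's local timezone even though the string carries a "Z" suffix, so b1 prints 2017-03-16T08:53:39Z while b2 prints 2017-03-17T10:53:39Z for the same instant. A small sketch of the arithmetic with an explicitly UTC rendering (illustrative, not curator's formatting code):

    from datetime import datetime, timezone

    start_time_in_millis = 1489695981997      # values from the TASK_DATA block
    running_time_in_nanos = 1637039537721

    running_seconds = running_time_in_nanos / 1e9                  # ~1637.04 s, as logged
    completed_epoch = start_time_in_millis / 1000.0 + running_seconds

    # Formatting from the UTC epoch avoids the local-time skew visible in the diff.
    print('Running time: %f seconds' % running_seconds)
    print('Task completed at',
          datetime.fromtimestamp(completed_epoch, tz=timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ'))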
Result: False -......Schema: .f at 0x7f8835a57560> +......Schema: .f at 0x7f3239e43560> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} -.Schema: .f at 0x7f8835a57380> +Filter #0: {'filtertype': 'age', 
'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'timestring': None, 'exclude': False, 'stats_result': 'min_value'}
+.Schema: .f at 0x7f3239e43380>
 "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}]
-AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
+AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}]
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
 "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}
-Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False}
-.Schema: .f at 0x7f8835d0bd80>
+Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'timestring': None, 'exclude': False, 'stats_result': 'min_value'}
+.Schema: .f at 0x7f323a0f7d80>
 "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}
 Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}
-.Schema: .f at 0x7f8835d0b380>
+.Schema: .f at 0x7f323a0f7380>
 "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}
 Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}
-.Schema: .f at 0x7f8835d0b9c0>
+.Schema: .f at 0x7f323a0f79c0>
 "filters" config: [{'filtertype': 'closed', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'closed', 'exclude': False}
 Filter #0: {'filtertype': 'closed', 'exclude': False}
-.Schema: .f at 0x7f8835d0bba0>
+.Schema: .f at 0x7f323a0f7ba0>
 "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
 "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}
-Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'timestring': None, 'use_age': False}
-.Schema: .f at 0x7f8835d0bba0>
+Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'timestring': None, 'stats_result': 'min_value', 'use_age': False}
+.Schema: .f at 0x7f323a0f7ba0>
 "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}
 Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}
-.Schema: .f at 0x7f8835d0bba0>
+.Schema: .f at 0x7f323a0f7ba0>
 "filters" config: [{'filtertype': 'kibana', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'kibana', 'exclude': False}
 Filter #0: {'filtertype': 'kibana', 'exclude': False}
-.Schema: .f at 0x7f8835a6e2a0>
+.Schema: .f at 0x7f3239e562a0>
 "filters" config: [{'filtertype': 'opened', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'opened', 'exclude': False}
 Filter #0: {'filtertype': 'opened', 'exclude': False}
-.Schema: .f at 0x7f8835a6e020>
+.Schema: .f at 0x7f3239e56020>
 "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}
 Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}
-.Schema: .f at 0x7f8835a1ae80>
+.Schema: .f at 0x7f3239e06e80>
 "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}
-Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'threshold_behavior': 'greater_than', 'reverse': True, 'timestring': None, 'stats_result': 'min_value'}
-.Schema: .f at 0x7f8835a1a980>
+Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True}
+.Schema: .f at 0x7f3239e06980>
 "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}
-Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'threshold_behavior': 'greater_than', 'reverse': True, 'timestring': None, 'stats_result': 'min_value'}
-.Schema: .f at 0x7f8835a3eb60>
+Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True}
+.Schema: .f at 0x7f3239e2ab60>
 "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}
 Schema error: required key not provided @ data['field']
 Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file.
-.Schema: .f at 0x7f8835a6d800>
+.Schema: .f at 0x7f3239e55800>
 "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}
-Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'reverse': True, 'stats_result': 'min_value'}
-.Schema: .f at 0x7f8835a6eca0>
+Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True}
+.Schema: .f at 0x7f3239e56ca0>
 "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}
 Schema error: required key not provided @ data['timestring']
 Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file.
-.Schema: .f at 0x7f8835a6ed40>
+.Schema: .f at 0x7f3239e54ae0>
 "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)}
 "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}
-Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'reverse': True, 'stats_result': 'min_value'}
-.Schema: .f at 0x7f8835a6eca0>
+Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True}
+.Schema: .f at 0x7f3239e56ca0>
 "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}]
-Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
+Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}
 "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}
 Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}
 ..
@@ -12470,7 +12509,7 @@
 /build/elasticsearch-curator-5.8.1/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
 'to' if job is 'add' else 'from',
 ----------------------------------------------------------------------
-Ran 529 tests in 14.906s
+Ran 529 tests in 17.555s
 
 OK (SKIP=3)
 create-stamp debian/debhelper-build-stamp
@@ -12712,8 +12751,8 @@
 dh_md5sums -O--buildsystem=pybuild
 dh_builddeb -O--buildsystem=pybuild
 dpkg-deb: building package 'elasticsearch-curator' in '../elasticsearch-curator_5.8.1-4.1_all.deb'.
-dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
 dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
+dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
 dpkg-genbuildinfo --build=binary -O../elasticsearch-curator_5.8.1-4.1_amd64.buildinfo
 dpkg-genchanges --build=binary -O../elasticsearch-curator_5.8.1-4.1_amd64.changes
 dpkg-genchanges: info: binary-only upload (no source code included)
@@ -12721,12 +12760,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: not including original source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/2453881/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/3421144 and its subdirectories
-I: Current time: Thu Jun 20 20:17:47 -12 2024
-I: pbuilder-time-stamp: 1718957867
+I: removing directory /srv/workspace/pbuilder/2453881 and its subdirectories
+I: Current time: Sat May 20 16:01:43 +14 2023
+I: pbuilder-time-stamp: 1684548103